text
stringlengths 4
1.02M
| meta
dict |
|---|---|
import unittest
class Test(unittest.TestCase):
def test_load_serializer(self):
from serialbox import Serializer
def test_load_visualizer(self):
from serialbox import Visualizer
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "7bcc70ccad003a4a1b3c85db7a459916",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 34,
"avg_line_length": 19.25,
"alnum_prop": 0.7316017316017316,
"repo_name": "MeteoSwiss-APN/serialbox",
"id": "957e4389b83ade56024add778a608fc30b446310",
"size": "343",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/test.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "30345"
},
{
"name": "C++",
"bytes": "2199189"
},
{
"name": "CMake",
"bytes": "9693"
},
{
"name": "Fortran",
"bytes": "99060"
},
{
"name": "Python",
"bytes": "152224"
},
{
"name": "Shell",
"bytes": "1456"
}
],
"symlink_target": ""
}
|
"""Run tests.
This will find all modules whose name match a given prefix in the test
directory, and run them. Various command line options provide
additional facilities.
Command line options:
-v verbose -- run tests in verbose mode with output to stdout
-q quiet -- don't print anything except if a test fails
-t testdir -- directory where the tests will be found
-x exclude -- add a test to exclude
-p profile -- profiled execution
-d dbc -- enable design-by-contract
-m match -- only run tests matching the tag pattern which follows
If no non-option arguments are present, prefixes used are 'test',
'regrtest', 'smoketest' and 'unittest'.
"""
__docformat__ = "restructuredtext en"
# modified copy of some functions from test/regrtest.py from PyXml
# disable camel case warning
# pylint: disable=C0103
import sys
import os, os.path as osp
import re
import traceback
import inspect
import difflib
import tempfile
import math
import warnings
from shutil import rmtree
from operator import itemgetter
from ConfigParser import ConfigParser
from logilab.common.deprecation import deprecated
from itertools import dropwhile
import unittest as unittest_legacy
# When the stdlib "unittest" predates the 2.7 rewrite (no __package__ on the
# module object), require the unittest2 backport instead.
if not getattr(unittest_legacy, "__package__", None):
    try:
        import unittest2 as unittest
        from unittest2 import SkipTest
    except ImportError:
        raise ImportError("You have to install python-unittest2 to use %s" % __name__)
else:
    import unittest
    from unittest import SkipTest
try:
    from functools import wraps
except ImportError:
    # minimal fallback for very old pythons: only copies the wrapped
    # function's name onto the proxy
    def wraps(wrapped):
        def proxy(callable):
            callable.__name__ = wrapped.__name__
            return callable
        return proxy
try:
    from test import test_support
except ImportError:
    # not always available
    class TestSupport:
        def unload(self, test):
            pass
    test_support = TestSupport()
# pylint: disable=W0622
from logilab.common.compat import any, InheritableSet, callable
# pylint: enable=W0622
from logilab.common.debugger import Debugger, colorize_source
from logilab.common.decorators import cached, classproperty
from logilab.common import textutils

__all__ = ['main', 'unittest_main', 'find_tests', 'run_test', 'spawn']

# module name prefixes recognized as test modules by find_tests()
DEFAULT_PREFIXES = ('test', 'regrtest', 'smoketest', 'unittest',
                    'func', 'validation')

if sys.version_info >= (2, 6):
    # FIXME : this does not work as expected / breaks tests on testlib
    # however testlib does not work on py3k for many reasons ...
    from inspect import CO_GENERATOR
else:
    from compiler.consts import CO_GENERATOR

if sys.version_info >= (3, 0):
    def is_generator(function):
        # true when `function` is a generator function (CO_GENERATOR code flag)
        flags = function.__code__.co_flags
        return flags & CO_GENERATOR
else:
    def is_generator(function):
        # python 2: the code object lives under func_code
        flags = function.func_code.co_flags
        return flags & CO_GENERATOR
# used by unittest to count the number of relevant levels in the traceback
__unittest = 1
def with_tempdir(callable):
    """Decorator: run the wrapped callable with a private scratch directory
    installed as ``tempfile.tempdir``, and wipe that directory afterwards.

    Only covers temporary files created through the ``tempfile`` module;
    the previous tempdir is always restored, even on error.
    """
    @wraps(callable)
    def proxy(*args, **kargs):
        saved_tmpdir = tempfile.gettempdir()
        sandbox = tempfile.mkdtemp(prefix="temp-lgc-")
        tempfile.tempdir = sandbox
        try:
            return callable(*args, **kargs)
        finally:
            try:
                rmtree(sandbox, ignore_errors=True)
            finally:
                # restore whatever tempdir was active before the call
                tempfile.tempdir = saved_tmpdir
    return proxy
def in_tempdir(callable):
    """Decorator running the wrapped callable from inside
    ``tempfile.tempdir``; the previous working directory is restored on
    the way out, whatever happens."""
    @wraps(callable)
    def proxy(*args, **kargs):
        previous_cwd = os.getcwd()
        os.chdir(tempfile.tempdir)
        try:
            return callable(*args, **kargs)
        finally:
            os.chdir(previous_cwd)
    return proxy
def within_tempdir(callable):
    """Decorator running the wrapped callable inside a scratch temporary
    directory which is removed once the call returns (combination of
    with_tempdir and in_tempdir)."""
    chdir_proxy = in_tempdir(callable)
    proxy = with_tempdir(chdir_proxy)
    proxy.__name__ = callable.__name__
    return proxy
def find_tests(testdir,
               prefixes=DEFAULT_PREFIXES, suffix=".py",
               excludes=(),
               remove_suffix=True):
    """Return a sorted list of all applicable test modules in `testdir`.

    :param testdir: directory to scan
    :param prefixes: accepted module name prefixes
    :param suffix: required file name suffix ('' or None accepts any name)
    :param excludes: module names to skip
    :param remove_suffix: strip `suffix` from the returned names
    """
    tests = []
    for name in os.listdir(testdir):
        if suffix and not name.endswith(suffix):
            continue
        for prefix in prefixes:
            if name.startswith(prefix):
                if remove_suffix and name.endswith(suffix):
                    name = name[:-len(suffix)]
                if name not in excludes:
                    tests.append(name)
                # stop at the first matching prefix: without this break a
                # name matching several prefixes (especially once the
                # suffix has been stripped) was appended more than once
                break
    tests.sort()
    return tests
## PostMortem Debug facilities #####
def start_interactive_mode(result):
    """starts an interactive shell so that the user can inspect errors

    `result` is a SkipAwareTestResult whose recorded debuggers are offered
    to the user; with a single failure the debugger starts immediately,
    otherwise the user picks a test by number (or 'exit'/^D to leave).
    """
    debuggers = result.debuggers
    descrs = result.error_descrs + result.fail_descrs
    if len(debuggers) == 1:
        # don't ask for test name if there's only one failure
        debuggers[0].start()
    else:
        while True:
            testindex = 0
            print "Choose a test to debug:"
            # order debuggers in the same way than errors were printed
            print "\n".join(['\t%s : %s' % (i, descr) for i, (_, descr)
                             in enumerate(descrs)])
            print "Type 'exit' (or ^D) to quit"
            print
            try:
                todebug = raw_input('Enter a test name: ')
                if todebug.strip().lower() == 'exit':
                    print
                    break
                else:
                    try:
                        # descrs entries are (debugger index, description)
                        testindex = int(todebug)
                        debugger = debuggers[descrs[testindex][0]]
                    except (ValueError, IndexError):
                        print "ERROR: invalid test number %r" % (todebug, )
                    else:
                        debugger.start()
            except (EOFError, KeyboardInterrupt):
                print
                break
# test utils ##################################################################
class SkipAwareTestResult(unittest._TextTestResult):
    """_TextTestResult subclass adding skipped-test bookkeeping, optional
    post-mortem debuggers, exit-on-first-failure support and (optionally
    colorized / verbose) traceback formatting."""

    def __init__(self, stream, descriptions, verbosity,
                 exitfirst=False, pdbmode=False, cvg=None, colorize=False):
        super(SkipAwareTestResult, self).__init__(stream,
                                                  descriptions, verbosity)
        self.skipped = []       # list of (test, reason) pairs
        self.debuggers = []     # one debugger per recorded error/failure (pdb mode)
        self.fail_descrs = []   # list of (debugger index, test description)
        self.error_descrs = []  # idem, for errors
        self.exitfirst = exitfirst  # stop the whole run on first error/failure
        self.pdbmode = pdbmode      # record a debugger for each error/failure
        self.cvg = cvg              # optional coverage object (started elsewhere)
        self.colorize = colorize    # ANSI-colorize formatted tracebacks
        self.pdbclass = Debugger
        self.verbose = verbosity > 1

    def descrs_for(self, flavour):
        """return the recorded descriptions for `flavour` ('fail' or 'error')"""
        return getattr(self, '%s_descrs' % flavour.lower())

    def _create_pdb(self, test_descr, flavour):
        """record the test description and, in pdb mode, a debugger bound to
        the currently handled traceback"""
        self.descrs_for(flavour).append( (len(self.debuggers), test_descr) )
        if self.pdbmode:
            self.debuggers.append(self.pdbclass(sys.exc_info()[2]))

    def _iter_valid_frames(self, frames):
        """only consider non-testlib frames when formatting traceback"""
        lgc_testlib = osp.abspath(__file__)
        std_testlib = osp.abspath(unittest.__file__)
        invalid = lambda fi: osp.abspath(fi[1]) in (lgc_testlib, std_testlib)
        for frameinfo in dropwhile(invalid, frames):
            yield frameinfo

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string.
        This method is overridden here because we want to colorize
        lines if --color is passed, and display local variables if
        --verbose is passed
        """
        exctype, exc, tb = err
        output = ['Traceback (most recent call last)']
        frames = inspect.getinnerframes(tb)
        colorize = self.colorize
        frames = enumerate(self._iter_valid_frames(frames))
        for index, (frame, filename, lineno, funcname, ctx, ctxindex) in frames:
            filename = osp.abspath(filename)
            if ctx is None: # pyc files or C extensions for instance
                source = '<no source available>'
            else:
                source = ''.join(ctx)
            if colorize:
                filename = textutils.colorize_ansi(filename, 'magenta')
                source = colorize_source(source)
            output.append('  File "%s", line %s, in %s' % (filename, lineno, funcname))
            output.append('    %s' % source.strip())
            if self.verbose:
                output.append('%r == %r' % (dir(frame), test.__module__))
                output.append('')
                output.append('    ' + ' local variables '.center(66, '-'))
                for varname, value in sorted(frame.f_locals.items()):
                    output.append('    %s: %r' % (varname, value))
                    if varname == 'self': # special handy processing for self
                        for varname, value in sorted(vars(value).items()):
                            output.append('      self.%s: %r' % (varname, value))
                output.append('    ' + '-' * 66)
                output.append('')
        output.append(''.join(traceback.format_exception_only(exctype, exc)))
        return '\n'.join(output)

    def addError(self, test, err):
        """err -> (exc_type, exc, tcbk)

        A SkipTest raised from a test is rerouted to addSkip() instead of
        being counted as an error.
        """
        exc_type, exc, _ = err
        if isinstance(exc, SkipTest):
            assert exc_type == SkipTest
            self.addSkip(test, exc)
        else:
            if self.exitfirst:
                self.shouldStop = True
            descr = self.getDescription(test)
            super(SkipAwareTestResult, self).addError(test, err)
            self._create_pdb(descr, 'error')

    def addFailure(self, test, err):
        """record a failure, honouring exitfirst and pdb mode"""
        if self.exitfirst:
            self.shouldStop = True
        descr = self.getDescription(test)
        super(SkipAwareTestResult, self).addFailure(test, err)
        self._create_pdb(descr, 'fail')

    def addSkip(self, test, reason):
        """record a skipped test and echo it on the output stream"""
        self.skipped.append((test, reason))
        if self.showAll:
            self.stream.writeln("SKIPPED")
        elif self.dots:
            self.stream.write('S')

    def printErrors(self):
        """print errors and failures, then the list of skipped tests"""
        super(SkipAwareTestResult, self).printErrors()
        self.printSkippedList()

    def printSkippedList(self):
        # format (test, err) compatible with unittest2
        for test, err in self.skipped:
            descr = self.getDescription(test)
            self.stream.writeln(self.separator1)
            self.stream.writeln("%s: %s" % ('SKIPPED', descr))
            self.stream.writeln("\t%s" % err)

    def printErrorList(self, flavour, errors):
        """print recorded errors of the given flavour, pairing each with the
        description captured when it was recorded"""
        for (_, descr), (test, err) in zip(self.descrs_for(flavour), errors):
            self.stream.writeln(self.separator1)
            self.stream.writeln("%s: %s" % (flavour, descr))
            self.stream.writeln(self.separator2)
            self.stream.writeln(err)
            self.stream.writeln('no stdout'.center(len(self.separator2)))
            self.stream.writeln('no stderr'.center(len(self.separator2)))
# Add deprecation warnings about new api used by module level fixtures in unittest2
# http://www.voidspace.org.uk/python/articles/unittest2.shtml#setupmodule-and-teardownmodule
class _DebugResult(object): # simplify import statement among unittest flavors..
"Used by the TestSuite to hold previous class when running in debug."
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False
from logilab.common.decorators import monkeypatch
@monkeypatch(unittest.TestSuite)
def _handleModuleTearDown(self, result):
    """monkeypatched TestSuite method: run the module-level tear-down of the
    previously run module, accepting the legacy ``teardown_module`` name
    with a deprecation warning"""
    previousModule = self._get_previous_module(result)
    if previousModule is None:
        return
    if result._moduleSetUpFailed:
        return
    try:
        module = sys.modules[previousModule]
    except KeyError:
        return
    # add testlib specific deprecation warning and switch to new api
    if hasattr(module, 'teardown_module'):
        warnings.warn('Please rename teardown_module() to tearDownModule() instead.',
                      DeprecationWarning)
        setattr(module, 'tearDownModule', module.teardown_module)
    # end of monkey-patching
    tearDownModule = getattr(module, 'tearDownModule', None)
    if tearDownModule is not None:
        try:
            tearDownModule()
        except Exception, e:
            # in debug mode let the error propagate to the debugger
            if isinstance(result, _DebugResult):
                raise
            errorName = 'tearDownModule (%s)' % previousModule
            self._addClassOrModuleLevelException(result, e, errorName)
@monkeypatch(unittest.TestSuite)
def _handleModuleFixture(self, test, result):
    """monkeypatched TestSuite method: when entering a new module, tear down
    the previous one and run this module's set-up, accepting the legacy
    ``setup_module`` name with a deprecation warning"""
    previousModule = self._get_previous_module(result)
    currentModule = test.__class__.__module__
    if currentModule == previousModule:
        return
    self._handleModuleTearDown(result)
    result._moduleSetUpFailed = False
    try:
        module = sys.modules[currentModule]
    except KeyError:
        return
    # add testlib specific deprecation warning and switch to new api
    if hasattr(module, 'setup_module'):
        warnings.warn('Please rename setup_module() to setUpModule() instead.',
                      DeprecationWarning)
        setattr(module, 'setUpModule', module.setup_module)
    # end of monkey-patching
    setUpModule = getattr(module, 'setUpModule', None)
    if setUpModule is not None:
        try:
            setUpModule()
        except Exception, e:
            # in debug mode let the error propagate to the debugger
            if isinstance(result, _DebugResult):
                raise
            result._moduleSetUpFailed = True
            errorName = 'setUpModule (%s)' % currentModule
            self._addClassOrModuleLevelException(result, e, errorName)
# backward compatibility: TestSuite might be imported from lgc.testlib
TestSuite = unittest.TestSuite
class keywords(dict):
    """Keyword args (**kwargs) support for generative tests.

    A plain dict subclass: only its *type* matters, it is what
    parse_generative_args dispatches on.
    """
class starargs(tuple):
    """Variable arguments (*args) for generative tests."""
    def __new__(cls, *args):
        # pack the positional arguments themselves into the tuple
        return super(starargs, cls).__new__(cls, args)
unittest_main = unittest.main
class InnerTestSkipped(SkipTest):
    """raised when an individual check of a generative test is skipped"""
    pass
def parse_generative_args(params):
    """split the parameters yielded by a generative test into positional
    and keyword arguments

    ``starargs`` instances extend the positional arguments, a ``keywords``
    instance provides the keyword arguments, anything else is a plain
    positional value.  Ordering mirrors a regular call: plain values may
    not follow starargs/keywords, starargs may not follow keywords, and
    keywords may only appear once.
    """
    args = []
    kwargs = {}
    seen_star = seen_kw = False
    for param in params:
        if isinstance(param, starargs):
            if seen_star or seen_kw:
                raise TypeError('found starargs after keywords !')
            seen_star = True
            args.extend(param)
        elif isinstance(param, keywords):
            if seen_kw:
                raise TypeError('got multiple keywords parameters')
            seen_kw = True
            kwargs = param
        elif seen_star or seen_kw:
            raise TypeError('found parameters after kwargs or args')
        else:
            args.append(param)
    return args, kwargs
class InnerTest(tuple):
    """tuple of data identifying one yielded check of a generative test;
    carries a `name` attribute used to match run conditions."""
    def __new__(cls, name, *data):
        self = tuple.__new__(cls, data)
        self.name = name
        return self
class Tags(InheritableSet): # 2.4 compat
    """A set of tag able validate an expression"""

    def __init__(self, *tags, **kwargs):
        # inherit flag kept on the instance; presumably controls whether
        # subclass tags are merged with these -- confirm against callers
        self.inherit = kwargs.pop('inherit', True)
        if kwargs:
            raise TypeError("%s are an invalid keyword argument for this function" % kwargs.keys())
        # accept either Tags('a', 'b', ...) or Tags(iterable_of_tags)
        if len(tags) == 1 and not isinstance(tags[0], basestring):
            tags = tags[0]
        super(Tags, self).__init__(tags, **kwargs)

    def __getitem__(self, key):
        # mapping protocol used by match(): tag name -> membership boolean
        return key in self

    def match(self, exp):
        # evaluate a boolean tag expression (e.g. "web and not slow");
        # names resolve through __getitem__ above. NOTE: eval() on the
        # expression -- only safe for trusted, test-author-provided input
        return eval(exp, {}, self)
# duplicate definition from unittest2 of the _deprecate decorator
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
('Please use %s instead.' % original_func.__name__),
DeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
class TestCase(unittest.TestCase):
"""A unittest.TestCase extension with some additional methods."""
maxDiff = None
pdbclass = Debugger
tags = Tags()
    def __init__(self, methodName='runTest'):
        """initialize the test case, smoothing over the unittest internal
        API differences between python versions"""
        super(TestCase, self).__init__(methodName)
        # internal API changed in python2.4 and needed by DocTestCase
        if sys.version_info >= (2, 4):
            self.__exc_info = sys.exc_info
            self.__testMethodName = self._testMethodName
        else:
            # let's give easier access to _testMethodName to every subclasses
            if hasattr(self, "__testMethodName"):
                self._testMethodName = self.__testMethodName
        self._current_test_descr = None  # per-yield description (generative tests)
        self._options_ = None  # command line options object, set by __call__
    @classproperty
    @cached
    def datadir(cls): # pylint: disable=E0213
        """helper attribute holding the standard test's data directory
        NOTE: this is a logilab's standard

        Cached on the class (TestCase is instantiated once per test run):
        the 'data' directory sitting next to the test module.
        """
        mod = __import__(cls.__module__)
        return osp.join(osp.dirname(osp.abspath(mod.__file__)), 'data')
# cache it (use a class method to cache on class since TestCase is
# instantiated for each test run)
@classmethod
def datapath(cls, *fname):
"""joins the object's datadir and `fname`"""
return osp.join(cls.datadir, *fname)
def set_description(self, descr):
"""sets the current test's description.
This can be useful for generative tests because it allows to specify
a description per yield
"""
self._current_test_descr = descr
# override default's unittest.py feature
def shortDescription(self):
"""override default unittest shortDescription to handle correctly
generative tests
"""
if self._current_test_descr is not None:
return self._current_test_descr
return super(TestCase, self).shortDescription()
    def quiet_run(self, result, func, *args, **kwargs):
        """run `func`, reporting any error on `result` instead of letting it
        propagate; return True on success, False otherwise

        KeyboardInterrupt and SystemExit are still propagated.
        """
        try:
            func(*args, **kwargs)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # deliberately broad: any other failure becomes a test error
            result.addError(self, self.__exc_info())
            return False
        return True
    def _get_test_method(self):
        """return the test method"""
        # the bound method object for the currently selected test name
        return getattr(self, self._testMethodName)
    def optval(self, option, default=None):
        """return the command line value for `option`, or `default` when the
        option is not defined"""
        return getattr(self._options_, option, default)
    def __call__(self, result=None, runcondition=None, options=None):
        """rewrite TestCase.__call__ to support generative tests
        This is mostly a copy/paste from unittest.py (i.e same
        variable names, same logic, except for the generative tests part)

        :param result: a (SkipAware)TestResult; a default one is created
            when None
        :param runcondition: optional callable deciding whether the test
            method should run at all
        :param options: command line options object, stored on the instance
        """
        from logilab.common.pytest import FILE_RESTART
        if result is None:
            result = self.defaultTestResult()
        result.pdbclass = self.pdbclass
        self._options_ = options
        # if result.cvg:
        #     result.cvg.start()
        testMethod = self._get_test_method()
        if runcondition and not runcondition(testMethod):
            return # test is skipped
        result.startTest(self)
        try:
            if not self.quiet_run(result, self.setUp):
                return
            # generator test methods get each yielded check run separately
            generative = is_generator(testMethod.im_func)
            # generative tests
            if generative:
                self._proceed_generative(result, testMethod,
                                         runcondition)
            else:
                status = self._proceed(result, testMethod)
                success = (status == 0)
            if not self.quiet_run(result, self.tearDown):
                return
            if not generative and success:
                if hasattr(options, "exitfirst") and options.exitfirst:
                    # add this test to restart file
                    try:
                        restartfile = open(FILE_RESTART, 'a')
                        try:
                            descr = '.'.join((self.__class__.__module__,
                                              self.__class__.__name__,
                                              self._testMethodName))
                            restartfile.write(descr+os.linesep)
                        finally:
                            restartfile.close()
                    except Exception, ex:
                        print >> sys.__stderr__, "Error while saving \
succeeded test into", osp.join(os.getcwd(), FILE_RESTART)
                        raise ex
                result.addSuccess(self)
        finally:
            # if result.cvg:
            #     result.cvg.stop()
            result.stopTest(self)
    def _proceed_generative(self, result, testfunc, runcondition=None):
        """run a generative test: iterate over the yielded checks, run each
        through _proceed() and report on `result`; return True when every
        yielded check succeeded"""
        # cancel startTest()'s increment
        result.testsRun -= 1
        success = True
        try:
            for params in testfunc():
                if runcondition and not runcondition(testfunc,
                        skipgenerator=False):
                    # only run InnerTest checks explicitly matched by the
                    # run condition
                    if not (isinstance(params, InnerTest)
                            and runcondition(params)):
                        continue
                if not isinstance(params, (tuple, list)):
                    params = (params, )
                func = params[0]
                args, kwargs = parse_generative_args(params[1:])
                # increment test counter manually
                result.testsRun += 1
                status = self._proceed(result, func, args, kwargs)
                if status == 0:
                    result.addSuccess(self)
                    success = True
                else:
                    success = False
                    # XXX Don't stop anymore if an error occured
                    #if status == 2:
                    #    result.shouldStop = True
                if result.shouldStop: # either on error or on exitfirst + error
                    break
        except:
            # if an error occurs between two yield
            result.addError(self, self.__exc_info())
            success = False
        return success
    def _proceed(self, result, testfunc, args=(), kwargs=None):
        """proceed the actual test
        returns 0 on success, 1 on failure, 2 on error
        Note: addSuccess can't be called here because we have to wait
        for tearDown to be successfully executed to declare the test as
        successful
        """
        kwargs = kwargs or {}
        try:
            testfunc(*args, **kwargs)
        except self.failureException:
            result.addFailure(self, self.__exc_info())
            return 1
        except KeyboardInterrupt:
            raise
        except InnerTestSkipped, e:
            # a skipped inner check is recorded as a skip but counted as
            # a failure status for the enclosing generative test
            result.addSkip(self, e)
            return 1
        except SkipTest, e:
            result.addSkip(self, e)
            return 0
        except:
            # any other exception is an error
            result.addError(self, self.__exc_info())
            return 2
        return 0
    def defaultTestResult(self):
        """return a new instance of the defaultTestResult"""
        # NOTE(review): SkipAwareTestResult.__init__ requires stream,
        # descriptions and verbosity arguments, so this call looks like it
        # would raise TypeError -- confirm intended usage
        return SkipAwareTestResult()
    # deprecated spellings kept for backward compatibility; each emits a
    # DeprecationWarning pointing at the standard unittest name
    skip = _deprecate(unittest.TestCase.skipTest)
    assertEquals = _deprecate(unittest.TestCase.assertEqual)
    assertNotEquals = _deprecate(unittest.TestCase.assertNotEqual)
    assertAlmostEquals = _deprecate(unittest.TestCase.assertAlmostEqual)
    assertNotAlmostEquals = _deprecate(unittest.TestCase.assertNotAlmostEqual)

    def innerSkip(self, msg=None):
        """mark a generative test as skipped for the <msg> reason"""
        msg = msg or 'test was skipped'
        raise InnerTestSkipped(msg)
@deprecated('Please use assertDictEqual instead.')
def assertDictEquals(self, dict1, dict2, msg=None, context=None):
"""compares two dicts
If the two dict differ, the first difference is shown in the error
message
:param dict1: a Python Dictionary
:param dict2: a Python Dictionary
:param msg: custom message (String) in case of failure
"""
dict1 = dict(dict1)
msgs = []
for key, value in dict2.items():
try:
if dict1[key] != value:
msgs.append('%r != %r for key %r' % (dict1[key], value,
key))
del dict1[key]
except KeyError:
msgs.append('missing %r key' % key)
if dict1:
msgs.append('dict2 is lacking %r' % dict1)
if msg:
self.failureException(msg)
elif msgs:
if context is not None:
base = '%s\n' % context
else:
base = ''
self.fail(base + '\n'.join(msgs))
    @deprecated('Please use assertItemsEqual instead.')
    def assertUnorderedIterableEquals(self, got, expected, msg=None):
        """compares two iterable and shows difference between both

        :param got: the unordered Iterable that we found
        :param expected: the expected unordered Iterable
        :param msg: custom message (String) in case of failure
        """
        got, expected = list(got), list(expected)
        # same elements first (regardless of multiplicity) ...
        self.assertSetEqual(set(got), set(expected), msg)
        # ... then same number of occurrences of each element
        if len(got) != len(expected):
            if msg is None:
                msg = ['Iterable have the same elements but not the same number',
                       '\t<element>\t<expected>i\t<got>']
                got_count = {}
                expected_count = {}
                for element in got:
                    got_count[element] = got_count.get(element, 0) + 1
                for element in expected:
                    expected_count[element] = expected_count.get(element, 0) + 1
                # we know that got_count.key() == expected_count.key()
                # because of assertSetEqual
                for element, count in got_count.iteritems():
                    other_count = expected_count[element]
                    if other_count != count:
                        msg.append('\t%s\t%s\t%s' % (element, other_count, count))
            self.fail(msg)
    assertUnorderedIterableEqual = assertUnorderedIterableEquals
    assertUnordIterEquals = assertUnordIterEqual = assertUnorderedIterableEqual
    @deprecated('Please use assertSetEqual instead.')
    def assertSetEquals(self,got,expected, msg=None):
        """compares two sets and shows difference between both

        Don't use it for iterables other than sets.

        :param got: the Set that we found
        :param expected: the second Set to be compared to the first one
        :param msg: custom message (String) in case of failure
        """
        # non-set arguments are redirected (with a deprecation warning) to
        # the unordered-iterable comparison
        if not(isinstance(got, set) and isinstance(expected, set)):
            warnings.warn("the assertSetEquals function if now intended for set only."\
                          "use assertUnorderedIterableEquals instead.",
                DeprecationWarning, 2)
            return self.assertUnorderedIterableEquals(got, expected, msg)
        items={}
        items['missing'] = expected - got
        items['unexpected'] = got - expected
        if any(items.itervalues()):
            if msg is None:
                msg = '\n'.join('%s:\n\t%s' % (key, "\n\t".join(str(value) for value in values))
                                for key, values in items.iteritems() if values)
            self.fail(msg)
    @deprecated('Please use assertListEqual instead.')
    def assertListEquals(self, list_1, list_2, msg=None):
        """compares two lists

        If the two list differ, the first difference is shown in the error
        message

        :param list_1: a Python List
        :param list_2: a second Python List
        :param msg: custom message (String) in case of failure
        """
        # consume a copy of list_1 while walking list_2, so leftovers at the
        # end are elements missing from list_2
        _l1 = list_1[:]
        for i, value in enumerate(list_2):
            try:
                if _l1[0] != value:
                    from pprint import pprint
                    pprint(list_1)
                    pprint(list_2)
                    self.fail('%r != %r for index %d' % (_l1[0], value, i))
                del _l1[0]
            except IndexError:
                # list_1 ran out before list_2
                if msg is None:
                    msg = 'list_1 has only %d elements, not %s '\
                          '(at least %r missing)'% (i, len(list_2), value)
                self.fail(msg)
        if _l1:
            if msg is None:
                msg = 'list_2 is lacking %r' % _l1
            self.fail(msg)
@deprecated('Non-standard. Please use assertMultiLineEqual instead.')
def assertLinesEquals(self, string1, string2, msg=None, striplines=False):
"""compare two strings and assert that the text lines of the strings
are equal.
:param string1: a String
:param string2: a String
:param msg: custom message (String) in case of failure
:param striplines: Boolean to trigger line stripping before comparing
"""
lines1 = string1.splitlines()
lines2 = string2.splitlines()
if striplines:
lines1 = [l.strip() for l in lines1]
lines2 = [l.strip() for l in lines2]
self.assertListEqual(lines1, lines2, msg)
assertLineEqual = assertLinesEquals
    @deprecated('Non-standard: please copy test method to your TestCase class')
    def assertXMLWellFormed(self, stream, msg=None, context=2):
        """asserts the XML stream is well-formed (no DTD conformance check)

        :param context: number of context lines in standard message
                        (show all data if negative).
                        Only available with element tree
        """
        try:
            from xml.etree.ElementTree import parse
            self._assertETXMLWellFormed(stream, parse, msg)
        except ImportError:
            # no ElementTree available: fall back on a SAX parser
            from xml.sax import make_parser, SAXParseException
            parser = make_parser()
            try:
                parser.parse(stream)
            except SAXParseException, ex:
                if msg is None:
                    stream.seek(0)
                    for _ in xrange(ex.getLineNumber()):
                        line = stream.readline()
                    # NOTE(review): '' * n is always '', so the caret is never
                    # offset -- was ' ' * (column) intended? confirm
                    pointer = ('' * (ex.getLineNumber() - 1)) + '^'
                    msg = 'XML stream not well formed: %s\n%s%s' % (ex, line, pointer)
                self.fail(msg)
    @deprecated('Non-standard: please copy test method to your TestCase class')
    def assertXMLStringWellFormed(self, xml_string, msg=None, context=2):
        """asserts the XML string is well-formed (no DTD conformance check)

        :param context: number of context lines in standard message
                        (show all data if negative).
                        Only available with element tree
        """
        try:
            from xml.etree.ElementTree import fromstring
        except ImportError:
            # pre-2.5 pythons: standalone elementtree package
            from elementtree.ElementTree import fromstring
        self._assertETXMLWellFormed(xml_string, fromstring, msg)
    def _assertETXMLWellFormed(self, data, parse, msg=None, context=2):
        """internal function used by /assertXML(String)?WellFormed/ functions

        :param data: xml_data
        :param parse: appropriate parser function for this data
        :param msg: error message
        :param context: number of context lines in standard message
                        (show all data if negative).
                        Only available with element tree
        """
        from xml.parsers.expat import ExpatError
        try:
            from xml.etree.ElementTree import ParseError
        except ImportError:
            # compatibility for <python2.7
            ParseError = ExpatError
        try:
            parse(data)
        except (ExpatError, ParseError), ex:
            if msg is None:
                if hasattr(data, 'readlines'): #file like object
                    data.seek(0)
                    lines = data.readlines()
                else:
                    lines = data.splitlines(True)
                nb_lines = len(lines)
                context_lines = []
                # catch when ParseError doesn't set valid lineno
                if ex.lineno is not None:
                    if context < 0:
                        # negative context: show the whole document
                        start = 1
                        end = nb_lines
                    else:
                        start = max(ex.lineno-context, 1)
                        end = min(ex.lineno+context, nb_lines)
                    line_number_length = len('%i' % end)
                    line_pattern = " %%%ii: %%s" % line_number_length
                    for line_no in xrange(start, ex.lineno):
                        context_lines.append(line_pattern % (line_no, lines[line_no-1]))
                    context_lines.append(line_pattern % (ex.lineno, lines[ex.lineno-1]))
                    # caret under the offending column of the error line
                    context_lines.append('%s^\n' % (' ' * (1 + line_number_length + 2 +ex.offset)))
                    for line_no in xrange(ex.lineno+1, end+1):
                        context_lines.append(line_pattern % (line_no, lines[line_no-1]))
                rich_context = ''.join(context_lines)
                msg = 'XML stream not well formed: %s\n%s' % (ex, rich_context)
            self.fail(msg)
    @deprecated('Non-standard: please copy test method to your TestCase class')
    def assertXMLEqualsTuple(self, element, tup):
        """compare an ElementTree Element to a tuple formatted as follow:
        (tagname, [attrib[, children[, text[, tail]]]])

        Children are compared recursively with the same convention.
        """
        # check tag
        self.assertTextEquals(element.tag, tup[0])
        # check attrib
        if len(element.attrib) or len(tup)>1:
            if len(tup)<=1:
                self.fail( "tuple %s has no attributes (%s expected)"%(tup,
                           dict(element.attrib)))
            self.assertDictEqual(element.attrib, tup[1])
        # check children
        if len(element) or len(tup)>2:
            if len(tup)<=2:
                self.fail( "tuple %s has no children (%i expected)"%(tup,
                           len(element)))
            if len(element) != len(tup[2]):
                self.fail( "tuple %s has %i children%s (%i expected)"%(tup,
                           len(tup[2]),
                           ('', 's')[len(tup[2])>1], len(element)))
            for index in xrange(len(tup[2])):
                self.assertXMLEqualsTuple(element[index], tup[2][index])
        #check text
        if element.text or len(tup)>3:
            if len(tup)<=3:
                self.fail( "tuple %s has no text value (%r expected)"%(tup,
                           element.text))
            self.assertTextEquals(element.text, tup[3])
        #check tail
        if element.tail or len(tup)>4:
            if len(tup)<=4:
                self.fail( "tuple %s has no tail value (%r expected)"%(tup,
                           element.tail))
            self.assertTextEquals(element.tail, tup[4])
def _difftext(self, lines1, lines2, junk=None, msg_prefix='Texts differ'):
junk = junk or (' ', '\t')
# result is a generator
result = difflib.ndiff(lines1, lines2, charjunk=lambda x: x in junk)
read = []
for line in result:
read.append(line)
# lines that don't start with a ' ' are diff ones
if not line.startswith(' '):
self.fail('\n'.join(['%s\n'%msg_prefix]+read + list(result)))
    @deprecated('Non-standard. Please use assertMultiLineEqual instead.')
    def assertTextEquals(self, text1, text2, junk=None,
                         msg_prefix='Text differ', striplines=False):
        """compare two multiline strings (using difflib and splitlines())

        :param text1: a Python BaseString
        :param text2: a second Python Basestring
        :param junk: list of characters ignored by the differ
        :param msg_prefix: String (message prefix)
        :param striplines: Boolean to trigger line stripping before comparing
        """
        msg = []
        if not isinstance(text1, basestring):
            msg.append('text1 is not a string (%s)'%(type(text1)))
        if not isinstance(text2, basestring):
            msg.append('text2 is not a string (%s)'%(type(text2)))
        if msg:
            self.fail('\n'.join(msg))
        lines1 = text1.strip().splitlines(True)
        lines2 = text2.strip().splitlines(True)
        if striplines:
            lines1 = [line.strip() for line in lines1]
            lines2 = [line.strip() for line in lines2]
        self._difftext(lines1, lines2, junk, msg_prefix)
    assertTextEqual = assertTextEquals
@deprecated('Non-standard: please copy test method to your TestCase class')
def assertStreamEquals(self, stream1, stream2, junk=None,
msg_prefix='Stream differ'):
"""compare two streams (using difflib and readlines())"""
# if stream2 is stream2, readlines() on stream1 will also read lines
# in stream2, so they'll appear different, although they're not
if stream1 is stream2:
return
# make sure we compare from the beginning of the stream
stream1.seek(0)
stream2.seek(0)
# compare
self._difftext(stream1.readlines(), stream2.readlines(), junk,
msg_prefix)
assertStreamEqual = assertStreamEquals
@deprecated('Non-standard: please copy test method to your TestCase class')
def assertFileEquals(self, fname1, fname2, junk=(' ', '\t')):
"""compares two files using difflib"""
self.assertStreamEqual(open(fname1), open(fname2), junk,
msg_prefix='Files differs\n-:%s\n+:%s\n'%(fname1, fname2))
assertFileEqual = assertFileEquals
    @deprecated('Non-standard: please copy test method to your TestCase class')
    def assertDirEquals(self, path_a, path_b):
        """recursively compare two directory trees: same sub-directories,
        same file names, and same file contents (via assertFileEquals)"""
        assert osp.exists(path_a), "%s doesn't exists" % path_a
        assert osp.exists(path_b), "%s doesn't exists" % path_b
        # walk both trees, indexing entries by their path relative to the root
        all_a = [ (ipath[len(path_a):].lstrip('/'), idirs, ifiles)
                  for ipath, idirs, ifiles in os.walk(path_a)]
        all_a.sort(key=itemgetter(0))
        all_b = [ (ipath[len(path_b):].lstrip('/'), idirs, ifiles)
                  for ipath, idirs, ifiles in os.walk(path_b)]
        all_b.sort(key=itemgetter(0))
        iter_a, iter_b = iter(all_a), iter(all_b)
        partial_iter = True
        # NOTE(review): partial_iter, data_a, datas_a and datas_b are
        # assigned but never read afterwards
        ipath_a, idirs_a, ifiles_a = data_a = None, None, None
        while True:
            try:
                ipath_a, idirs_a, ifiles_a = datas_a = iter_a.next()
                partial_iter = False
                ipath_b, idirs_b, ifiles_b = datas_b = iter_b.next()
                partial_iter = True
                self.assert_(ipath_a == ipath_b,
                             "unexpected %s in %s while looking %s from %s" %
                             (ipath_a, path_a, ipath_b, path_b))
                errors = {}
                # compare sub-directory and file name sets at this level
                sdirs_a = set(idirs_a)
                sdirs_b = set(idirs_b)
                errors["unexpected directories"] = sdirs_a - sdirs_b
                errors["missing directories"] = sdirs_b - sdirs_a
                sfiles_a = set(ifiles_a)
                sfiles_b = set(ifiles_b)
                errors["unexpected files"] = sfiles_a - sfiles_b
                errors["missing files"] = sfiles_b - sfiles_a
                msgs = [ "%s: %s"% (name, items)
                         for name, items in errors.iteritems() if items]
                if msgs:
                    msgs.insert(0, "%s and %s differ :" % (
                        osp.join(path_a, ipath_a),
                        osp.join(path_b, ipath_b),
                        ))
                    self.fail("\n".join(msgs))
                # same names on both sides: compare file contents pairwise
                for files in (ifiles_a, ifiles_b):
                    files.sort()
                for index, path in enumerate(ifiles_a):
                    self.assertFileEquals(osp.join(path_a, ipath_a, path),
                                          osp.join(path_b, ipath_b, ifiles_b[index]))
            except StopIteration:
                break
    assertDirEqual = assertDirEquals
def assertIsInstance(self, obj, klass, msg=None, strict=False):
"""check if an object is an instance of a class
:param obj: the Python Object to be checked
:param klass: the target class
:param msg: a String for a custom message
:param strict: if True, check that the class of <obj> is <klass>;
else check with 'isinstance'
"""
if strict:
warnings.warn('[API] Non-standard. Strict parameter has vanished',
DeprecationWarning, stacklevel=2)
if msg is None:
if strict:
msg = '%r is not of class %s but of %s'
else:
msg = '%r is not an instance of %s but of %s'
msg = msg % (obj, klass, type(obj))
if strict:
self.assert_(obj.__class__ is klass, msg)
else:
self.assert_(isinstance(obj, klass), msg)
@deprecated('Please use assertIsNone instead.')
def assertNone(self, obj, msg=None):
"""assert obj is None
:param obj: Python Object to be tested
"""
if msg is None:
msg = "reference to %r when None expected"%(obj,)
self.assert_( obj is None, msg )
@deprecated('Please use assertIsNotNone instead.')
def assertNotNone(self, obj, msg=None):
"""assert obj is not None"""
if msg is None:
msg = "unexpected reference to None"
self.assert_( obj is not None, msg )
@deprecated('Non-standard. Please use assertAlmostEqual instead.')
def assertFloatAlmostEquals(self, obj, other, prec=1e-5,
relative=False, msg=None):
"""compares if two floats have a distance smaller than expected
precision.
:param obj: a Float
:param other: another Float to be comparted to <obj>
:param prec: a Float describing the precision
:param relative: boolean switching to relative/absolute precision
:param msg: a String for a custom message
"""
if msg is None:
msg = "%r != %r" % (obj, other)
if relative:
prec = prec*math.fabs(obj)
self.assert_(math.fabs(obj - other) < prec, msg)
    def failUnlessRaises(self, excClass, callableObj=None, *args, **kwargs):
        """override default failUnlessRaises method to return the raised
        exception instance.

        Fail unless an exception of class excClass is thrown
        by callableObj when invoked with arguments args and keyword
        arguments kwargs. If a different type of exception is
        thrown, it will not be caught, and the test case will be
        deemed to have suffered an error, exactly as for an
        unexpected exception.

        CAUTION! There are subtle differences between Logilab and unittest2
        - exc is not returned in standard version
        - context capabilities in standard version
        - try/except/else construction (minor)

        :param excClass: the Exception to be raised
        :param callableObj: a callable Object which should raise <excClass>
        :param args: a List of arguments for <callableObj>
        :param kwargs: a List of keyword arguments for <callableObj>
        """
        # XXX cube vcslib : test_branches_from_app
        if callableObj is None:
            # No callable given: defer to the stdlib context-manager form.
            _assert = super(TestCase, self).assertRaises
            return _assert(excClass, callableObj, *args, **kwargs)
        try:
            callableObj(*args, **kwargs)
        except excClass, exc:
            # Wrap the caught exception so every attribute access emits a
            # DeprecationWarning steering callers to the context-manager API.
            class ProxyException:
                def __init__(self, obj):
                    self._obj = obj
                def __getattr__(self, attr):
                    warn_msg = ("This exception was retrieved with the old testlib way "
                                "`exc = self.assertRaises(Exc, callable)`, please use "
                                "the context manager instead'")
                    warnings.warn(warn_msg, DeprecationWarning, 2)
                    return self._obj.__getattribute__(attr)
            return ProxyException(exc)
        else:
            # No exception was raised: report the expected exception's name.
            if hasattr(excClass, '__name__'):
                excName = excClass.__name__
            else:
                excName = str(excClass)
            raise self.failureException("%s not raised" % excName)
    assertRaises = failUnlessRaises
import doctest
class SkippedSuite(unittest.TestSuite):
    """Suite standing in for a doctest suite when DocTestSuite is missing."""
    def test(self):
        """just there to trigger test execution"""
        # NOTE(review): skipped_test is not a unittest.TestSuite method; it is
        # expected to be provided elsewhere in this library -- confirm.
        self.skipped_test('doctest module has no DocTestSuite class')
class DocTestFinder(doctest.DocTestFinder):
    """DocTestFinder able to skip objects listed in a ``skipped`` sequence."""

    def __init__(self, *args, **kwargs):
        # Pop our private keyword before delegating to the stdlib finder.
        self.skipped = kwargs.pop('skipped', ())
        doctest.DocTestFinder.__init__(self, *args, **kwargs)

    def _get_test(self, obj, name, module, globs, source_lines):
        """Return the doctest for *obj*, or None when its name is skipped.

        Note: Python (<=2.4) used a _name_filter which could be used for that
        purpose but it's no longer available in 2.5; Python 2.5 seems to have
        a [SKIP] flag instead.
        """
        is_skipped = getattr(obj, '__name__', '') in self.skipped
        if is_skipped:
            return None
        return doctest.DocTestFinder._get_test(
            self, obj, name, module, globs, source_lines)
class DocTest(TestCase):
    """trigger module doctest

    I don't know how to make unittest.main consider the DocTestSuite instance
    without this hack
    """
    # Names of objects whose doctests must not be collected.
    skipped = ()
    # NOTE(review): subclasses are expected to set a `module` attribute
    # (read as self.module below) -- confirm against callers.
    def __call__(self, result=None, runcondition=None, options=None):\
        # pylint: disable=W0613
        try:
            finder = DocTestFinder(skipped=self.skipped)
            if sys.version_info >= (2, 4):
                suite = doctest.DocTestSuite(self.module, test_finder=finder)
                if sys.version_info >= (2, 5):
                    # XXX iirk
                    doctest.DocTestCase._TestCase__exc_info = sys.exc_info
            else:
                # Python < 2.4: DocTestSuite has no test_finder parameter.
                suite = doctest.DocTestSuite(self.module)
        except AttributeError:
            suite = SkippedSuite()
        # doctest may clobber the builtins dictionary (this happens to the
        # "_" entry used by gettext), so snapshot it and restore afterwards.
        old_builtins = __builtins__.copy()
        try:
            return suite.run(result)
        finally:
            __builtins__.clear()
            __builtins__.update(old_builtins)
    run = __call__
    def test(self):
        """just there to trigger test execution"""
# Module-level capture of mails "sent" through MockSMTP; rebound to a fresh
# list each time a MockSMTP instance is created.
MAILBOX = None
class MockSMTP:
    """fake smtplib.SMTP"""
    def __init__(self, host, port):
        # host/port are recorded but otherwise unused by the mock.
        self.host = host
        self.port = port
        global MAILBOX
        # NOTE(review): 'reveived' is a historical typo of 'received', kept
        # because client code may rely on the attribute name.
        self.reveived = MAILBOX = []
    def set_debuglevel(self, debuglevel):
        """ignore debug level"""
    def sendmail(self, fromaddr, toaddres, body):
        """push sent mail in the mailbox"""
        self.reveived.append((fromaddr, toaddres, body))
    def quit(self):
        """ignore quit"""
class MockConfigParser(ConfigParser):
    """fake ConfigParser.ConfigParser"""
    def __init__(self, options):
        """Populate sections and options from a nested mapping.

        :param options: mapping of section name -> {option name: value}
        """
        ConfigParser.__init__(self)
        # items() works identically on Python 2 and 3; the original used
        # iteritems(), which only exists on Python 2 dicts.
        for section, pairs in options.items():
            self.add_section(section)
            for key, value in pairs.items():
                self.set(section, key, value)
    def write(self, _):
        """Mock configurations are read-only."""
        raise NotImplementedError()
class MockConnection:
    """fake DB-API 2.0 connexion AND cursor (i.e. cursor() return self)"""

    def __init__(self, results):
        self.received = []  # (query, args) pairs passed to execute()
        self.states = []    # ('commit'|'rollback', #queries so far) pairs
        self.results = results

    def cursor(self):
        """Return self: the connection doubles as its own cursor."""
        return self

    def execute(self, query, args=None):
        """Record the query instead of executing it."""
        self.received.append((query, args))

    def fetchone(self):
        """Return the first canned result row."""
        return self.results[0]

    def fetchall(self):
        """Return all canned result rows."""
        return self.results

    def commit(self):
        """Record a commit along with the number of queries seen so far."""
        self.states.append(('commit', len(self.received)))

    def rollback(self):
        """Record a rollback along with the number of queries seen so far."""
        self.states.append(('rollback', len(self.received)))

    def close(self):
        """Closing the mock connection is a no-op."""
def mock_object(**params):
    """creates an object using params to set attributes

    >>> option = mock_object(verbose=False, index=range(5))
    >>> option.verbose
    False
    >>> option.index
    [0, 1, 2, 3, 4]
    """
    # Build a throwaway class whose class attributes are the given params.
    mock_class = type('Mock', (), params)
    return mock_class()
def create_files(paths, chroot):
    """Creates directories and files found in <path>.

    :param paths: list of relative paths to files or directories
    :param chroot: the root directory in which paths will be created

    >>> from os.path import isdir, isfile
    >>> isdir('/tmp/a')
    False
    >>> create_files(['a/b/foo.py', 'a/b/c/', 'a/b/c/d/e.py'], '/tmp')
    >>> isdir('/tmp/a')
    True
    >>> isdir('/tmp/a/b/c')
    True
    >>> isfile('/tmp/a/b/c/d/e.py')
    True
    >>> isfile('/tmp/a/b/foo.py')
    True
    """
    directories = set()
    filenames = set()
    for relpath in paths:
        target = osp.join(chroot, relpath)
        if osp.basename(target) == '':
            # A trailing separator means the path denotes a directory.
            directories.add(target)
        else:
            directories.add(osp.dirname(target))
            filenames.add(target)
    for directory in directories:
        if not osp.isdir(directory):
            os.makedirs(directory)
    for filepath in filenames:
        open(filepath, 'w').close()
class AttrObject: # XXX cf mock_object
    """Simple bag object: keyword arguments become instance attributes."""
    def __init__(self, **kwargs):
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
def tag(*args, **kwargs):
    """descriptor adding tag to a function"""
    def decorator(func):
        # A function may only be tagged once.
        assert not hasattr(func, 'tags')
        func.tags = Tags(*args, **kwargs)
        return func
    return decorator
def require_version(version):
    """ Compare version of python interpreter to the given one. Skip the test
    if older.
    """
    def decorator(f):
        try:
            required = tuple(int(part) for part in version.split('.'))
        except ValueError:
            raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version)
        current = sys.version_info[:3]
        if current >= required:
            # Interpreter is recent enough: leave the test untouched.
            return f
        def new_f(self, *args, **kwargs):
            self.skipTest('Need at least %s version of python. Current version is %s.' % (version, '.'.join([str(element) for element in current])))
        new_f.__name__ = f.__name__
        return new_f
    return decorator
def require_module(module):
    """ Check if the given module is loaded. Skip the test if not.
    """
    def decorator(f):
        try:
            __import__(module)
        except ImportError:
            # Module missing: replace the test with one that skips itself.
            def new_f(self, *args, **kwargs):
                self.skipTest('%s can not be imported.' % module)
            new_f.__name__ = f.__name__
            return new_f
        return f
    return decorator
|
{
"content_hash": "552ffac3cd49caa20de13a23d4389344",
"timestamp": "",
"source": "github",
"line_count": 1371,
"max_line_length": 152,
"avg_line_length": 37.42961342086068,
"alnum_prop": 0.5714007327149427,
"repo_name": "pronto/dotfiles",
"id": "a35ad98cdd700d9577a5ab0b84519f0044b0b5bb",
"size": "52181",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".vim/pylibs/logilab/common/testlib.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "51"
},
{
"name": "Python",
"bytes": "1873707"
},
{
"name": "Shell",
"bytes": "1795"
},
{
"name": "VimL",
"bytes": "669686"
},
{
"name": "Visual Basic",
"bytes": "130230"
}
],
"symlink_target": ""
}
|
#!/usr/bin/env python3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Builds an app that uses Material Web Components
This script runs the tooling necessary to produce an output script that
can be run by Chromium for a project using Material Web Components.
The main steps are:
- Generate depfiles with all the transitive MWC dependencies for GN to
correctly identify dirty builds.
- Use rollup to resolve bare imports and generate a single application package.
- Run Terser to minify the output.
"""
# TODO(calamity): This is mostly a copy of optimize_webui.py. Figure out
# which parts of this script are unnecessary and remove.
import argparse
import itertools
import json
import os
import glob
import platform
import re
import shutil
import sys
import tempfile
# Directory containing this script, and the Chromium src root two levels up.
_HERE_PATH = os.path.dirname(__file__)
_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..'))
_CWD = os.getcwd()  # typically out/<gn_name>/.
# Exclusions and URL mappings applied to every build; the per-invocation
# lists from the command line are appended later in _build().
_BASE_EXCLUDES = []
_URL_MAPPINGS = []
# node_modules checkout of Material Web Components used to resolve imports.
_MWC_PATH = os.path.join(_SRC_PATH, 'third_party', 'material_web_components',
                         'components-chromium', 'node_modules')
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
import node
import node_modules
_DEBUG_DLOG_ENABLED = False
def DLOG(*args):
if _DEBUG_DLOG_ENABLED:
print(*args)
def _request_list_path(out_path, host_url):
host = host_url[host_url.find('://') + 3:-1]
return os.path.join(out_path, host + '_requestlist.txt')
def _get_dep_path(dep, host_url, in_path, out_path):
  """Map a dependency reported by rollup to a path usable in the GN depfile.

  :param dep: dependency as reported by rollup (URL or relative path)
  :param host_url: the WebUI host URL (e.g. chrome://foo/)
  :param in_path: absolute input directory
  :param out_path: absolute output directory
  """
  DLOG('Input dep: ' + dep)
  dep = dep.replace('../', '', 1)
  DLOG('Host URL : ' + host_url)
  DLOG('in_path : ' + in_path)
  if dep.startswith(host_url):
    # Host-relative URL: rewrite against the input directory.
    result = dep.replace(host_url, os.path.relpath(in_path, _CWD))
  elif dep.startswith('chrome://') or dep.startswith('//'):
    # Other chrome:// or scheme-relative URLs pass through untouched.
    result = dep
  else:
    result = os.path.relpath(out_path, _CWD) + '/' + dep
  DLOG('Result : ' + result, '\n')
  return result
# Get a list of all files that were bundled with rollup and update the
# depfile accordingly such that Ninja knows when to re-trigger.
def _update_dep_file(args, manifest):
  """Write the GN depfile listing every input the first bundle depends on.

  :param args: parsed command-line arguments (input, out_folder, host_url,
      js_out_files, depfile)
  :param manifest: mapping of bundle name -> list of bundled dependencies
  """
  in_path = os.path.join(_CWD, args.input)
  out_path = os.path.join(_CWD, args.out_folder)
  # Gather the dependencies of all bundled root files.
  request_list = []
  for out_file in manifest:
    request_list += manifest[out_file]
  request_list = map(
      lambda dep: _get_dep_path(dep, args.host_url, in_path, out_path),
      request_list)
  deps = map(os.path.normpath, request_list)
  out_file_name = args.js_out_files[0]
  with open(os.path.join(_CWD, args.depfile), 'w') as f:
    # Depfile format: "<target>: <dep> <dep> ...".
    deps_file_header = os.path.join(args.out_folder, out_file_name)
    f.write(deps_file_header + ': ' + ' '.join(deps))
# Autogenerate a rollup config file so that we can import the plugin and
# pass it information about the location of the directories and files to exclude
# from the bundle.
def _generate_rollup_config(tmp_out_dir, path_to_plugin, in_path, host_url,
                            excludes, external_paths):
  """Write rollup.config.js into |tmp_out_dir| and return its path.

  :param tmp_out_dir: directory the config (and bundles) are written to
  :param path_to_plugin: absolute path to the rollup resolver plugin
  :param in_path: input directory, forwarded to the plugin
  :param host_url: WebUI host URL, forwarded to the plugin
  :param excludes: paths the plugin must not rewrite
  :param external_paths: url->filesystem path replacements for the plugin
  """
  rollup_config_file = os.path.join(tmp_out_dir, 'rollup.config.js')
  # Exclude/external lists are embedded as JSON, which is valid JS. A dead
  # hand-rolled 'excludes_string' local and an unused 'node_root_dir' format
  # argument were removed; neither appeared in the template below.
  config_content = r'''
import plugin from '{plugin_path}';
export default ({{
  plugins: [
    plugin('{in_path}', '{host_url}', {exclude_list},
           {external_path_list}, /* allowEmptyExtension= */ true) ]
}});
'''.format(plugin_path=path_to_plugin.replace('\\', '/'),
           in_path=in_path.replace('\\', '/'),
           host_url=host_url,
           exclude_list=json.dumps(excludes),
           external_path_list=json.dumps(external_paths))
  DLOG('Rollup Config:\n' + config_content)
  with open(rollup_config_file, 'w') as f:
    f.write(config_content)
  return rollup_config_file
# Create the manifest file from the sourcemap generated by rollup and return the
# list of bundles.
def _generate_manifest_file(tmp_out_dir, in_path, request_list_path):
generated_sourcemaps = glob.glob('%s/*.map' % tmp_out_dir)
manifest = {}
output_filenames = []
for sourcemap_file in generated_sourcemaps:
with open(sourcemap_file, 'r') as f:
sourcemap = json.loads(f.read())
if not 'sources' in sourcemap:
raise Exception('rollup could not construct source map')
sources = sourcemap['sources']
replaced_sources = []
for source in sources:
source.replace('../', '', 1)
replaced_sources.append(
source.replace('../' + os.path.basename(in_path) + '/',
''))
filename = sourcemap_file[:-len('.map')]
manifest[os.path.basename(filename)] = replaced_sources
output_filenames.append(filename)
with open(request_list_path, 'w') as f:
f.write(json.dumps(manifest))
return output_filenames
def build(tmp_out_dir, in_path, out_path, request_list_path, args, excludes,
          external_paths):
  """Run rollup over args.js_module_in_files and return the bundle paths.

  :param tmp_out_dir: scratch directory the bundles are written to
  :param in_path: absolute input directory
  :param out_path: absolute output directory
  :param request_list_path: file the bundle->sources manifest is written to
  :param args: parsed command-line arguments
  :param excludes: paths rollup must not rewrite
  :param external_paths: url->filesystem path replacements
  :raises AssertionError: on unexpected output names, bundle counts, or
      unprocessed <if expr> directives in the output
  """
  if not os.path.exists(tmp_out_dir):
    os.makedirs(tmp_out_dir)
  path_to_plugin = os.path.join(os.path.abspath(_SRC_PATH), 'chrome',
                                'browser', 'resources', 'tools',
                                'rollup_plugin.js')
  rollup_config_file = _generate_rollup_config(tmp_out_dir, path_to_plugin,
                                               in_path, args.host_url,
                                               excludes, external_paths)
  rollup_args = [os.path.join(in_path, f) for f in args.js_module_in_files]
  # Confirm names are as expected. This is necessary to avoid having to replace
  # import statements in the generated output files.
  # TODO(calamity): Is it worth adding import statement replacement to support
  # arbitrary names?
  bundled_paths = []
  for index, js_file in enumerate(args.js_module_in_files):
    base_file_name = os.path.basename(js_file)
    expected_name = '%s.rollup.js' % base_file_name[:-len('.js')]
    assert args.js_out_files[index] == expected_name, \
        'Output file corresponding to %s should be named %s' % \
        (js_file, expected_name)
    bundled_paths.append(os.path.join(tmp_out_dir, expected_name))
  # This indicates that rollup is expected to generate a shared chunk file as
  # well as one file per module. Set its name using --chunkFileNames. Note:
  # Currently, this only supports 2 entry points, which generate 2 corresponding
  # outputs and 1 shared output.
  if (len(args.js_out_files) == 3):
    assert len(args.js_module_in_files) == 2, \
        'Expect 2 module entry points for generating 3 outputs'
    shared_file_name = args.js_out_files[2]
    rollup_args += ['--chunkFileNames', shared_file_name]
    bundled_paths.append(os.path.join(tmp_out_dir, shared_file_name))
  node.RunNode([node_modules.PathToRollup()] + rollup_args + [
      '--format',
      'esm',
      '--dir',
      tmp_out_dir,
      '--entryFileNames',
      '[name].rollup.js',
      '--sourcemap',
      '--sourcemapExcludeSources',
      '--config',
      rollup_config_file,
      '--silent',
  ])
  # Create the manifest file from the sourcemaps generated by rollup.
  generated_paths = _generate_manifest_file(tmp_out_dir, in_path,
                                            request_list_path)
  assert len(generated_paths) == len(bundled_paths), \
      'unexpected number of bundles - %s - generated by rollup' % \
      (len(generated_paths))
  for bundled_file in bundled_paths:
    with open(bundled_file, 'r') as f:
      output = f.read()
      assert "<if expr" not in output, \
          'Unexpected <if expr> found in bundled output. Check that all ' + \
          'input files using such expressions are preprocessed.'
  return bundled_paths
def _build(in_folder, args):
  """Bundle and minify one invocation; return the request-list file path.

  :param in_folder: input directory, relative to the current directory
  :param args: parsed command-line arguments
  """
  in_path = os.path.normpath(os.path.join(_CWD,
                                          in_folder)).replace('\\', '/')
  out_path = os.path.join(_CWD, args.out_folder).replace('\\', '/')
  request_list_path = _request_list_path(out_path, args.host_url)
  tmp_out_dir = tempfile.mkdtemp(dir=out_path).replace('\\', '/')
  excludes = _BASE_EXCLUDES + [
      # This file is dynamically created by C++. Need to specify an exclusion
      # URL for both the relative URL and chrome:// URL syntax.
      'strings.js',
      'strings.m.js',
      '%s/strings.js' % args.host_url,
      '%s/strings.m.js' % args.host_url,
  ]
  excludes.extend(args.exclude or [])
  external_paths = args.external_paths or []
  try:
    if args.js_module_in_files:
      # NOTE(review): bundled_paths is currently unused here.
      bundled_paths = build(tmp_out_dir, in_path, out_path,
                            request_list_path, args, excludes,
                            external_paths)
      # Pass the JS files through Uglify and write the output to its final
      # destination.
      for index, js_out_file in enumerate(args.js_out_files):
        node.RunNode([
            node_modules.PathToTerser(),
            os.path.join(tmp_out_dir, js_out_file), '--comments',
            '/Copyright|license|LICENSE|\<\/?if/', '--output',
            os.path.join(out_path, js_out_file)
        ])
  finally:
    # Always clean up the scratch directory, even on failure.
    shutil.rmtree(tmp_out_dir)
  return request_list_path
def main(argv):
  """Entry point: parse arguments, bundle & minify, emit depfile/manifest."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--depfile', required=True, help='GN depfile to write')
  parser.add_argument('--exclude',
                      nargs='*',
                      help='paths that rollup will not rewrite')
  parser.add_argument('--external_paths',
                      nargs='*',
                      help='url to filesystem path replacements')
  parser.add_argument('--host', required=True, help='host of the WebUI')
  parser.add_argument('--input',
                      required=True,
                      help='directory where input files are')
  parser.add_argument('--js_out_files', nargs='*', required=True)
  parser.add_argument('--out_folder', required=True)
  parser.add_argument('--js_module_in_files', nargs='*', required=True)
  parser.add_argument('--out_manifest',
                      help='manifest file to auto-generate grd')
  args = parser.parse_args(argv)
  args.depfile = os.path.normpath(args.depfile)
  args.input = os.path.normpath(args.input)
  args.out_folder = os.path.normpath(args.out_folder)
  # --host accepts either a bare host name or a full URL with a scheme.
  scheme_end_index = args.host.find('://')
  if (scheme_end_index == -1):
    args.host_url = 'chrome://%s/' % args.host
  else:
    args.host_url = args.host
  request_list_path = _build(args.input, args)
  # Prior call to _build() generated an output request_list file, containing
  # information about all files that were bundled. Grab it from there.
  request_list = json.loads(open(request_list_path, 'r').read())
  # Output a manifest file that will be used to auto-generate a grd file later.
  if args.out_manifest:
    manifest_data = {
        'base_dir': args.out_folder,
        'files': list(request_list.keys()),
    }
    with open(os.path.normpath(os.path.join(_CWD, args.out_manifest)), 'w') \
        as manifest_file:
      json.dump(manifest_data, manifest_file)
  _update_dep_file(args, request_list)
if __name__ == '__main__':
  main(sys.argv[1:])
|
{
"content_hash": "7b132cebb5ff15f4062c6321ad8b842b",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 83,
"avg_line_length": 38.57516339869281,
"alnum_prop": 0.6005591324974585,
"repo_name": "ric2b/Vivaldi-browser",
"id": "5ca45bb5f4cdd812deeee25e018ccbb01a6d5736",
"size": "11804",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chromium/third_party/material_web_components/build_mwc_app.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""Tests for core."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import pickle
import threading
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import pywrap_tfe
from tensorflow.python.eager import context
from tensorflow.python.eager import core
from tensorflow.python.eager import def_function
from tensorflow.python.eager import execute as execute_lib
from tensorflow.python.eager import executor
from tensorflow.python.eager import test
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import variables
def execute(op_name, num_outputs, inputs, attrs=None):
  """Execute *op_name* eagerly in the current context (thin wrapper)."""
  ctx = context.context()
  return execute_lib.execute(op_name, num_outputs, inputs, attrs, ctx)
def truncated_normal(shape):
  """Run the TruncatedNormal op for *shape* and return its single output."""
  op_attrs = ('dtype', dtypes.float32.as_datatype_enum, 'T',
              shape.dtype.as_datatype_enum, 'seed', 0, 'seed2', 0)
  outputs = execute(b'TruncatedNormal', 1, inputs=[shape], attrs=op_attrs)
  return outputs[0]
def current_device():
  """Device an op lands on when no explicit device scope is active."""
  probe = array_ops.identity(1.)
  return probe.device
def configure_virtual_cpus():
  """Split the first physical CPU into two logical (virtual) CPU devices."""
  physical = config.list_physical_devices('CPU')
  logical = [context.LogicalDeviceConfiguration() for _ in range(2)]
  config.set_logical_device_configuration(physical[0], logical)
class TFETest(test_util.TensorFlowTestCase):
  def setUp(self):
    # Reset the global eager context before each test and carve the single
    # physical CPU into two virtual devices (tests below rely on both
    # cpu:0 and cpu:1 existing).
    super(TFETest, self).setUp()
    context._reset_context()
    configure_virtual_cpus()
def _test_hashable(self, a, b, hashable):
if hashable:
self.assertIsInstance(b, collections.Hashable)
self.assertLen(set([a, b]), 2)
else:
# TODO(gjn): Figure out how to make this work for tf.Tensor
# self.assertNotIsInstance(b, collections.Hashable)
with self.assertRaisesRegexp(TypeError, 'unhashable'):
set([a, b])
  def testEquality(self):
    """Tensor/Variable __eq__ under both the v1 and v2 equality modes."""
    default = ops.Tensor._USE_EQUALITY
    try:
      def _v1_check(a, b):
        # v1 semantics: == falls back to identity, so equal-valued but
        # distinct objects compare unequal.
        self.assertEqual(a, a)
        self.assertIs(a, a)
        self.assertNotEqual(a, 1.0)
        self.assertIsNot(a, 1.0)
        self.assertNotEqual(a, b)
        self.assertIsNot(a, b)
      def _v2_check(a, b):
        # v2 semantics: == compares values, while `is` still distinguishes
        # the underlying objects.
        self.assertEqual(a, a)
        self.assertIs(a, a)
        self.assertEqual(a, 1.0)
        self.assertIsNot(a, 1.0)
        self.assertEqual(a, b)
        self.assertIsNot(a, b)
      constant_a = constant_op.constant(1.0)
      constant_b = constant_op.constant(1.0)
      ops.disable_tensor_equality()
      self._test_hashable(constant_a, constant_b, True)
      _v1_check(constant_a, constant_b)
      ops.enable_tensor_equality()
      _v2_check(constant_a, constant_b)
      self._test_hashable(constant_a, constant_b, False)
      variable_a = variables.Variable(1.0)
      variable_b = variables.Variable(1.0)
      ops.disable_tensor_equality()
      _v1_check(variable_a, variable_b)
      self._test_hashable(variable_a, variable_b, True)
      ops.enable_tensor_equality()
      _v2_check(variable_a, variable_b)
      self._test_hashable(variable_a, variable_b, False)
      # We only test numpy behaviour in v2 mode since we'd like to match that.
      numpy_a = np.array(1.0)
      numpy_b = np.array(1.0)
      _v2_check(numpy_a, numpy_b)
      self._test_hashable(numpy_a, numpy_b, False)
    finally:
      # Restore whichever equality mode was active before the test.
      if default:
        ops.enable_tensor_equality()
      else:
        ops.disable_tensor_equality()
  def testEqualityNan(self):
    """NaN tensors: == is identity in v1 mode, IEEE value-equality in v2."""
    default = ops.Tensor._USE_EQUALITY
    try:
      def _v1_check(a, b):
        self.assertEqual(a, a)
        self.assertIs(a, a)
        self.assertNotEqual(a, float('nan'))
        self.assertIsNot(a, float('nan'))
        self.assertNotEqual(a, b)
        self.assertIsNot(a, b)
      def _v2_check(a, b):
        # IEEE 754: NaN compares unequal to everything, including itself.
        self.assertNotEqual(a, a)
        self.assertIs(a, a)
        self.assertNotEqual(a, float('nan'))
        self.assertIsNot(a, float('nan'))
        self.assertNotEqual(a, b)
        self.assertIsNot(a, b)
      constant_a = constant_op.constant(float('nan'))
      constant_b = constant_op.constant(float('nan'))
      ops.disable_tensor_equality()
      self._test_hashable(constant_a, constant_b, True)
      _v1_check(constant_a, constant_b)
      ops.enable_tensor_equality()
      _v2_check(constant_a, constant_b)
      self._test_hashable(constant_a, constant_b, False)
      variable_a = variables.Variable(float('nan'))
      variable_b = variables.Variable(float('nan'))
      ops.disable_tensor_equality()
      _v1_check(variable_a, variable_b)
      self._test_hashable(variable_a, variable_b, True)
      ops.enable_tensor_equality()
      _v2_check(variable_a, variable_b)
      self._test_hashable(variable_a, variable_b, False)
      numpy_a = np.array(float('nan'))
      numpy_b = np.array(float('nan'))
      _v2_check(numpy_a, numpy_b)
      self._test_hashable(numpy_a, numpy_b, False)
    finally:
      # Restore whichever equality mode was active before the test.
      if default:
        ops.enable_tensor_equality()
      else:
        ops.disable_tensor_equality()
  def testEqualityCompare(self):
    """Element-wise == on rank-1 and rank-0 tensors/ndarrays, both modes."""
    default = ops.Tensor._USE_EQUALITY
    try:
      tf_a = constant_op.constant([1, 2])
      tf_b = constant_op.constant([1, 2])
      tf_c = constant_op.constant([1, 1])
      np_a = np.array([1, 2])
      np_b = np.array([1, 2])
      np_c = np.array([1, 1])
      ops.disable_tensor_equality()
      # We don't do element-wise comparison
      self.assertNotEqual(tf_a, tf_b)
      self.assertNotEqual(tf_a, tf_c)
      # We can compare list of tensors
      self.assertEqual([tf_a, tf_b], [tf_a, tf_b])
      self.assertNotEqual([tf_a, tf_b], [tf_b, tf_b])
      # We can compare existence in a list
      self.assertIn(tf_a, [tf_a, tf_b])
      self.assertIn(tf_a, [tf_b, tf_a])
      self.assertNotIn(tf_a, [tf_b, tf_c])
      ops.enable_tensor_equality()
      # We do element-wise comparison but can't convert results array to bool
      with self.assertRaises(ValueError):
        bool(tf_a == tf_b)
      self.assertAllEqual(tf_a == tf_b, [True, True])
      with self.assertRaises(ValueError):
        bool(tf_a == tf_c)
      self.assertAllEqual(tf_a == tf_c, [True, False])
      self.assertNotAllEqual(tf_a, tf_c)
      with self.assertRaises(ValueError):
        bool(np_a == np_b)
      self.assertAllEqual(np_a == np_b, [True, True])
      with self.assertRaises(ValueError):
        bool(np_a == np_c)
      self.assertAllEqual(np_a == np_c, [True, False])
      self.assertNotAllEqual(np_a, np_c)
      # Warning even though we technically shouldn't be able to compare here,
      # since the id is the same both TF & numpy will handle lists with the same
      # value without raising an error
      self.assertEqual([tf_a, tf_b], [tf_a, tf_b])
      with self.assertRaises(ValueError):
        bool([tf_a, tf_b] == [tf_b, tf_b])
      self.assertEqual([np_a, np_b], [np_a, np_b])
      with self.assertRaises(ValueError):
        bool([np_a, np_b] == [np_b, np_b])
      # Similar to lists we shouldn't be able to do a `in` check such as
      # `if a in [a,b]`. However if `a` is the first element, it works due to
      # short circuiting
      self.assertIn(tf_a, [tf_a, tf_b])
      with self.assertRaises(ValueError):
        bool(tf_a in [tf_b, tf_a])
      with self.assertRaises(ValueError):
        bool(tf_a in [tf_b, tf_c])
      self.assertIn(np_a, [np_a, np_b])
      with self.assertRaises(ValueError):
        bool(np_a in [np_b, np_a])
      with self.assertRaises(ValueError):
        bool(np_a in [np_b, np_c])
      # rank 0
      self.assertAllEqual(
          constant_op.constant(1) == constant_op.constant(1), True)
      self.assertAllEqual(
          constant_op.constant(1) == constant_op.constant(2), False)
      self.assertAllEqual(np.array(1) == np.array(1), True)
      self.assertAllEqual(np.array(1) == np.array(2), False)
    finally:
      # Restore whichever equality mode was active before the test.
      if default:
        ops.enable_tensor_equality()
      else:
        ops.disable_tensor_equality()
  def testEqualityBroadcast(self):
    """Broadcasting behaviour of == across compatible/incompatible shapes."""
    default = ops.Tensor._USE_EQUALITY
    try:
      tf_a = constant_op.constant([1, 1])
      tf_b = constant_op.constant([1, 1])
      tf_c = constant_op.constant([[1, 1], [1, 1]])
      tf_d = constant_op.constant([[1, 2], [1, 2]])
      tf_e = constant_op.constant([1, 1, 1])
      np_a = np.array([1, 1])
      np_b = np.array([1, 1])
      np_c = np.array([[1, 1], [1, 1]])
      np_d = np.array([[1, 2], [1, 2]])
      np_e = np.array([1, 1, 1])
      ops.disable_tensor_equality()
      # We don't do element-wise comparison
      self.assertNotEqual(tf_a, tf_b)
      self.assertNotEqual(tf_a, tf_c)
      self.assertNotEqual(tf_a, tf_d)
      ops.enable_tensor_equality()
      # We do element-wise comparison but can't convert results array to bool
      with self.assertRaises(ValueError):
        bool(tf_a == tf_b)
      self.assertAllEqual(tf_a == tf_b, [True, True])
      with self.assertRaises(ValueError):
        bool(tf_a == tf_c)
      self.assertAllEqual(tf_a == tf_c, [[True, True], [True, True]])
      with self.assertRaises(ValueError):
        bool(tf_a == tf_d)
      self.assertAllEqual(tf_a == tf_d, [[True, False], [True, False]])
      # Incompatible shapes compare unequal as a whole.
      self.assertFalse(bool(tf_a == tf_e))
      self.assertTrue(bool(tf_a != tf_e))
      self.assertNotAllEqual(tf_a, tf_e)
      with self.assertRaises(ValueError):
        bool(np_a == np_b)
      self.assertAllEqual(np_a == np_b, [True, True])
      with self.assertRaises(ValueError):
        bool(np_a == np_c)
      self.assertAllEqual(np_a == np_c, [[True, True], [True, True]])
      self.assertAllEqual(np_a == np_d, [[True, False], [True, False]])
      self.assertFalse(bool(np_a == np_e))
      self.assertTrue(bool(np_a != np_e))
      self.assertNotAllEqual(np_a, np_e)
    finally:
      # Restore whichever equality mode was active before the test.
      if default:
        ops.enable_tensor_equality()
      else:
        ops.disable_tensor_equality()
  def testContext(self):
    """Basic Context state: scope name, execution mode, device scoping."""
    ctx = context.Context()
    self.assertTrue(ctx.executing_eagerly())
    self.assertEqual('', ctx.scope_name)
    ctx.scope_name = 'foo'
    self.assertEqual('foo', ctx.scope_name)
    self.assertEqual(context.SYNC, ctx.execution_mode)
    ctx.execution_mode = context.ASYNC
    self.assertEqual(context.ASYNC, ctx.execution_mode)
    ctx.execution_mode = context.SYNC
    self.assertEqual(context.SYNC, ctx.execution_mode)
    self.assertEqual('', ctx.device_name)
    self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
    with ctx.device('GPU:0'):
      self.assertEqual('/job:localhost/replica:0/task:0/device:GPU:0',
                       ctx.device_name)
      self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
      with ctx.device(None):
        # Entering a None device scope clears the device name.
        self.assertEqual('', ctx.device_name)
        self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
        with ctx.device('CPU:0'):
          self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
                           ctx.device_name)
          self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
        with ctx.device(ctx.list_logical_devices('CPU')[0]):
          self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
                           ctx.device_name)
          self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
    gpus = ctx.list_logical_devices('GPU')
    if gpus:
      with ctx.device(gpus[0]):
        self.assertEqual('/job:localhost/replica:0/task:0/device:GPU:0',
                         ctx.device_name)
        self.assertEqual(ctx.device_name, ctx.device_spec.to_string())
    has_cpu_device = False
    for x in ctx.devices():
      has_cpu_device = has_cpu_device or 'CPU' in x
    self.assertTrue(has_cpu_device)
    del ctx
def testDevice_supportsLogicalDevice(self):
ctx = context.Context()
cpus = ctx.list_logical_devices('CPU')
with ctx.device(cpus[0]):
self.assertEqual('/job:localhost/replica:0/task:0/device:CPU:0',
ctx.device_name)
def testDevice_supportsDeviceSpec(self):
ctx = context.Context()
device_name = '/job:localhost/replica:0/task:0/device:CPU:0'
device_spec = pydev.DeviceSpec.from_string(device_name)
with ctx.device(device_spec):
self.assertEqual(device_name, ctx.device_name)
def testAsyncBasic(self):
ctx = context.Context(execution_mode=context.ASYNC)
ctx.ensure_initialized()
has_cpu_device = False
for x in ctx.devices():
has_cpu_device = has_cpu_device or 'CPU' in x
self.assertTrue(has_cpu_device)
del ctx
def testMultiCpuPlacement(self):
with ops.device('cpu:1'):
x = array_ops.identity(1.0)
with ops.device('cpu:0'):
y = array_ops.identity(x)
self.assertEqual(x.device, '/job:localhost/replica:0/task:0/device:CPU:1')
self.assertEqual(y.device, '/job:localhost/replica:0/task:0/device:CPU:0')
@test_util.run_gpu_only
def testShouldCopy(self):
with ops.device('GPU:0'):
x = array_ops.identity(1.0)
self.assertEqual(x.device, '/job:localhost/replica:0/task:0/device:GPU:0')
y = array_ops.identity(x)
# The value we're testing y.device against will depend on what the behavior
# of not explicitly specifying a device in the context is. This behavior is
# subject to change (for example, in the future we may want to use GPUs, if
# available, when no device is explicitly provided)
self.assertEqual(y.device, current_device())
def testContextSwitchStackContainsEagerMode(self):
# Eager execution has been enabled, and no other context switch has
# occurred, so `context_switches` should contain exactly one entry.
self.assertEqual(len(context.context().context_switches.stack), 1)
switch = context.context().context_switches.stack[0]
# The entry should log that eager mode was entered.
self.assertIs(switch.enter_context_fn, context.eager_mode)
# It is not possible to build a graph function when eager execution
# is enabled; the stack entry should reflect this fact.
self.assertFalse(switch.is_building_function)
@test_util.run_gpu_only
def testInt32GPU(self):
with ops.device('gpu:0'):
xent = nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=[[0.0, 0.0]], labels=[0])
self.assertAllClose(xent, [0.69314718])
def _runInThread(self, target, args):
t = threading.Thread(target=target, args=args)
try:
t.start()
t.join()
except Exception as e:
raise e
# Test that different thread local values are initialized to the same values
# in different threads.
def testContextThreadLocalMembers(self):
def get_context_values(ctx):
return [
ctx.executing_eagerly(),
ctx.scope_name,
ctx.device_name,
ctx.num_gpus()
]
def get_values(ctx, values):
values.extend(get_context_values(ctx))
context_values = []
ctx = context.Context()
self._runInThread(get_values, (ctx, context_values))
self.assertAllEqual(context_values, get_context_values(ctx))
@test_util.run_gpu_only
def testContextConfig(self):
ctx = context.Context(config=config_pb2.ConfigProto(
device_count={'GPU': 0}))
self.assertEquals(0, ctx.num_gpus())
def testPickle(self):
tmp_dir = self.get_temp_dir()
fname = os.path.join(tmp_dir, 't.pickle')
with open(fname, 'wb') as f:
t = constant_op.constant(10.0)
pickle.dump(t, f)
with open(fname, 'rb') as f:
t = pickle.load(f)
self.assertAllEqual(t.numpy(), 10.0)
@test_util.run_gpu_only
def testDevicePlacementEnforcesConsistency(self):
cpu = context.device('cpu:0')
gpu = context.device('gpu:0')
cpu.__enter__()
self.assertEndsWith(current_device(), 'CPU:0')
gpu.__enter__()
self.assertEndsWith(current_device(), 'GPU:0')
with self.assertRaisesRegexp(
RuntimeError, 'Exiting device scope without proper scope nesting'):
cpu.__exit__()
self.assertEndsWith(current_device(), 'GPU:0')
gpu.__exit__()
self.assertEndsWith(current_device(), 'CPU:0')
cpu.__exit__()
@test_util.run_gpu_only
def testReEntrant(self):
cpu = context.device('cpu:0')
gpu = context.device('gpu:0')
with cpu:
with gpu:
with gpu:
self.assertEndsWith(current_device(), 'GPU:0')
self.assertEndsWith(current_device(), 'GPU:0')
self.assertEndsWith(current_device(), 'CPU:0')
with gpu:
self.assertEndsWith(current_device(), 'GPU:0')
@test_util.run_gpu_only
def testTensorPlacement(self):
x = constant_op.constant(1.).gpu()
with context.device('gpu:0'):
y = constant_op.constant(2.)
# Add would fail if t2 were not on GPU
result = execute(
b'Add', 1, inputs=[x, y],
attrs=('T', x.dtype.as_datatype_enum))[0].cpu().numpy()
self.assertEqual(3, result)
@test_util.run_gpu_only
def testResourceTensorPlacement(self):
with context.device('gpu:0'):
v = resource_variable_ops.ResourceVariable(1.0)
with context.device('cpu:0'):
# Check that even though we specified the cpu device we'll run the read op
# in the device where the handle is.
self.assertAllEqual(
gen_resource_variable_ops.read_variable_op(v.handle, v.dtype), 1.0)
@test_util.run_gpu_only
def testCopyBetweenDevices(self):
x = constant_op.constant([[1., 2.], [3., 4.]])
x = x.cpu()
x = x.gpu()
x = x.gpu()
x = x.cpu()
# Invalid device
with self.assertRaises(RuntimeError):
x.gpu(context.context().num_gpus() + 1)
@test_util.run_gpu_only
def testCopyBetweenDevicesAsync(self):
with context.execution_mode(context.ASYNC):
x = constant_op.constant([[1., 2.], [3., 4.]])
x = x.cpu()
x = x.gpu()
x = x.gpu()
x = x.cpu()
context.context().executor.wait()
# Invalid device
with self.assertRaises(RuntimeError):
x.gpu(context.context().num_gpus() + 1)
context.context().executor.wait()
context.context().executor.clear_error()
@test_util.run_gpu_only
def testCopyScope(self):
constant = constant_op.constant(1.0)
with ops.device('gpu:0'):
with context.device_policy(context.DEVICE_PLACEMENT_SILENT):
c = constant + 1.0
self.assertAllEqual(c, 2.0)
def testPyFunctionNullContext(self):
def simple_fn(unused_handle):
return 1.
with ops.device('CPU:0'):
test_var = variables.Variable([2., 3.])
@def_function.function
def test_fn(v):
script_ops.eager_py_func(simple_fn, [v.handle], dtypes.float32)
return 1.
self.assertAllEqual(test_fn(test_var), 1.0)
def testPyFunctionAsync(self):
def simple_fn(v):
one = constant_op.constant(1.)
return v + one
@def_function.function
def test_fn(v):
return script_ops.eager_py_func(simple_fn, [v], dtypes.float32)
async_executor = executor.new_executor(enable_async=True)
with context.executor_scope(async_executor):
test_var = variables.Variable(2.)
self.assertAllEqual(test_fn(test_var), 3.0)
async_executor.wait()
@test_util.run_gpu_only
def testNumpyForceCPU(self):
cpu = constant_op.constant([[1., 2.], [3., 4.]])
c2g = cpu.gpu()
self.assertAllEqual(c2g, cpu.numpy())
def testCopyFromCPUToCPU(self):
ta = constant_op.constant([[1, 2], [3, 4]])
tb = ta.cpu()
self.assertNotEqual(id(ta), id(tb))
self.assertAllEqual(ta, tb.numpy())
def testRegisterExceptionClass(self):
with self.assertRaises(TypeError):
pywrap_tfe.TFE_Py_RegisterExceptionClass(str)
pywrap_tfe.TFE_Py_RegisterExceptionClass(core._NotOkStatusException) # pylint: disable=protected-access
# TODO(agarwal): add tests passing incorrect typed values to attrs.
def testExecuteBasic(self):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = execute(
b'Mul',
num_outputs=1,
inputs=[three, five],
attrs=('T', three.dtype.as_datatype_enum))[0]
self.assertAllEqual(15, product)
def testExecuteBasicAsync(self):
with context.execution_mode(context.ASYNC):
three = constant_op.constant(3)
five = constant_op.constant(5)
product = execute(
b'Mul',
num_outputs=1,
inputs=[three, five],
attrs=('T', three.dtype.as_datatype_enum))[0]
self.assertAllEqual(15, product)
# Error: Invalid arguments
# TODO(b/149995282): When an exception is thrown in ASYNC mode, it seems
# there are things left over that cause mutex corruption when
# _reset_context() is called before the next test is executed.
#
# context.set_execution_mode(context.ASYNC)
# with self.assertRaises(errors.InvalidArgumentError):
# execute(
# b'MatMul',
# num_outputs=1,
# inputs=[three, five],
# attrs=('transpose_a', False, 'transpose_b', False, 'T',
# three.dtype.as_datatype_enum))
# context.context().executor.wait()
#
context.context().executor.clear_error()
context.context().execution_mode = context.SYNC
def testExecuteTooManyNumOutputs(self):
# num_outputs provided is 50, but only one output is produced.
product = execute(
b'Mul',
num_outputs=50,
inputs=[constant_op.constant(3),
constant_op.constant(5)],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
self.assertAllEqual(15, product)
def testExecuteTooFewNumOutputs(self):
# num_outputs provided is 0, but one output is produced.
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'Mul',
num_outputs=0,
inputs=[constant_op.constant(3),
constant_op.constant(5)],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
@test_util.run_gpu_only
def testMatMulGPU(self):
three = constant_op.constant([[3.]]).gpu()
five = constant_op.constant([[5.]]).gpu()
product = execute(
b'MatMul',
num_outputs=1,
inputs=[three, five],
attrs=('transpose_a', False, 'transpose_b', False, 'T',
three.dtype.as_datatype_enum))[0]
self.assertAllEqual([[15.0]], product)
def testExecuteStringAttr(self):
checked_three = execute(
b'CheckNumerics',
num_outputs=1,
inputs=[constant_op.constant(3.)],
attrs=('message', 'just checking', 'T',
dtypes.float32.as_datatype_enum))[0]
self.assertEqual([[3]], checked_three.numpy())
def testExecuteStringAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'CheckNumerics',
num_outputs=1,
inputs=[constant_op.constant(3.)],
attrs=('message', 1, 'T', dtypes.float32.as_datatype_enum))
def testExecuteFloatAttr(self):
almost_equal = execute(
b'ApproximateEqual',
num_outputs=1,
inputs=[constant_op.constant(3.0), constant_op.constant(2.9)],
attrs=('tolerance', 0.3, 'T', dtypes.float32.as_datatype_enum))[0]
self.assertTrue(almost_equal)
def testExecuteFloatAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'ApproximateEqual',
num_outputs=1,
inputs=[constant_op.constant(3.0), constant_op.constant(2.9)],
attrs=('tolerance', '0.3', 'T', dtypes.float32.as_datatype_enum))
def testExecuteIntAttr(self):
total = execute(
b'AddN',
num_outputs=1,
inputs=[constant_op.constant(3), constant_op.constant(4)],
attrs=('T', dtypes.int32.as_datatype_enum, 'N', 2))[0]
self.assertAllEqual(7, total)
def testExecuteIntAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
_ = execute(
b'AddN',
num_outputs=1,
inputs=[constant_op.constant(3), constant_op.constant(4)],
attrs=('T', dtypes.int32.as_datatype_enum, 'N', '2'))
# Looks like we don't have an existing op with list(bool) attrs.
def testExecuteBoolAttr(self):
product = execute(
b'MatMul',
num_outputs=1,
inputs=[constant_op.constant([[3]]),
constant_op.constant([[5]])],
attrs=('transpose_a', True, 'transpose_b', False, 'T',
dtypes.int32.as_datatype_enum))[0]
self.assertAllEqual([[15]], product)
def testExecuteShapeAttr(self):
execute(
b'VarHandleOp',
num_outputs=1,
inputs=[],
attrs=('shape', [1, 2], 'dtype', dtypes.int32.as_datatype_enum,
'container', '', 'shared_name', ''))
def testExecuteShapeAttrBadValue(self):
with self.assertRaisesRegex(
errors.InvalidArgumentError,
'Expecting a Dimension for attr shape, got object'):
execute(
b'VarHandleOp',
num_outputs=1,
inputs=[],
attrs=('shape', [object()], 'dtype', dtypes.int32.as_datatype_enum,
'container', '', 'shared_name', ''))
def testExecuteListStringAttr(self):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description',
'tensor_summary', 'labels', ['3',
'summary'], 'display_name', 'test'))
def testExecuteListStringAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description', '',
'labels', 3, 'display_name', 'test'))
def testExecuteListStringAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'TensorSummary',
num_outputs=1,
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum, 'description', '',
'labels', [3], 'display_name', 'test'))
def testExecuteListFloatAttr(self):
b = execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries', [4.0,
6.0]))[0]
self.assertAllEqual([0, 1, 2], b)
def testExecuteListFloatAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries', 4.0))
def testExecuteListFloatAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Bucketize',
num_outputs=1,
inputs=[constant_op.constant([3.0, 5.0, 7.0])],
attrs=('T', dtypes.float32.as_datatype_enum, 'boundaries',
['4.0', '6.0']))
def testExecuteListIntAttr(self):
b = execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims', [0, 2]))[0]
self.assertAllEqual([3], b)
def testExecuteListIntAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims', 0))
def testExecuteListIntAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Squeeze',
num_outputs=1,
inputs=[constant_op.constant([[[3.0]]])],
attrs=('T', dtypes.float32.as_datatype_enum, 'squeeze_dims',
['0', '2']))
def testExecuteListTypeListShapeAttr(self):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[[1, 2]], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListTypeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', dtypes.float64.as_datatype_enum, 'shapes',
[[1, 2]], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListTypeAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', '1', 'shapes', [[1, 2]], 'capacity', -1,
'container', '', 'shared_name', ''))
def testExecuteListShapeAttrBadValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[1, 2], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteListShapeAttrBadListValue(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Barrier',
num_outputs=1,
inputs=[],
attrs=('component_types', [dtypes.float64.as_datatype_enum], 'shapes',
[1], 'capacity', -1, 'container', '', 'shared_name', ''))
def testExecuteMultipleOutputs(self):
split_dim = 1
value = [[0, 1, 2], [3, 4, 5]]
x1, x2, x3 = execute(
b'Split',
num_outputs=3,
inputs=[constant_op.constant(split_dim),
constant_op.constant(value)],
attrs=('num_split', 3, 'T', dtypes.int32.as_datatype_enum))
self.assertAllEqual([[0], [3]], x1)
self.assertAllEqual([[1], [4]], x2)
self.assertAllEqual([[2], [5]], x3)
def testExecuteBadNumOutputsArgument(self):
with self.assertRaises(TypeError):
execute(
b'Relu', [],
inputs=[constant_op.constant(3.0)],
attrs=('T', dtypes.float32.as_datatype_enum))
def testExecuteUnknownOp(self):
with self.assertRaises(errors.NotFoundError):
execute(b'BlahBlahBlah', num_outputs=1, inputs=[], attrs=None)
def testExecuteUnknownAttr(self):
with self.assertRaises(errors.InvalidArgumentError):
execute(
b'Identity',
num_outputs=1,
inputs=[constant_op.constant(3)],
attrs=('T', dtypes.int32.as_datatype_enum, 'unknown_attr', 'blah'))
def testComposition(self):
def add(x, y):
return execute(
b'Add',
num_outputs=1,
inputs=[x, y],
attrs=('T', dtypes.int32.as_datatype_enum))[0]
x = constant_op.constant(1)
three_x = add(add(x, x), x)
self.assertEquals(dtypes.int32, three_x.dtype)
self.assertAllEqual(3, three_x)
@test_util.run_gpu_only
def testOperationWithNoInputsRunsOnDevice(self):
shape = constant_op.constant([], dtype=dtypes.int32)
# x: Run the "TruncatedNormal" op CPU and copy result to GPU.
x = truncated_normal(shape).gpu()
# y: Explicitly run the "TruncatedNormal" op on GPU.
with context.device('gpu:0'):
y = truncated_normal(shape)
# Add would fail if x and y were not on the same device.
execute(
b'Add', 1, inputs=[x, y], attrs=('T', x.dtype.as_datatype_enum))
def testInvalidDevice(self):
with self.assertRaises(ValueError):
with context.device('pu:0'):
_ = constant_op.constant(1)
def testConvertMixedEagerTensors(self):
array = np.zeros((), dtype=np.float32)
tensor = constant_op.constant(0., dtype=dtypes.float32)
types, tensors = execute_lib.convert_to_mixed_eager_tensors(
[array, tensor], context.context())
for typ, t in zip(types, tensors):
self.assertEquals(typ, dtypes.float32)
self.assertIsInstance(t, ops.EagerTensor)
def testConvertMixedEagerTensorsWithVariables(self):
var = resource_variable_ops.ResourceVariable(1.0)
types, tensors = execute_lib.convert_to_mixed_eager_tensors(
['foo', var], context.context())
self.assertAllEqual([dtypes.string, dtypes.float32], types)
for t in tensors:
self.assertIsInstance(t, ops.EagerTensor)
# TODO(b/123637108): re-enable
@test_util.run_gpu_only
def disabled_testSmallIntegerOpsForcedToCPU(self):
a = constant_op.constant((1, 2, 3, 4, 5), dtype=dtypes.int64)
b = constant_op.constant((2, 3, 4, 5, 6), dtype=dtypes.int64)
with context.device('gpu:0'):
c = a + b
# Op forced to CPU since all constants are integers and small.
self.assertEqual(c.device, '/job:localhost/replica:0/task:0/device:CPU:0')
a = array_ops.zeros((8, 10), dtype=dtypes.int64)
b = array_ops.ones((8, 10), dtype=dtypes.int64)
with context.device('gpu:0'):
c = a + b
# Op not forced to CPU since the tensors are larger than 64 elements.
self.assertEqual(c.device, '/job:localhost/replica:0/task:0/device:GPU:0')
a = constant_op.constant((1, 2, 3, 4, 5), dtype=dtypes.float32)
b = constant_op.constant((2, 3, 4, 5, 6), dtype=dtypes.float32)
with context.device('gpu:0'):
c = a + b
# Op not forced to CPU since the constants are not integers.
self.assertEqual(c.device, '/job:localhost/replica:0/task:0/device:GPU:0')
def testExecutionModeIsStoredThreadLocal(self):
cv = threading.Condition()
count = [0]
num_threads = 10
def execution_mode_test(cond, count, num_threads, ctx, mode):
cond.acquire()
# Ensure that all threads set their mode simultaneously
# Note that this is not a simple assignment, as the execution_mode is an
# @property with a custom setter.
ctx.execution_mode = mode
count[0] = count[0] + 1
if count[0] < num_threads:
cond.wait()
else:
cond.notify_all()
cond.release()
self.assertEqual(ctx.execution_mode, mode)
ctx = context.Context()
threads = []
for i in range(num_threads):
t = threading.Thread(
target=execution_mode_test,
args=(cv, count, num_threads, ctx,
context.SYNC if i % 2 == 0 else context.ASYNC))
t.start()
threads.append(t)
for t in threads:
t.join()
def testEmptyResourceReturned(self):
with ops.device('CPU:0'):
v = variables.Variable(1.)
empty_handle = array_ops.gather(
v.handle[array_ops.newaxis], array_ops.zeros([0], dtype=dtypes.int32))
self.assertEqual(
[0],
empty_handle.shape.as_list())
class SendRecvTest(test_util.TensorFlowTestCase):
  """Exercises the raw _Send/_Recv kernels between local devices."""

  cpu_device = '/job:localhost/replica:0/task:0/device:CPU:0'

  def _send(self, tensor, tensor_name, to_device):
    """Issue a client-terminated _Send of `tensor` toward `to_device`."""
    return execute(
        b'_Send', num_outputs=0, inputs=[tensor],
        attrs=('T', tensor.dtype.as_datatype_enum,
               'tensor_name', tensor_name,
               'send_device', tensor.device,
               'send_device_incarnation', 0,
               'recv_device', to_device,
               'client_terminated', True))

  def _recv(self, dtype, tensor_name, from_device):
    """Receive the named tensor on the current (or default CPU) device."""
    device_name = context.context().device_name or self.cpu_device
    return execute(
        b'_Recv', num_outputs=1, inputs=[],
        attrs=('tensor_type', dtype.as_datatype_enum,
               'tensor_name', tensor_name,
               'send_device', from_device,
               'send_device_incarnation', 0,
               'recv_device', device_name,
               'client_terminated', False))[0]

  def setUp(self):
    super(SendRecvTest, self).setUp()
    # Start from a clean context with the virtual-CPU layout this file uses.
    context._reset_context()
    configure_virtual_cpus()

  def testBasic(self):
    """Two CPU sends are received back by tensor name."""
    t0 = constant_op.constant(1.0)
    t1 = constant_op.constant(2.0)
    self._send(t0, 't0', self.cpu_device)
    self._send(t1, 't1', self.cpu_device)
    self.assertAllEqual(
        self._recv(dtypes.float32, 't0', self.cpu_device), 1.0)
    self.assertAllEqual(
        self._recv(dtypes.float32, 't1', self.cpu_device), 2.0)

  @test_util.run_gpu_only
  def testLocalCrossDevice(self):
    """Send/recv works in both directions between CPU and GPU."""
    gpu_device_name = '/job:localhost/replica:0/task:0/device:GPU:0'
    with ops.device('GPU:0'):
      t0 = array_ops.identity(1.0)
      self._send(t0, 't0', self.cpu_device)
    with ops.device('cpu:0'):
      self.assertAllEqual(
          self._recv(dtypes.float32, 't0', gpu_device_name), 1.0)
      self._send(constant_op.constant(2.0), 't1', gpu_device_name)
    with ops.device('GPU:0'):
      self.assertAllEqual(
          self._recv(dtypes.float32, 't1', self.cpu_device), 2.0)
class EagerTensorCacheTest(test_util.TensorFlowTestCase):
  """Tests for the internal _EagerTensorCache size limits."""

  def setUp(self):
    super(EagerTensorCacheTest, self).setUp()
    context._reset_context()
    configure_virtual_cpus()

  def testCacheSkipsTensorsTooLarge(self):
    """Tensors above max_tensor_size are not cached; smaller ones are."""
    cache = context._EagerTensorCache(max_items=100, max_tensor_size=3)
    # A 2x2 tensor (4 elements) exceeds max_tensor_size=3: not stored.
    cache.put('1', array_ops.zeros((2, 2)))
    self.assertIsNone(cache.get('1'))
    # A length-2 tensor fits: stored and retrievable.
    cache.put('2', array_ops.zeros((2)))
    self.assertIsNotNone(cache.get('2'))
if __name__ == '__main__':
  # Log device placement so failures show where each op actually ran.
  context.set_log_device_placement(True)
  test.main()
|
{
"content_hash": "28d59e505d3661d4533f76a53ba176de",
"timestamp": "",
"source": "github",
"line_count": 1102,
"max_line_length": 108,
"avg_line_length": 34.39201451905626,
"alnum_prop": 0.6237994722955145,
"repo_name": "gunan/tensorflow",
"id": "47b3966827fd9e18eb9adebd1c2808adb7c6b981",
"size": "38589",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/eager/core_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5003"
},
{
"name": "Batchfile",
"bytes": "45924"
},
{
"name": "C",
"bytes": "774953"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "77908225"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "104215"
},
{
"name": "Go",
"bytes": "1841471"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "962443"
},
{
"name": "Jupyter Notebook",
"bytes": "556650"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1479029"
},
{
"name": "Makefile",
"bytes": "58603"
},
{
"name": "Objective-C",
"bytes": "104667"
},
{
"name": "Objective-C++",
"bytes": "297830"
},
{
"name": "PHP",
"bytes": "23994"
},
{
"name": "Pascal",
"bytes": "3739"
},
{
"name": "Pawn",
"bytes": "17039"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "39476740"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7459"
},
{
"name": "Shell",
"bytes": "650007"
},
{
"name": "Smarty",
"bytes": "34649"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
}
|
"""
This module is for Differential Phase Contrast (DPC) imaging based on
Fourier shift fitting
"""
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.optimize import minimize
from collections import namedtuple
import warnings
import logging
logger = logging.getLogger(__name__)
def image_reduction(im, roi=None, bad_pixels=None):
    """
    Sum the image data over rows and columns.

    Parameters
    ----------
    im : ndarray
        Input image.
    roi : ndarray, optional
        [r, c, row, col], selects ROI im[r : r + row, c : c + col]. Default is
        None, which uses the whole image.
    bad_pixels : list, optional
        List of (row, column) tuples marking bad pixels, e.g.
        [(1, 5), (2, 6)] marks the two pixels (1, 5) and (2, 6). Default is
        None.

    Returns
    -------
    xline : ndarray
        The row vector of the sums of each column.
    yline : ndarray
        The column vector of the sums of each row.
    """
    if bad_pixels:
        # Work on a copy so the caller's array is left untouched.
        im = im.copy()
        for r, c in bad_pixels:
            im[r, c] = 0

    if roi:
        top, left, height, width = roi
        im = im[top:(top + height), left:(left + width)]

    xline = np.sum(im, axis=0)
    yline = np.sum(im, axis=1)
    return xline, yline
def _rss_factory(length):
"""
A factory function for returning a residue function for use in dpc fitting.
The main reason to do this is to generate a closure over beta so that
linspace is only called once.
Parameters
----------
length : int
The length of the data vector that the returned function can deal with.
Returns
-------
function
A function with signature f(v, xdata, ydata) which is suitable for use
as a cost function for use with scipy.optimize.
"""
beta = 1j * (np.linspace(-(length-1)//2, (length-1)//2, length))
def _rss(v, ref_reduction, diff_reduction):
"""
Internal function used by fit()
Cost function to be minimized in nonlinear fitting
Parameters
----------
v : list
Fit parameters.
v[0], amplitude of the sample transmission function at one scanning
point;
v[1], the phase gradient (along x or y direction) of the sample
transmission function.
ref_reduction : ndarray
Extra argument passed to the objective function. In DPC, it's the
sum of the reference image data along x or y direction.
diff_refuction : ndarray
Extra argument passed to the objective function. In DPC, it's the
sum of one captured diffraction pattern along x or y direction.
Returns
--------
float
Residue value.
"""
diff = diff_reduction - ref_reduction * v[0] * np.exp(v[1] * beta)
return np.sum((diff * np.conj(diff)).real)
return _rss
def dpc_fit(rss, ref_reduction, diff_reduction, start_point,
            solver='Nelder-Mead', tol=1e-6, max_iters=2000):
    """
    Nonlinear fitting for 2 points.

    Parameters
    ----------
    rss : callable
        Objective function to be minimized in DPC fitting.
    ref_reduction : ndarray
        Extra argument passed to the objective function. In DPC, it's the sum
        of the reference image data along the x or y direction.
    diff_reduction : ndarray
        Extra argument passed to the objective function. In DPC, it's the sum
        of one captured diffraction pattern along the x or y direction.
    start_point : list
        start_point[0]: starting value for the amplitude of the sample
        transmission function at one scanning point.
        start_point[1]: starting value for the phase gradient (along x or y
        direction) of the sample transmission function at one scanning point.
    solver : str, optional
        Type of solver, one of the following (default 'Nelder-Mead'):
        'Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Anneal', 'L-BFGS-B', 'TNC',
        'COBYLA', 'SLSQP'.
    tol : float, optional
        Termination criterion for the nonlinear fit. Default is 1e-6.
    max_iters : int, optional
        Maximum iterations of the nonlinear fit. Default is 2000.

    Returns
    -------
    tuple
        Fitting result: intensity attenuation and phase gradient.
    """
    result = minimize(rss, start_point,
                      args=(ref_reduction, diff_reduction),
                      method=solver, tol=tol,
                      options=dict(maxiter=max_iters))
    return result.x


# attributes
dpc_fit.solver = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Anneal', 'L-BFGS-B',
                  'TNC', 'COBYLA', 'SLSQP']
def recon(gx, gy, scan_xstep, scan_ystep, padding=0, weighting=0.5):
"""Reconstruct the final phase image.
Parameters
----------
gx : ndarray
Phase gradient along x direction.
gy : ndarray
Phase gradient along y direction.
scan_xstep : float
Scanning step size in x direction (in micro-meter).
scan_ystep : float
Scanning step size in y direction (in micro-meter).
padding : int, optional
Pad a N-by-M array to be a
``(N*(2*padding+1))``-by-``(M*(2*padding+1))`` array with the
image in the middle with a (N*padding, M*padding) thick edge
of zeros. Default is 0.
padding = 0 --> v (the original image, size = (N, M))
0 0 0
padding = 1 --> 0 v 0 (the padded image, size = (3 * N, 3 * M))
0 0 0
weighting : float, optional
Weighting parameter for the phase gradient along x and y direction when
constructing the final phase image.
Valid in [0, 1]. Default value = 0.5, which means that gx and gy
equally contribute to the final phase image.
Returns
-------
phase : ndarray
Final phase image.
"""
if weighting < 0 or weighting > 1:
raise ValueError('weighting should be within the range of [0, 1]!')
pad = 2 * padding + 1
gx = np.asarray(gx)
rows, cols = gx.shape
pad_row = rows * pad
pad_col = cols * pad
gx_padding = np.zeros((pad_row, pad_col), dtype='d')
gy_padding = np.zeros((pad_row, pad_col), dtype='d')
roi_slice = (slice(padding * rows, (padding + 1) * rows),
slice(padding * cols, (padding + 1) * cols))
gx_padding[roi_slice] = gx
gy_padding[roi_slice] = gy
tx = np.fft.fftshift(np.fft.fft2(gx_padding))
ty = np.fft.fftshift(np.fft.fft2(gy_padding))
mid_col = pad_col // 2 + 1
mid_row = pad_row // 2 + 1
ax = (2 * np.pi * np.arange(1 - mid_col, pad_col - mid_col + 1) /
(pad_col * scan_xstep))
ay = (2 * np.pi * np.arange(1 - mid_row, pad_row - mid_row + 1) /
(pad_row * scan_ystep))
kappax, kappay = np.meshgrid(ax, ay)
div_v = kappax ** 2 * (1 - weighting) + kappay ** 2 * weighting
with warnings.catch_warnings():
# It appears that having nans in data arrays is normal mode of
# operation for this function. So let's disable warnings.
warnings.filterwarnings('ignore', category=RuntimeWarning)
c = -1j * (kappax * tx * (1 - weighting) + kappay * ty * weighting) / div_v
c = np.fft.ifftshift(np.where(div_v == 0, 0, c))
phase = np.fft.ifft2(c)[roi_slice].real
return phase
# Internal accumulator threaded through successive `lazy_dpc` iterations:
# ax/ay hold fitted amplitudes, gx/gy fitted phase gradients, and
# ref_fx/ref_fy the 1-D IFFTs of the reference image reductions.
# holy hacks, Batman! 'index' here is a single element list so
# that I can keep track of how many images have been computed
# (namedtuple fields are immutable, but the list's contents are not).
dpc_internal_state = namedtuple('dpc_internal_state',
                                ['ax', 'ay', 'gx', 'gy', 'ref_fx', 'ref_fy',
                                 'index'])
def dpc_runner(ref, image_sequence, start_point, pixel_size, focus_to_det,
               scan_rows, scan_cols, scan_xstep, scan_ystep, energy, padding=0,
               weighting=0.5, solver='Nelder-Mead', roi=None, bad_pixels=None,
               negate=True, scale=True):
    """Wraps `lazy_dpc`

    See docstring for `lazy_dpc` and `reconstruct_phase_from_partial_info`
    for the input parameters for this function and what it returns

    Raises
    ------
    ValueError
        If ``pixel_size`` holds two unequal values; DPC requires square
        pixels.
    """
    if len(pixel_size) == 2:
        # make sure the pixels are the same size
        if pixel_size[0] != pixel_size[1]:
            # Bug fix: the two adjacent string literals previously
            # concatenated to "...You providedpixel values..."; a separating
            # space was missing.
            raise ValueError("In DPC, pixels must be square. You provided "
                             "pixel values of {}".format(pixel_size))
    dpc_gen = lazy_dpc(ref, image_sequence, start_point, scan_rows, scan_cols,
                       solver, roi, bad_pixels)
    # exhaust the generator, keeping only the last state
    # NOTE(review): if `image_sequence` is empty, `dpc_state` is never bound
    # and the call below raises NameError — confirm callers always supply at
    # least one image.
    for dpc_state in dpc_gen:
        pass
    # compute the final results
    phase, amplitude = reconstruct_phase_from_partial_info(
        dpc_state, energy, scan_xstep, scan_ystep, pixel_size[0],
        focus_to_det, negate, scale, padding, weighting)
    return phase, amplitude
def lazy_dpc(ref, image_sequence, start_point, scan_rows, scan_cols,
             solver='Nelder-Mead', roi=None, bad_pixels=None, dpc_state=None):
    """
    Controller function to run the whole Differential Phase Contrast (DPC)
    imaging calculation.

    Parameters
    ----------
    ref : ndarray
        The reference image for a DPC calculation.
    image_sequence : iterable of 2D arrays
        Yields diffraction patterns (2D Numpy arrays) when iterated over.
    start_point : list
        start_point[0]: starting value for the amplitude of the sample
        transmission function at one scanning point.
        start_point[1]: starting value for the phase gradient (along x or y
        direction) of the sample transmission function at one scanning point.
    scan_rows : int
        Number of scanned rows.
    scan_cols : int
        Number of scanned columns.
    solver : str, optional
        Type of solver, one of the following (default 'Nelder-Mead'):
        'Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Anneal', 'L-BFGS-B', 'TNC',
        'COBYLA', 'SLSQP'.
    roi : ndarray, optional
        [r, c, row, col], selects ROI im[r : r + row, c : c + col]. Default is
        None.
    bad_pixels : list, optional
        List of (row, column) tuples marking bad pixels, e.g.
        [(1, 5), (2, 6)] marks the two pixels (1, 5) and (2, 6). Default is
        None.
    dpc_state : namedtuple, optional
        State from a previous partial run, to resume from. Default None
        (start fresh).

    Yields
    ------
    dpc_state : namedtuple
        The internal state that `dpc_runner` requires for each iteration.
        Can be passed to reconstruct_phase_from_partial_info which, along
        with some additional info, will produce the final phase image

    References: text [1]_

    .. [1] Yan, H. et al. Quantitative x-ray phase imaging at the nanoscale by
       multilayer Laue lenses. Sci. Rep. 3, 1307; DOI:10.1038/srep01307 (2013).
    """
    if dpc_state is None:
        # Fresh accumulators for amplitudes (ax/ay) and gradients (gx/gy),
        # plus the 1-D IFFTs of the reference reductions.
        shape = (scan_rows, scan_cols)
        refx, refy = image_reduction(ref, roi, bad_pixels)
        dpc_state = dpc_internal_state(
            np.zeros(shape, dtype='d'),
            np.zeros(shape, dtype='d'),
            np.zeros(shape, dtype='d'),
            np.zeros(shape, dtype='d'),
            np.fft.fftshift(np.fft.ifft(refx)),
            np.fft.fftshift(np.fft.ifft(refy)),
            [0])

    # The cost functions close over a linspace of the reference length;
    # build them once, outside the per-image loop.
    ffx = _rss_factory(len(dpc_state.ref_fx))
    ffy = _rss_factory(len(dpc_state.ref_fy))

    # Same calculation on each diffraction pattern
    for im in image_sequence:
        i, j = np.unravel_index(dpc_state.index[0], (scan_rows, scan_cols))
        # Reduce the pattern along x and y, then 1-D IFFT each reduction.
        imx, imy = image_reduction(im, roi, bad_pixels)
        fx = np.fft.fftshift(np.fft.ifft(imx))
        fy = np.fft.fftshift(np.fft.ifft(imy))
        # Nonlinear fit of amplitude and phase gradient in each direction.
        amp_x, grad_x = dpc_fit(ffx, dpc_state.ref_fx, fx, start_point, solver)
        amp_y, grad_y = dpc_fit(ffy, dpc_state.ref_fy, fy, start_point, solver)
        # Record the single-point results in place.
        dpc_state.gx[i, j] = grad_x
        dpc_state.gy[i, j] = grad_y
        dpc_state.ax[i, j] = amp_x
        dpc_state.ay[i, j] = amp_y
        dpc_state.index[0] += 1
        yield dpc_state
def reconstruct_phase_from_partial_info(dpc_state, energy, scan_xstep,
                                        scan_ystep, pixel_size=None,
                                        focus_to_det=None, negate=True,
                                        scale=True, padding=0, weighting=0.5):
    """Using the partial results from dpc_runner, reconstruct the phase image.

    Parameters
    ----------
    dpc_state : namedtuple
        The thing yielded from `dpc_runner`.
    energy : float
        Energy of the scanning x-ray in keV. Only used when scaling is
        applied.
    scan_xstep : float
        Scanning step size in x direction (in micro-meter).
    scan_ystep : float
        Scanning step size in y direction (in micro-meter).
    pixel_size : Number, optional
        The size of the detector pixels. Pixels must be square. If
        `pixel_size` and `focus_to_det` are provided (and `scale` is True),
        the phase gradients are scaled to the experimental geometry.
        Default is None.
    focus_to_det : Number, optional
        The distance from the focal point of the beam to the detector.
        Must be provided as a pair with `pixel_size`. Default is None.
    negate : bool, optional
        If True (default), negate the phase gradient along x direction before
        reconstructing the final phase image.
    scale : bool, optional
        If True, scale gx and gy according to the experiment set up.
        If False, ignore pixel_size, focus_to_det and energy. Default is True.
    padding : int, optional
        Pad a N-by-M array to be a
        ``(N*(2*padding+1))``-by-``(M*(2*padding+1))`` array with the image in
        the middle with a (N*padding, M*padding) thick edge of zeros.
        Default is 0.
    weighting : float, optional
        Weighting parameter for the phase gradient along x and y direction
        when constructing the final phase image. Valid in [0, 1].
        Default is 0.5, which means gx and gy contribute equally.

    Returns
    -------
    phase : ndarray
        The final reconstructed phase image.
    amplitude : ndarray
        Amplitude of the sample transmission function.

    Raises
    ------
    ValueError
        If `weighting` is outside [0, 1].
    """
    if weighting < 0 or weighting > 1:
        raise ValueError('weighting should be within the range of [0, 1]!')
    # Start from the accumulated gradients. We never reassign into
    # dpc_state, so the caller's state is left untouched.
    gx = dpc_state.gx
    gy = dpc_state.gy
    # Fix: honor the documented `scale` flag (it was previously ignored), and
    # avoid shadowing the `scale` parameter with the computed scale factor.
    if scale and pixel_size and focus_to_det:
        # Convert energy (keV) to wavelength (12.4e-4 is hc in keV*um).
        lambda_ = 12.4e-4 / energy
        # Pre-compute the geometric scaling factor.
        scale_factor = pixel_size / (lambda_ * focus_to_det)
        gx = gx * len(dpc_state.ref_fx) * scale_factor
        gy = gy * len(dpc_state.ref_fy) * scale_factor
    if negate:
        # Build a new array rather than negating in place; previously gx
        # could also reach recon() as None when unscaled with negate=False.
        gx = gx * -1
    # Reconstruct the final phase image.
    phase = recon(gx, gy, scan_xstep, scan_ystep, padding, weighting)
    return phase, (dpc_state.ax + dpc_state.ay) / 2
# attributes
# Names of the scipy.optimize solvers accepted by `dpc_runner`; exposed on
# the function object so callers can introspect the supported options.
dpc_runner.solver = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Anneal',
                     'L-BFGS-B', 'TNC', 'COBYLA', 'SLSQP']
|
{
"content_hash": "aecae0ac96349e3fa5d38f7faad6493d",
"timestamp": "",
"source": "github",
"line_count": 478,
"max_line_length": 83,
"avg_line_length": 33.09414225941423,
"alnum_prop": 0.5908085213983185,
"repo_name": "Nikea/scikit-xray",
"id": "4d603a33f2c774f79eba310fad4a9da5c37ed87f",
"size": "18301",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "skbeam/core/dpc.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "28"
},
{
"name": "C",
"bytes": "18904"
},
{
"name": "Python",
"bytes": "653066"
},
{
"name": "Shell",
"bytes": "38"
}
],
"symlink_target": ""
}
|
"""
This module implements an interface to enumlib, Gus Hart's excellent Fortran
code for enumerating derivative structures.
This module depends on a compiled enumlib with the executables enum.x and
makestr.x available in the path. Please download the library at
http://enum.sourceforge.net/ and follow the instructions in the README to
compile these two executables accordingly.
If you use this module, please cite the following:
Gus L. W. Hart and Rodney W. Forcade, "Algorithm for generating derivative
structures," Phys. Rev. B 77 224115 (26 June 2008)
Gus L. W. Hart and Rodney W. Forcade, "Generating derivative structures from
multilattices: Application to hcp alloys," Phys. Rev. B 80 014120 (July 2009)
Gus L. W. Hart, Lance J. Nelson, and Rodney W. Forcade, "Generating
derivative structures at a fixed concentration," Comp. Mat. Sci. 59
101-107 (March 2012)
"""
import re
import math
import subprocess
import itertools
import logging
import glob
import numpy as np
from monty.fractions import lcm
import fractions
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.core.sites import PeriodicSite
from pymatgen.core.structure import Structure
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.core.periodic_table import DummySpecie
from monty.os.path import which
from monty.dev import requires
from monty.tempfile import ScratchDir
from threading import Timer
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jul 16, 2012"
# Module-level logger for this adaptor.
logger = logging.getLogger(__name__)
# Favor the use of the newer "enum.x" by Gus Hart instead of the older
# "multienum.x"
enum_cmd = which('enum.x') or which('multienum.x')
# prefer makestr.x at present
makestr_cmd = which('makestr.x') or which('makeStr.x') or which('makeStr.py')
@requires(enum_cmd and makestr_cmd,
          "EnumlibAdaptor requires the executables 'enum.x' or 'multienum.x' "
          "and 'makestr.x' or 'makeStr.py' to be in the path. Please download the "
          "library at http://enum.sourceforge.net/ and follow the instructions in "
          "the README to compile these two executables accordingly.")
class EnumlibAdaptor:
    """
    An adaptor for enumlib.
    .. attribute:: structures
        List of all enumerated structures.
    """
    # Tolerance used when deciding whether fractional site occupancies on a
    # disordered site sum to 1 (below that, a dummy "X" vacancy is added).
    amount_tol = 1e-5
    def __init__(self, structure, min_cell_size=1, max_cell_size=1,
                 symm_prec=0.1, enum_precision_parameter=0.001,
                 refine_structure=False, check_ordered_symmetry=True,
                 timeout=None):
        """
        Initializes the adapter with a structure and some parameters.
        Args:
            structure: An input structure.
            min_cell_size (int): The minimum cell size wanted. Defaults to 1.
            max_cell_size (int): The maximum cell size wanted. Defaults to 1.
            symm_prec (float): Symmetry precision. Defaults to 0.1.
            enum_precision_parameter (float): Finite precision parameter for
                enumlib. Default of 0.001 is usually ok, but you might need to
                tweak it for certain cells.
            refine_structure (bool): If you are starting from a structure that
                has been relaxed via some electronic structure code,
                it is usually much better to start with symmetry determination
                and then obtain a refined structure. The refined structure have
                cell parameters and atomic positions shifted to the expected
                symmetry positions, which makes it much less sensitive precision
                issues in enumlib. If you are already starting from an
                experimental cif, refinement should have already been done and
                it is not necessary. Defaults to False.
            check_ordered_symmetry (bool): Whether to check the symmetry of
                the ordered sites. If the symmetry of the ordered sites is
                lower, the lowest symmetry ordered sites is included in the
                enumeration. This is important if the ordered sites break
                symmetry in a way that is important getting possible
                structures. But sometimes including ordered sites
                slows down enumeration to the point that it cannot be
                completed. Switch to False in those cases. Defaults to True.
            timeout (float): If specified, will kill enumlib after specified
                time in minutes. This can be useful for gracefully handling
                enumerations in a high-throughput context, for some enumerations
                which will not terminate in a realistic length of time.
        """
        if refine_structure:
            finder = SpacegroupAnalyzer(structure, symm_prec)
            self.structure = finder.get_refined_structure()
        else:
            self.structure = structure
        self.min_cell_size = min_cell_size
        self.max_cell_size = max_cell_size
        self.symm_prec = symm_prec
        self.enum_precision_parameter = enum_precision_parameter
        self.check_ordered_symmetry = check_ordered_symmetry
        # Populated by run(); None until the enumeration has been performed.
        self.structures = None
        self.timeout = timeout
    def run(self):
        """
        Run the enumeration.
        Populates ``self.structures`` on success; raises EnumError when
        enum.x produces no structures.
        """
        # Create a temporary directory for working.
        with ScratchDir(".") as d:
            logger.debug("Temp dir : {}".format(d))
            # Generate input files
            self._gen_input_file()
            # Perform the actual enumeration
            num_structs = self._run_multienum()
            # Read in the enumeration output as structures.
            if num_structs > 0:
                self.structures = self._get_structures(num_structs)
            else:
                raise EnumError("Unable to enumerate structure.")
    def _gen_input_file(self):
        """
        Generate the necessary struct_enum.in file for enumlib. See enumlib
        documentation for details.
        """
        coord_format = "{:.6f} {:.6f} {:.6f}"
        # Using symmetry finder, get the symmetrically distinct sites.
        fitter = SpacegroupAnalyzer(self.structure, self.symm_prec)
        symmetrized_structure = fitter.get_symmetrized_structure()
        logger.debug("Spacegroup {} ({}) with {} distinct sites".format(
            fitter.get_space_group_symbol(),
            fitter.get_space_group_number(),
            len(symmetrized_structure.equivalent_sites))
        )
        """
        Enumlib doesn"t work when the number of species get too large. To
        simplify matters, we generate the input file only with disordered sites
        and exclude the ordered sites from the enumeration. The fact that
        different disordered sites with the exact same species may belong to
        different equivalent sites is dealt with by having determined the
        spacegroup earlier and labelling the species differently.
        """
        # index_species and index_amounts store mappings between the indices
        # used in the enum input file, and the actual species and amounts.
        index_species = []
        index_amounts = []
        # Stores the ordered sites, which are not enumerated.
        ordered_sites = []
        disordered_sites = []
        coord_str = []
        for sites in symmetrized_structure.equivalent_sites:
            if sites[0].is_ordered:
                ordered_sites.append(sites)
            else:
                sp_label = []
                species = {k: v for k, v in sites[0].species.items()}
                if sum(species.values()) < 1 - EnumlibAdaptor.amount_tol:
                    # Let us first make add a dummy element for every single
                    # site whose total occupancies don't sum to 1.
                    species[DummySpecie("X")] = 1 - sum(species.values())
                for sp in species.keys():
                    if sp not in index_species:
                        index_species.append(sp)
                        sp_label.append(len(index_species) - 1)
                        index_amounts.append(species[sp] * len(sites))
                    else:
                        ind = index_species.index(sp)
                        sp_label.append(ind)
                        index_amounts[ind] += species[sp] * len(sites)
                # enumlib notation: "0/1" means the site may host species 0
                # or species 1.
                sp_label = "/".join(["{}".format(i) for i in sorted(sp_label)])
                for site in sites:
                    coord_str.append("{} {}".format(
                        coord_format.format(*site.coords),
                        sp_label))
                disordered_sites.append(sites)
        def get_sg_info(ss):
            # Spacegroup number for an arbitrary collection of sites.
            finder = SpacegroupAnalyzer(Structure.from_sites(ss),
                                        self.symm_prec)
            return finder.get_space_group_number()
        target_sgnum = get_sg_info(symmetrized_structure.sites)
        curr_sites = list(itertools.chain.from_iterable(disordered_sites))
        sgnum = get_sg_info(curr_sites)
        ordered_sites = sorted(ordered_sites, key=lambda sites: len(sites))
        logger.debug("Disordered sites has sg # %d" % (sgnum))
        self.ordered_sites = []
        # progressively add ordered sites to our disordered sites
        # until we match the symmetry of our input structure
        if self.check_ordered_symmetry:
            while sgnum != target_sgnum and len(ordered_sites) > 0:
                sites = ordered_sites.pop(0)
                temp_sites = list(curr_sites) + sites
                new_sgnum = get_sg_info(temp_sites)
                if sgnum != new_sgnum:
                    logger.debug("Adding %s in enum. New sg # %d"
                                 % (sites[0].specie, new_sgnum))
                    index_species.append(sites[0].specie)
                    index_amounts.append(len(sites))
                    sp_label = len(index_species) - 1
                    for site in sites:
                        coord_str.append("{} {}".format(
                            coord_format.format(*site.coords),
                            sp_label))
                    disordered_sites.append(sites)
                    curr_sites = temp_sites
                    sgnum = new_sgnum
                else:
                    self.ordered_sites.extend(sites)
        # Anything left over stays out of the enumeration entirely.
        for sites in ordered_sites:
            self.ordered_sites.extend(sites)
        self.index_species = index_species
        lattice = self.structure.lattice
        output = [self.structure.formula, "bulk"]
        for vec in lattice.matrix:
            output.append(coord_format.format(*vec))
        output.append("%d" % len(index_species))
        output.append("%d" % len(coord_str))
        output.extend(coord_str)
        output.append("{} {}".format(self.min_cell_size, self.max_cell_size))
        output.append(str(self.enum_precision_parameter))
        output.append("full")
        ndisordered = sum([len(s) for s in disordered_sites])
        # Base is a common denominator for all fractional occupancies so that
        # concentrations can be expressed to enumlib as integer ranges.
        base = int(ndisordered*lcm(*[f.limit_denominator(ndisordered * self.max_cell_size).denominator
                                     for f in map(fractions.Fraction, index_amounts)]))
        # This multiplicative factor of 10 is to prevent having too small bases
        # which can lead to rounding issues in the next step.
        # An old bug was that a base was set to 8, with a conc of 0.4:0.6. That
        # resulted in a range that overlaps and a conc of 0.5 satisfying this
        # enumeration. See Cu7Te5.cif test file.
        base *= 10
        # base = ndisordered #10 ** int(math.ceil(math.log10(ndisordered)))
        # To get a reasonable number of structures, we fix concentrations to the
        # range expected in the original structure.
        total_amounts = sum(index_amounts)
        for amt in index_amounts:
            conc = amt / total_amounts
            if abs(conc * base - round(conc * base)) < 1e-5:
                output.append("{} {} {}".format(int(round(conc * base)),
                                                int(round(conc * base)),
                                                base))
            else:
                min_conc = int(math.floor(conc * base))
                output.append("{} {} {}".format(min_conc - 1, min_conc + 1,
                                                base))
        output.append("")
        logger.debug("Generated input file:\n{}".format("\n".join(output)))
        with open("struct_enum.in", "w") as f:
            f.write("\n".join(output))
    def _run_multienum(self):
        """Run the enum.x/multienum.x executable in the current directory.
        Returns:
            int: the number of structures enumerated (0 if none).
        Raises:
            TimeoutError: if ``self.timeout`` (minutes) was set and elapsed.
        """
        p = subprocess.Popen([enum_cmd],
                             stdout=subprocess.PIPE,
                             stdin=subprocess.PIPE, close_fds=True)
        if self.timeout:
            timed_out = False
            # Kill the subprocess from a watchdog thread after timeout mins.
            timer = Timer(self.timeout*60, lambda p: p.kill(), [p])
            try:
                timer.start()
                output = p.communicate()[0].decode("utf-8")
            finally:
                # If the timer is no longer alive it already fired (and
                # killed the process), i.e. we timed out.
                if not timer.is_alive():
                    timed_out = True
                timer.cancel()
            if timed_out:
                raise TimeoutError('Enumeration took too long.')
        else:
            output = p.communicate()[0].decode("utf-8")
        count = 0
        start_count = False
        # Parse enum.x stdout: counting starts after the "RunTot" header
        # line; the last column of each subsequent row is the running total.
        for line in output.strip().split("\n"):
            if line.strip().endswith("RunTot"):
                start_count = True
            elif start_count and re.match(r"\d+\s+.*", line.strip()):
                count = int(line.split()[-1])
        logger.debug("Enumeration resulted in {} structures".format(count))
        return count
    def _get_structures(self, num_structs):
        """Convert enum.x output into pymatgen Structures via makestr.
        Args:
            num_structs (int): number of structures reported by enum.x.
        Returns:
            list[Structure]: the enumerated structures, with any dummy "X"
            vacancy species removed and ordered sites re-attached.
        """
        structs = []
        # makeStr.py and makestr.x use different argument conventions.
        if ".py" in makestr_cmd:
            options = ["-input", "struct_enum.out", str(1), str(num_structs)]
        else:
            options = ["struct_enum.out", str(0), str(num_structs - 1)]
        rs = subprocess.Popen([makestr_cmd] + options,
                              stdout=subprocess.PIPE,
                              stdin=subprocess.PIPE, close_fds=True)
        stdout, stderr = rs.communicate()
        if stderr:
            logger.warning(stderr.decode())
        # sites retrieved from enumlib will lack site properties
        # to ensure consistency, we keep track of what site properties
        # are missing and set them to None
        # TODO: improve this by mapping ordered structure to original
        # disorded structure, and retrieving correct site properties
        disordered_site_properties = {}
        if len(self.ordered_sites) > 0:
            original_latt = self.ordered_sites[0].lattice
            # Need to strip sites of site_properties, which would otherwise
            # result in an index error. Hence Structure is reconstructed in
            # the next step.
            site_properties = {}
            for site in self.ordered_sites:
                for k, v in site.properties.items():
                    disordered_site_properties[k] = None
                    if k in site_properties:
                        site_properties[k].append(v)
                    else:
                        site_properties[k] = [v]
            ordered_structure = Structure(
                original_latt,
                [site.species for site in self.ordered_sites],
                [site.frac_coords for site in self.ordered_sites],
                site_properties=site_properties
            )
            inv_org_latt = np.linalg.inv(original_latt.matrix)
        # makestr writes one "vasp.*" POSCAR-like file per structure.
        for file in glob.glob('vasp.*'):
            with open(file) as f:
                data = f.read()
            # Normalize makestr output quirks so Poscar can parse it.
            data = re.sub(r'scale factor', "1", data)
            data = re.sub(r'(\d+)-(\d+)', r'\1 -\2', data)
            poscar = Poscar.from_string(data, self.index_species)
            sub_structure = poscar.structure
            # Enumeration may have resulted in a super lattice. We need to
            # find the mapping from the new lattice to the old lattice, and
            # perform supercell construction if necessary.
            new_latt = sub_structure.lattice
            sites = []
            if len(self.ordered_sites) > 0:
                transformation = np.dot(new_latt.matrix, inv_org_latt)
                transformation = [[int(round(cell)) for cell in row]
                                  for row in transformation]
                logger.debug("Supercell matrix: {}".format(transformation))
                s = ordered_structure * transformation
                sites.extend([site.to_unit_cell() for site in s])
                super_latt = sites[-1].lattice
            else:
                super_latt = new_latt
            for site in sub_structure:
                if site.specie.symbol != "X":  # We exclude vacancies.
                    sites.append(
                        PeriodicSite(site.species,
                                     site.frac_coords,
                                     super_latt,
                                     to_unit_cell=True,
                                     properties=disordered_site_properties)
                    )
                else:
                    logger.debug("Skipping sites that include species X.")
            structs.append(Structure.from_sites(sorted(sites)))
        logger.debug("Read in a total of {} structures.".format(num_structs))
        return structs
class EnumError(Exception):
    """
    Error subclass for enumeration errors.
    Derives from Exception (not BaseException): BaseException is reserved
    for process-control exceptions such as SystemExit/KeyboardInterrupt, and
    subclassing it would let EnumError slip past generic
    ``except Exception`` handlers.
    """
|
{
"content_hash": "784a072dca036efe74dede0b8f88e752",
"timestamp": "",
"source": "github",
"line_count": 413,
"max_line_length": 102,
"avg_line_length": 42.83050847457627,
"alnum_prop": 0.5709197806546441,
"repo_name": "mbkumar/pymatgen",
"id": "f95fb1d70aceed314c54026ff450df29215eb42d",
"size": "17799",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymatgen/command_line/enumlib_caller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5100"
},
{
"name": "CSS",
"bytes": "7550"
},
{
"name": "Common Lisp",
"bytes": "3029065"
},
{
"name": "HTML",
"bytes": "827"
},
{
"name": "Makefile",
"bytes": "5573"
},
{
"name": "Perl",
"bytes": "229104"
},
{
"name": "Propeller Spin",
"bytes": "4026362"
},
{
"name": "Python",
"bytes": "6933839"
},
{
"name": "Roff",
"bytes": "1135003"
}
],
"symlink_target": ""
}
|
import unittest
import os
import uuid
from studio.gcloud_worker import GCloudWorkerManager
from studio.ec2cloud_worker import EC2WorkerManager
from local_worker_test import stubtest_worker
from timeout_decorator import timeout
from studio.extra_util import has_aws_credentials
from env_detect import on_gcp, on_aws
# Per-test timeout in seconds. NOTE(review): "900" looks like the previous
# value — confirm 60s is enough for the slower cloud enumerations.
CLOUD_TEST_TIMEOUT = 60
@unittest.skipIf(
    not on_gcp(),
    'User indicated not on gcp')
class UserIndicatedOnGCPTest(unittest.TestCase):
    """Sanity check that GCP credentials are configured when running on GCP."""
    def test_on_enviornment(self):
        # assertIn reports the missing key on failure, unlike a bare
        # assertTrue; membership on os.environ avoids building keys().
        self.assertIn('GOOGLE_APPLICATION_CREDENTIALS', os.environ)
@unittest.skipIf(
    (not on_gcp()) or
    'GOOGLE_APPLICATION_CREDENTIALS' not in os.environ.keys(),
    'Skipping due to userinput or GCP Not detected')
class GCloudWorkerTest(unittest.TestCase):
    """Integration tests that run studio experiments on Google Cloud workers.
    Each test submits a small experiment through ``stubtest_worker`` and
    checks its expected stdout.
    """
    _multiprocess_shared_ = True
    def get_worker_manager(self):
        # NOTE(review): hard-coded GCP project id — confirm it is still valid.
        project = 'studio-ed756'
        return GCloudWorkerManager(project)
    @timeout(CLOUD_TEST_TIMEOUT, use_signals=False)
    def test_worker(self):
        # Hello-world experiment on an on-demand gcloud worker.
        experiment_name = 'test_gcloud_worker_' + str(uuid.uuid4())
        with stubtest_worker(
                self,
                experiment_name=experiment_name,
                runner_args=['--cloud=gcloud', '--force-git',
                             '--cloud-timeout=120'],
                config_name='test_config_http_client.yaml',
                test_script='tf_hello_world.py',
                script_args=['arg0'],
                expected_output='[ 2.0 6.0 ]',
        ):
            pass
    @timeout(CLOUD_TEST_TIMEOUT, use_signals=False)
    def test_worker_spot(self):
        # Same experiment, but on a preemptible (spot) gcloud worker.
        experiment_name = 'test_gcloud_spot_worker_' + str(uuid.uuid4())
        with stubtest_worker(
                self,
                experiment_name=experiment_name,
                runner_args=['--cloud=gcspot', '--force-git',
                             '--cloud-timeout=120'],
                config_name='test_config_http_client.yaml',
                test_script='tf_hello_world.py',
                script_args=['arg0'],
                expected_output='[ 2.0 6.0 ]',
        ):
            pass
    @timeout(CLOUD_TEST_TIMEOUT, use_signals=False)
    def test_worker_spot_container(self):
        # Spot worker running a Singularity container instead of a script.
        experiment_name = 'test_gcloud_spot_simg_' + str(uuid.uuid4())
        with stubtest_worker(
                self,
                experiment_name=experiment_name,
                runner_args=['--cloud=gcspot',
                             '--force-git',
                             '--cloud-timeout=120',
                             '--container=shub://vsoch/hello-world'],
                config_name='test_config_http_client.yaml',
                test_script='',
                script_args=[],
                expected_output='RaawwWWWWWRRRR!!',
                test_workspace=False
        ):
            pass
@unittest.skipIf(
    not on_aws(),
    'User indicated not on aws')
class UserIndicatedOnAWSTest(unittest.TestCase):
    """Sanity check that AWS credentials are available when running on AWS."""
    def test_on_enviornment(self):
        # Resolve the credential check first so the assertion line is clean.
        credentials_found = has_aws_credentials()
        self.assertTrue(credentials_found)
@unittest.skipIf(
    (not on_aws()) or not has_aws_credentials(),
    'Skipping due to userinput or AWS Not detected')
class EC2WorkerTest(unittest.TestCase):
    """Integration tests that run studio experiments on EC2 workers."""
    _multiprocess_shared_ = True
    def get_worker_manager(self):
        """Return a worker manager for EC2 instances."""
        return EC2WorkerManager()
    @timeout(CLOUD_TEST_TIMEOUT, use_signals=False)
    def test_worker(self):
        """Run a hello-world experiment on an on-demand EC2 GPU worker."""
        experiment_name = 'test_ec2_worker_' + str(uuid.uuid4())
        with stubtest_worker(
                self,
                experiment_name=experiment_name,
                runner_args=['--cloud=ec2', '--force-git', '--gpus=1',
                             '--cloud-timeout=120', '--ssh-keypair=peterz-k1'],
                config_name='test_config_http_client.yaml',
                test_script='tf_hello_world.py',
                script_args=['arg0'],
                expected_output='[ 2.0 6.0 ]',
        ):
            pass
    @timeout(CLOUD_TEST_TIMEOUT, use_signals=False)
    def test_worker_spot(self):
        """Run a hello-world experiment on an EC2 spot worker."""
        experiment_name = 'test_ec2_worker_' + str(uuid.uuid4())
        # NOTE(review): unlike the other tests, stubtest_worker is called
        # here without a `with` block; if it is a context manager its body
        # may never execute — confirm this is intended.
        stubtest_worker(
            self,
            experiment_name=experiment_name,
            runner_args=[
                '--cloud=ec2spot',
                '--force-git',
                '--bid=50%',
                '--cloud-timeout=120',
            ],
            config_name='test_config_http_client.yaml',
            test_script='tf_hello_world.py',
            script_args=['arg0'],
            expected_output='[ 2.0 6.0 ]',
        )
    def test_get_ondemand_prices(self):
        """Check the on-demand price lookup for two known instance types."""
        wm = self.get_worker_manager()
        prices = wm._get_ondemand_prices(['c4.large', 'p2.xlarge'])
        # NOTE(review): hard-coded AWS prices drift over time — verify these
        # against the current price list if this test starts failing.
        expected_prices = {'c4.large': 0.1, 'p2.xlarge': 0.9}
        # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
        self.assertEqual(prices, expected_prices)
    @timeout(CLOUD_TEST_TIMEOUT, use_signals=False)
    def test_worker_spot_container(self):
        """Run a Singularity container experiment on an EC2 spot worker."""
        experiment_name = 'test_gcloud_spot_simg_' + str(uuid.uuid4())
        with stubtest_worker(
                self,
                experiment_name=experiment_name,
                runner_args=['--cloud=ec2spot',
                             '--force-git',
                             '--cloud-timeout=120',
                             '--container=shub://vsoch/hello-world'],
                config_name='test_config_http_client.yaml',
                test_script='',
                script_args=[],
                expected_output='RaawwWWWWWRRRR!!',
                test_workspace=False
        ):
            pass
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "03543aae3b6ce0744e2cff82dbd9683a",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 78,
"avg_line_length": 32.72560975609756,
"alnum_prop": 0.5667970933482392,
"repo_name": "studioml/studio",
"id": "73ec549199d67ec4644523a2ceb490dbc004cb90",
"size": "5367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/cloud_worker_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "484"
},
{
"name": "HTML",
"bytes": "27833"
},
{
"name": "Python",
"bytes": "435537"
},
{
"name": "Shell",
"bytes": "19536"
}
],
"symlink_target": ""
}
|
import os
import sys
import subprocess
import re
import time
from collections import namedtuple
from conans import tools
from conans.model.version import Version
from conans.model.ref import ConanFileReference
from cpt import __version__ as package_tools_version, get_client_version
from cpt.config import ConfigManager, GlobalConf
from cpt.printer import Printer
from cpt.profiles import load_profile, patch_default_base_profile
from conans.client.conan_api import ProfileData
class CreateRunner(object):
    """Drive a local ``conan create`` for one reference/profile pair and
    optionally upload the resulting packages (and selected dependencies).
    """
    def __init__(self, profile_abs_path, reference, conan_api, uploader,
                 exclude_vcvars_precommand=False, build_policy=None, require_overrides=None, runner=None,
                 cwd=None, printer=None, upload=False, upload_only_recipe=None,
                 test_folder=None, config_url=None, config_args=None,
                 upload_dependencies=None, conanfile=None, skip_recipe_export=False,
                 update_dependencies=False, lockfile=None, profile_build_abs_path=None, global_conf=None):
        """Store run configuration.
        Comma-separated string forms of ``build_policy``,
        ``require_overrides`` and ``upload_dependencies`` are split into
        lists; a ``test_folder`` of the string "false" is mapped to False.
        """
        self.printer = printer or Printer()
        self._cwd = cwd or os.getcwd()
        self._uploader = uploader
        self._upload = upload
        self._conan_api = conan_api
        self._profile_abs_path = profile_abs_path
        self._reference = reference
        self._exclude_vcvars_precommand = exclude_vcvars_precommand
        self._build_policy = build_policy.split(",") if \
            isinstance(build_policy, str) else \
            build_policy
        self._require_overrides = require_overrides.split(",") if \
            isinstance(require_overrides, str) else \
            require_overrides
        self._runner = PrintRunner(runner or os.system, self.printer)
        self._test_folder = test_folder
        self._config_url = config_url
        self._config_args = config_args
        self._upload_only_recipe = upload_only_recipe
        self._conanfile = conanfile
        self._lockfile = lockfile
        self._upload_dependencies = upload_dependencies.split(",") if \
            isinstance(upload_dependencies, str) else \
            upload_dependencies
        self._upload_dependencies = self._upload_dependencies or []
        self.skip_recipe_export = skip_recipe_export
        self._update_dependencies = update_dependencies
        self._results = None
        self._profile_build_abs_path = profile_build_abs_path
        self._global_conf = global_conf
        patch_default_base_profile(conan_api, profile_abs_path)
        # The conan client cache moved between releases; pick the right one.
        client_version = get_client_version()
        if client_version < Version("1.12.0"):
            cache = self._conan_api._client_cache
        elif client_version < Version("1.18.0"):
            cache = self._conan_api._cache
        else:
            if not conan_api.app:
                conan_api.create_app()
            cache = conan_api.app.cache
        self._profile = load_profile(profile_abs_path, cache)
        if isinstance(self._test_folder, str) and self._test_folder.lower() == "false":
            self._test_folder = False
    @property
    def settings(self):
        """Settings of the loaded host profile."""
        return self._profile.settings
    @property
    def results(self):
        """Raw result dict of the last ``conan create``, or None."""
        return self._results
    def run(self):
        """Execute ``conan create`` and upload the built packages.
        Installs remote config / global.conf if configured, wraps the build
        in vcvars for Visual Studio profiles, and skips gracefully when the
        recipe raises ConanInvalidConfiguration.
        """
        client_version = get_client_version()
        if self._config_url:
            ConfigManager(self._conan_api, self.printer).install(url=self._config_url, args=self._config_args)
        if self._global_conf:
            global_conf = GlobalConf(self._conan_api, self.printer)
            global_conf.populate(self._global_conf)
        context = tools.no_op()
        compiler = self.settings.get("compiler", None)
        if not self._exclude_vcvars_precommand:
            if compiler == "Visual Studio" and "compiler.version" in self.settings:
                # Build mock settings so tools.vcvars can pick the right env.
                compiler_set = namedtuple("compiler", "version")(self.settings["compiler.version"])
                mock_sets = namedtuple("mock_settings",
                                       "arch compiler get_safe")(self.settings["arch"], compiler_set,
                                                                 lambda x: self.settings.get(x, None))
                context = tools.vcvars(mock_sets)
        with context:
            self.printer.print_rule()
            self.printer.print_profile(tools.load(self._profile_abs_path))
            if self._profile_build_abs_path is not None:
                self.printer.print_profile(tools.load(self._profile_build_abs_path))
            with self.printer.foldable_output("conan_create"):
                # The reference tuple grew a fifth field in conan 1.10.
                if client_version < Version("1.10.0"):
                    name, version, user, channel = self._reference
                else:
                    name, version, user, channel, _ = self._reference
                if self._build_policy:
                    self._build_policy = [] if self._build_policy == ["all"] else self._build_policy
                # https://github.com/conan-io/conan-package-tools/issues/184
                with tools.environment_append({"_CONAN_CREATE_COMMAND_": "1"}):
                    params = {"name": name, "version": version, "user": user,
                              "channel": channel, "build_modes": self._build_policy,
                              "require_overrides": self._require_overrides,
                              "profile_name": self._profile_abs_path,
                              "profile_build_name": self._profile_build_abs_path}
                    self.printer.print_message("Calling 'conan create'")
                    self.printer.print_dict(params)
                    with tools.chdir(self._cwd):
                        if Version(client_version) >= "1.8.0":
                            from conans.errors import ConanInvalidConfiguration
                            exc_class = ConanInvalidConfiguration
                        else:
                            # Fix: an empty tuple in an `except` clause
                            # legally matches nothing, whereas the previous
                            # `exc_class = None` made `except exc_class`
                            # raise TypeError at runtime on conan < 1.8.0.
                            exc_class = ()
                        try:
                            if client_version < Version("1.12.0"):
                                self._results = self._conan_api.create(self._conanfile, name=name, version=version,
                                                                       user=user, channel=channel,
                                                                       build_modes=self._build_policy,
                                                                       require_overrides=self._require_overrides,
                                                                       profile_name=self._profile_abs_path,
                                                                       test_folder=self._test_folder,
                                                                       not_export=self.skip_recipe_export,
                                                                       update=self._update_dependencies)
                            else:
                                if self._profile_build_abs_path is not None:
                                    # ProfileData grew a `conf` field in 1.38.
                                    if client_version < Version("1.38.0"):
                                        profile_build = ProfileData(profiles=[self._profile_build_abs_path], settings=None,
                                                                    options=None, env=None)
                                    else:
                                        profile_build = ProfileData(profiles=[self._profile_build_abs_path], settings=None,
                                                                    options=None, env=None, conf=None)
                                else:
                                    profile_build = None
                                self._results = self._conan_api.create(self._conanfile, name=name, version=version,
                                                                       user=user, channel=channel,
                                                                       build_modes=self._build_policy,
                                                                       require_overrides=self._require_overrides,
                                                                       profile_names=[self._profile_abs_path],
                                                                       test_folder=self._test_folder,
                                                                       not_export=self.skip_recipe_export,
                                                                       update=self._update_dependencies,
                                                                       lockfile=self._lockfile,
                                                                       profile_build=profile_build)
                        except exc_class as e:
                            # The recipe declared this configuration invalid;
                            # report and skip instead of failing the job.
                            self.printer.print_rule()
                            self.printer.print_message("Skipped configuration by the recipe: "
                                                       "%s" % str(e))
                            self.printer.print_rule()
                            return
                        # Upload the main reference plus any dependencies
                        # that were requested (or "all") and actually built.
                        for installed in self._results['installed']:
                            reference = installed["recipe"]["id"]
                            if client_version >= Version("1.10.0"):
                                reference = ConanFileReference.loads(reference)
                                reference = str(reference.copy_clear_rev())
                            if ((reference == str(self._reference)) or
                                (reference in self._upload_dependencies) or
                                ("all" in self._upload_dependencies)) and \
                                    installed['packages']:
                                package_id = installed['packages'][0]['id']
                                if installed['packages'][0]["built"]:
                                    if "@" not in reference:
                                        reference += "@"
                                    if self._upload_only_recipe:
                                        self._uploader.upload_recipe(reference, self._upload)
                                    else:
                                        self._uploader.upload_packages(reference,
                                                                       self._upload, package_id)
                                else:
                                    self.printer.print_message("Skipping upload for %s, "
                                                               "it hasn't been built" % package_id)
class DockerCreateRunner(object):
    def __init__(self, profile_text, base_profile_text, base_profile_name, reference,
                 conan_pip_package=None, docker_image=None, sudo_docker_command=None,
                 sudo_pip_command=False,
                 docker_image_skip_update=False, build_policy=None, require_overrides=None,
                 docker_image_skip_pull=False,
                 always_update_conan_in_docker=False,
                 upload=False, upload_retry=None, upload_only_recipe=None,
                 upload_force=None,
                 runner=None,
                 docker_shell="", docker_conan_home="",
                 docker_platform_param="", docker_run_options="",
                 lcow_user_workaround="",
                 test_folder=None,
                 pip_install=None,
                 docker_pip_command=None,
                 config_url=None,
                 config_args=None,
                 printer=None,
                 upload_dependencies=None,
                 conanfile=None,
                 force_selinux=None,
                 skip_recipe_export=False,
                 update_dependencies=False,
                 lockfile=None,
                 profile_build_text=None,
                 base_profile_build_text=None,
                 cwd=None,
                 global_conf=None):
        """Store the configuration needed to run 'conan create' inside a
        docker container: profiles, docker invocation options, pip/conan
        install options and upload behavior. Pure attribute assignment; no
        side effects beyond wrapping ``runner`` in a PrintRunner.
        """
        self.printer = printer or Printer()
        self._upload = upload
        self._upload_retry = upload_retry
        self._upload_only_recipe = upload_only_recipe
        self._upload_force = upload_force
        self._reference = reference
        self._conan_pip_package = conan_pip_package
        self._build_policy = build_policy
        self._require_overrides = require_overrides
        self._docker_image = docker_image
        self._always_update_conan_in_docker = always_update_conan_in_docker
        self._docker_image_skip_update = docker_image_skip_update
        self._docker_image_skip_pull = docker_image_skip_pull
        # Empty string (not None) so it can be interpolated into commands.
        self._sudo_docker_command = sudo_docker_command or ""
        self._sudo_pip_command = sudo_pip_command
        self._profile_text = profile_text
        self._base_profile_text = base_profile_text
        self._base_profile_name = base_profile_name
        self._docker_shell = docker_shell
        self._docker_conan_home = docker_conan_home
        self._docker_platform_param = docker_platform_param
        self._docker_run_options = docker_run_options or ""
        self._lcow_user_workaround = lcow_user_workaround
        self._runner = PrintRunner(runner, self.printer)
        self._test_folder = test_folder
        self._pip_install = pip_install
        self._docker_pip_command = docker_pip_command
        self._config_url = config_url
        self._config_args = config_args
        self._upload_dependencies = upload_dependencies or []
        self._conanfile = conanfile
        self._lockfile = lockfile
        self._force_selinux = force_selinux
        self._skip_recipe_export = skip_recipe_export
        self._update_dependencies = update_dependencies
        self._profile_build_text = profile_build_text
        self._base_profile_build_text = base_profile_build_text
        self._cwd = cwd or os.getcwd()
        self._global_conf = global_conf
def _pip_update_conan_command(self):
commands = []
# Hack for testing when retrieving cpt from artifactory repo
if "conan-package-tools" not in self._conan_pip_package:
commands.append("%s %s install conan_package_tools==%s "
"--upgrade --no-cache" % (self._sudo_pip_command,
self._docker_pip_command,
package_tools_version))
if self._conan_pip_package:
commands.append("%s %s install %s --no-cache" % (self._sudo_pip_command,
self._docker_pip_command,
self._conan_pip_package))
else:
commands.append("%s %s install conan --upgrade --no-cache" % (self._sudo_pip_command,
self._docker_pip_command))
if self._pip_install:
commands.append("%s %s install %s --upgrade --no-cache" % (self._sudo_pip_command,
self._docker_pip_command,
" ".join(self._pip_install)))
command = " && ".join(commands)
return command
@staticmethod
def is_selinux_running():
if tools.which("getenforce"):
output = subprocess.check_output("getenforce", shell=True)
return "Enforcing" in output.decode()
return False
    def run(self, pull_image=True, docker_entry_script=None):
        """Run the package build inside the configured docker image.

        :param pull_image: if True, pull the image first and (unless skipped)
            refresh conan inside it, committing the result back to the image.
        :param docker_entry_script: optional shell snippet executed inside the
            container before ``run_create_in_docker``.
        :raises Exception: if any of the docker commands exits non-zero.
        """
        envs = self.get_env_vars()
        # Only variables with a truthy value are forwarded into the container.
        env_vars_text = " ".join(['-e %s="%s"' % (key, value)
                                  for key, value in envs.items() if value])

        # Run the build
        if pull_image:
            if not self._docker_image_skip_pull:
                self.pull_image()
            if not self._docker_image_skip_update and not self._always_update_conan_in_docker:
                # Update the downloaded image
                with self.printer.foldable_output("update conan"):
                    try:
                        # Run pip inside a named container so its filesystem
                        # changes can be committed back to the image below.
                        command = '%s docker run %s --name conan_runner ' \
                                  ' %s %s %s "%s"' % (self._sudo_docker_command,
                                                      env_vars_text,
                                                      self._docker_run_options,
                                                      self._docker_image,
                                                      self._docker_shell,
                                                      self._pip_update_conan_command())
                        ret = self._runner(command)
                        if ret != 0:
                            raise Exception("Error updating the image: %s" % command)
                        # Save the image with the updated installed
                        # packages and remove the intermediate container
                        command = "%s docker commit conan_runner %s" % (self._sudo_docker_command,
                                                                        self._docker_image)
                        ret = self._runner(command)
                        if ret != 0:
                            raise Exception("Error commiting the image: %s" % command)
                    finally:
                        # Always clean up the helper container, even when the
                        # update or commit step above failed.
                        command = "%s docker rm conan_runner" % self._sudo_docker_command
                        ret = self._runner(command)
                        if ret != 0:
                            raise Exception("Error removing the temp container: %s" % command)

        if self._always_update_conan_in_docker:
            # Prepend the pip update to the in-container command instead of
            # baking it into the image.
            update_command = self._pip_update_conan_command() + " && "
        else:
            update_command = ""
        # ":z" relabels the bind mount for SELinux-confined docker daemons.
        volume_options = ":z" if (DockerCreateRunner.is_selinux_running() or self._force_selinux) else ""
        command = ('%s docker run --rm -v "%s:%s/project%s" %s %s %s %s %s '
                   '"%s cd project && '
                   '%s run_create_in_docker "' % (self._sudo_docker_command,
                                                  self._cwd,
                                                  self._docker_conan_home,
                                                  volume_options,
                                                  env_vars_text,
                                                  self._docker_run_options,
                                                  self._docker_platform_param,
                                                  self._docker_image,
                                                  self._docker_shell,
                                                  self._lcow_user_workaround,
                                                  update_command))

        # Push entry command before to build
        if docker_entry_script:
            command = command.replace("run_create_in_docker",
                                      "%s && run_create_in_docker" % docker_entry_script)
        self.printer.print_in_docker(self._docker_image)
        ret = self._runner(command)
        if ret != 0:
            raise Exception("Error building: %s" % command)
        self.printer.print_message("Exiting docker...")
def pull_image(self):
with self.printer.foldable_output("docker pull"):
for retry in range(1, 4):
ret = self._runner("%s docker pull %s" % (self._sudo_docker_command, self._docker_image))
if ret == 0:
break
elif retry == 3:
raise Exception("Error pulling the image: %s" % self._docker_image)
self.printer.print_message("Could not pull docker image '{}'. Retry ({})"
.format(self._docker_image, retry))
time.sleep(3)
def get_env_vars(self):
ret = {key: value for key, value in os.environ.items() if key.startswith("CONAN_") and
key != "CONAN_USER_HOME"}
ret["CONAN_REFERENCE"] = self._reference
ret["CPT_PROFILE"] = escape_env(self._profile_text)
ret["CPT_BASE_PROFILE"] = escape_env(self._base_profile_text)
ret["CPT_BASE_PROFILE_NAME"] = escape_env(self._base_profile_name)
ret["CPT_PROFILE_BUILD"] = escape_env(self._profile_build_text)
ret["CPT_GLOBAL_CONF"] = escape_env(self._global_conf)
ret["CONAN_USERNAME"] = escape_env(self._reference.user or ret.get("CONAN_USERNAME"))
ret["CONAN_TEMP_TEST_FOLDER"] = "1" # test package folder to a temp one
ret["CPT_UPLOAD_ENABLED"] = self._upload
ret["CPT_UPLOAD_RETRY"] = self._upload_retry
ret["CPT_UPLOAD_ONLY_RECIPE"] = self._upload_only_recipe
ret["CPT_UPLOAD_FORCE"] = self._upload_force
ret["CPT_BUILD_POLICY"] = escape_env(self._build_policy)
ret["CPT_REQUIRE_OVERRIDES"] = escape_env(self._require_overrides)
ret["CPT_TEST_FOLDER"] = escape_env(self._test_folder)
ret["CPT_CONFIG_URL"] = escape_env(self._config_url)
ret["CPT_CONFIG_ARGS"] = escape_env(self._config_args)
ret["CPT_UPLOAD_DEPENDENCIES"] = escape_env(self._upload_dependencies)
ret["CPT_CONANFILE"] = escape_env(self._conanfile)
ret["CPT_LOCKFILE"] = escape_env(self._lockfile)
ret["CPT_SKIP_RECIPE_EXPORT"] = self._skip_recipe_export
ret["CPT_UPDATE_DEPENDENCIES"] = self._update_dependencies
ret.update({key: value for key, value in os.environ.items() if key.startswith("PIP_")})
return ret
def unscape_env(text):
    """Reverse ``escape_env``: restore newlines from ``@@`` and quotes from ``||``.

    Falsy input (None, empty string) is returned unchanged.
    """
    if text:
        return text.replace("@@", "\n").replace('||', '"')
    return text
def escape_env(text):
    """Flatten *text* into a single-line, quote-free environment value.

    Carriage returns are dropped, newlines become ``@@`` and double quotes
    become ``||``.  Falsy input is returned unchanged.  NOTE(review): input
    that already contains ``@@`` or ``||`` is not round-trippable — confirm
    callers never pass such values.
    """
    if text:
        return text.replace("\r", "").replace("\n", "@@").replace('"', '||')
    return text
class PrintRunner(object):
    """Callable wrapper around a runner that echoes each command first.

    Credentials embedded in the command line (CONAN_LOGIN_USERNAME* /
    CONAN_PASSWORD* values) are masked in the echoed text; the real command
    is always passed through to the wrapped runner untouched.
    """

    # Masks the quoted value of any CONAN_LOGIN_USERNAME*/CONAN_PASSWORD*
    # variable.  The value pattern is [^"]* so passwords containing symbols
    # or spaces are masked too — the previous \w+ pattern leaked them.
    _SENSITIVE_RE = re.compile(r'(CONAN_(?:LOGIN_USERNAME|PASSWORD)[_\w+]*)="([^"]*)"')

    def __init__(self, runner, printer):
        self.runner = runner
        self.printer = printer

    def __call__(self, command, hide_sensitive=True):
        """Print *command* (masked unless ``hide_sensitive`` is False), then run it."""
        cmd_str = command
        if hide_sensitive:
            cmd_str = self._SENSITIVE_RE.sub(r'\1="xxxxxxxx"', cmd_str)
        self.printer.print_command(cmd_str)
        # Flush both streams so the echoed command appears before any output
        # produced by the command itself.
        sys.stderr.flush()
        sys.stdout.flush()
        return self.runner(command)
|
{
"content_hash": "378b9bc786c0f403d339f4555b3a5bfa",
"timestamp": "",
"source": "github",
"line_count": 435,
"max_line_length": 123,
"avg_line_length": 50.99080459770115,
"alnum_prop": 0.4956043460619449,
"repo_name": "conan-io/conan-package-tools",
"id": "eefb4873bd168cb2ea9e470ce54e63e6b1738dff",
"size": "22181",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "cpt/runner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "536316"
}
],
"symlink_target": ""
}
|
"""A script to generate a cloudbuild yaml."""
import os
import yaml
import util
# Add directories for new tests here.
# Each entry names a scenario directory under _TEST_DIR containing one
# integration test for the PHP FTL builder.
TEST_DIRS = [
    'destination_test', 'metadata_test', 'lock_test',
    'empty_descriptor_test', 'no_descriptor_test',
    'no_deps_test', 'additional_directory'
]
# Structure-test image pinned by digest-like tag for reproducible runs.
# NOTE(review): _ST_IMAGE is not referenced in this module's visible code —
# confirm it is still needed (possibly consumed by util.run_test_steps).
_ST_IMAGE = ('gcr.io/gcp-runtimes/structure-test:'
             '6195641f5a5a14c63c7945262066270842150ddb')
# Workspace path where the test scenarios live during the cloud build.
_TEST_DIR = '/workspace/ftl/php/testdata'
# Base runtime image each test app is layered onto.
_PHP_BASE = 'gcr.io/gae-runtimes/php72_app_builder:argo_current'
def main():
    """Generate and print the cloudbuild YAML for the PHP FTL integration tests.

    Builds the FTL PHP builder image, appends one build/test step group per
    scenario in TEST_DIRS, and dumps the resulting document to stdout.
    """
    cloudbuild_yaml = util.INITIAL_CLOUDBUILD_YAML
    cloudbuild_yaml['steps'].append(
        # Build the FTL image from source and load it into the daemon.
        {
            'name': 'gcr.io/cloud-builders/bazel',
            'args': ['run', '//ftl:php_builder_image', '--', '--norun'],
            'id': 'build-builder',
        }, )
    # Generate a set of steps for each test and add them.
    test_map = {}
    for test in TEST_DIRS:
        test_map[test] = [
            '--base', _PHP_BASE, '--name',
            'gcr.io/ftl-node-test/%s-image:latest' % test, '--directory',
            os.path.join(_TEST_DIR, test), '--no-cache'
        ]
    # Scenario-specific extra flags.
    test_map['destination_test'].extend(['--destination', '/alternative-app'])
    test_map['metadata_test'].extend(['--entrypoint', '/bin/echo'])
    test_map['metadata_test'].extend(['--exposed-ports', '8090,8091'])
    test_map['additional_directory'].extend([
        '--additional-directory',
        '/workspace/ftl/php/testdata/additional_directory'
    ])
    # .items() instead of the Python-2-only .iteritems() so the module stays
    # importable under Python 3; behavior is identical on Python 2.
    for test, args in test_map.items():
        cloudbuild_yaml['steps'] += util.run_test_steps(
            'php_builder_image', 'gcr.io/ftl-node-test/%s-image:latest' % test,
            os.path.join(_TEST_DIR, test), args)
    # NOTE(review): image names use the 'ftl-node-test' project although this
    # is the PHP suite — confirm against the other FTL test generators.
    print(yaml.dump(cloudbuild_yaml))


if __name__ == "__main__":
    main()
|
{
"content_hash": "dda31387e6380d6fc5c6aaf86ae47c8d",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 79,
"avg_line_length": 32.35087719298246,
"alnum_prop": 0.6019522776572668,
"repo_name": "priyawadhwa/runtimes-common",
"id": "53341d3752f5d0d0fb37f878862bc0a75870d7e6",
"size": "1844",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ftl/integration_tests/ftl_php_integration_tests_yaml.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "4463"
},
{
"name": "CSS",
"bytes": "1064"
},
{
"name": "Dockerfile",
"bytes": "3885"
},
{
"name": "Go",
"bytes": "194465"
},
{
"name": "HTML",
"bytes": "2736"
},
{
"name": "JavaScript",
"bytes": "4853"
},
{
"name": "Makefile",
"bytes": "18742"
},
{
"name": "PHP",
"bytes": "75349"
},
{
"name": "Python",
"bytes": "325583"
},
{
"name": "Shell",
"bytes": "5562"
},
{
"name": "Vue",
"bytes": "563"
}
],
"symlink_target": ""
}
|
# Packaging configuration for the rsfmodel distribution.
try:
    from setuptools import setup
except ImportError:
    # Fall back to the stdlib installer when setuptools is unavailable.
    from distutils.core import setup

setup(
    name='rsfmodel',
    version='0.2',
    description='rsfmodel',
    author='John R. Leeman, Ryan May',
    author_email='kd5wxb@gmail.com',
    url='http://github.com/jrleeman/rsfmodel',
    download_url='http://github.com/jrleeman/rsfmodel',
    install_requires=['nose'],
    packages=['rsfmodel'],
    scripts=[],
)
|
{
"content_hash": "c1e05e548d366a40e8cb934079bfa1ae",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 58,
"avg_line_length": 25.05263157894737,
"alnum_prop": 0.615546218487395,
"repo_name": "jrleeman/rsfmodel",
"id": "1cd402142d25d9564690a6a658a5e6e2f64ddfb0",
"size": "476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62423"
}
],
"symlink_target": ""
}
|
"""
The :class:`DXObject` class is the abstract base class for all remote
object handlers, and its subclass :class:`DXDataObject` is the abstract
base class for all remote data object handlers.
"""
from __future__ import print_function, unicode_literals, division, absolute_import
import time, copy, re
import dxpy.api
from ..exceptions import (DXError, DXAPIError, DXFileError, DXGTableError, DXSearchError, DXAppletError,
DXJobFailureError, AppError, AppInternalError, DXCLIError)
from ..compat import basestring
def verify_string_dxid(dxid, expected_classes):
    '''
    :param dxid: Value to verify as a DNAnexus ID of class *expected_classes*
    :param expected_classes: Single string or list of strings of allowed classes of the ID, e.g. "file" or ["project", "container"]
    :type expected_classes: string or list of strings
    :raises: :exc:`~dxpy.exceptions.DXError` if *dxid* is not a string or is not a valid DNAnexus ID of the expected class
    '''
    if isinstance(expected_classes, basestring):
        expected_classes = [expected_classes]
    if not isinstance(expected_classes, list) or len(expected_classes) == 0:
        raise DXError('verify_string_dxid: expected_classes should be a string or list of strings')
    # re.escape guards against class names that contain regex metacharacters;
    # an ID is "<class>-" followed by exactly 24 alphanumeric characters.
    class_alternatives = '|'.join(re.escape(c) for c in expected_classes)
    if not (isinstance(dxid, basestring) and
            re.match('^(' + class_alternatives + ')-[0-9a-zA-Z]{24}$', dxid)):
        # Build a human-readable "a", "a or b", or "a, b, or c" class list.
        if len(expected_classes) == 1:
            str_expected_classes = expected_classes[0]
        elif len(expected_classes) == 2:
            str_expected_classes = ' or '.join(expected_classes)
        else:
            str_expected_classes = ', '.join(expected_classes[:-1]) + ', or ' + expected_classes[-1]
        raise DXError('Invalid ID of class %s: %r' % (str_expected_classes, dxid))
class DXObject(object):
    """Abstract base class for all remote object handlers.

    Holds the object ID (``_dxid``), an optional project/container ID
    (``_proj``), and a cache of the last describe output (``_desc``) that
    backs attribute-style access via :meth:`__getattr__`.  Subclasses are
    expected to provide a ``_class`` attribute naming the DNAnexus class.
    """

    def __init__(self, dxid=None, project=None):
        # Initialize _dxid and _proj to None values, and have
        # subclasses actually perform the setting of the values once
        # they have been validated.  (The dxid/project parameters are
        # accepted for subclass signature compatibility but are not
        # stored here.)
        self._dxid, self._proj = None, None
        self._desc = {}

    def _repr(self, use_name=False):
        """Build the repr/str text; ``self._class`` comes from the subclass."""
        dxid = self._dxid if self._dxid is not None else "no ID stored"
        dxproj_id = self._proj if self._proj is not None else "no project ID stored"

        # Containers, projects, and apps have no meaningful containing
        # project, so their representation omits it.
        if use_name:
            if self._class not in ["container", "project", "app"]:
                desc = "<dxpy.{classname}: {name} ({dxid} ({dxproj_id}))>"
            else:
                desc = "<dxpy.{classname}: {name} ({dxid})>"
        else:
            if self._class not in ["container", "project", "app"]:
                desc = "<{module}.{classname} object at 0x{mem_loc:x}: {dxid} ({dxproj_id})>"
            else:
                desc = "<{module}.{classname} object at 0x{mem_loc:x}: {dxid}>"

        desc = desc.format(module=self.__module__,
                           classname=self.__class__.__name__,
                           dxid=dxid,
                           dxproj_id=dxproj_id,
                           mem_loc=id(self),
                           name=self._desc.get('name'))
        return desc

    def __str__(self):
        return self._repr(use_name=True)

    def __repr__(self):
        return self._repr()

    def __getattr__(self, attr):
        # Lazily populate the describe cache, then serve attributes from it.
        if not self._desc:
            self.describe()
        try:
            return self._desc[attr]
        except KeyError:
            # Only a missing key means "no such attribute"; any other error
            # from the lookup should propagate.  The previous bare `except`
            # swallowed everything and produced a message-less AttributeError.
            raise AttributeError(attr)

    def describe(self, *args, **kwargs):
        '''
        Avoid infinite recursion in __getattr__ if describe is not defined.

        Subclasses override this with a real API call.
        '''
        raise NotImplementedError()

    def set_id(self, dxid):
        '''
        :param dxid: New ID to be associated with the handler
        :type dxid: string

        Discards the currently stored ID and associates the handler with *dxid*
        '''
        if dxid is not None:
            verify_string_dxid(dxid, self._class)
        self._dxid = dxid

    def get_id(self):
        '''
        :returns: ID of the associated object
        :rtype: string

        Returns the ID that the handler is currently associated with.
        '''
        return self._dxid
class DXDataObject(DXObject):
    """Abstract base class for all remote data object handlers.

    Concrete subclasses must provide a ``_class`` attribute and the
    underscore-prefixed API bindings this class delegates to (``_new``,
    ``_describe``, ``_add_types``, ``_remove_types``, ``_get_details``,
    ``_set_details``, ``_set_visibility``, ``_rename``, ``_set_properties``,
    ``_add_tags``, ``_remove_tags``, ``_close``, ``_list_projects``).

    .. note:: The attribute values below are current as of the last time
              :meth:`~dxpy.bindings.DXDataObject.describe` was run.
              (Access to any of the below attributes causes
              :meth:`~dxpy.bindings.DXDataObject.describe` to be called
              if it has never been called before.)

    .. py:attribute:: name

       String giving the name of the object

    .. py:attribute:: folder

       String giving the full path to the folder containing the object

    .. py:attribute:: types

       List of strings indicating the types associated with the object

    .. py:attribute:: state

       A string containing one of the values "open", "closing", or "closed"

    .. py:attribute:: hidden

       Boolean indicating whether the object is hidden or not

    .. py:attribute:: links

       List of strings indicating object IDs that are pointed to by the
       object

    .. py:attribute:: sponsored

       Boolean indicating whether the object is sponsored by DNAnexus

    .. py:attribute:: tags

       List of strings indicating the tags that are associated with the
       object

    .. py:attribute:: created

       Timestamp at which the object was created, in milliseconds since
       January 1, 1970 at midnight (UTC).

    .. py:attribute:: modified

       Timestamp at which the object was last modified, in milliseconds
       since January 1, 1970 at midnight (UTC).

    .. py:attribute:: createdBy

       dict containing the following keys and values:

       * user: the string ID of the user who created the object or
         launched the job that created it
       * job (optional): the string ID of the job that created the
         object, if a job created the object
       * executable (optional): the string ID of the app or applet that
         the job was running, if a job created the object

    """

    def __init__(self, dxid=None, project=None):
        if not hasattr(self, '_class'):
            raise NotImplementedError(
                "DXDataObject is an abstract class; a subclass should be initialized instead.")

        DXObject.__init__(self)
        self.set_ids(dxid, project)

    @staticmethod
    def _get_creation_params(kwargs):
        """Split *kwargs* into the common data-object creation fields
        (returned as a hash, with ``project`` defaulted to the workspace)
        and the remaining class-specific keyword arguments."""
        common_creation_params = {"project", "name", "tags", "types", "hidden", "properties", "details", "folder", "parents"}

        # None-valued common params are dropped so the API sees only
        # explicitly provided fields.
        dx_hash = {p: kwargs[p] for p in kwargs if p in common_creation_params and kwargs[p] is not None}
        remaining_kwargs = {p: kwargs[p] for p in kwargs if p not in common_creation_params}

        if "project" not in dx_hash:
            dx_hash["project"] = dxpy.WORKSPACE_ID

        return dx_hash, remaining_kwargs

    def new(self, **kwargs):
        '''
        :param project: Project ID in which to create the new remote object
        :type project: string
        :param name: Name for the object
        :type name: string
        :param tags: Tags to add for the object
        :type tags: list of strings
        :param types: Types to add to the object
        :type types: list of strings
        :param hidden: Whether the object is to be hidden
        :type hidden: boolean
        :param properties: Properties given as key-value pairs of strings
        :type properties: dict
        :param details: Details to set for the object
        :type details: dict or list
        :param folder: Full path to the destination folder
        :type folder: string
        :param parents: If True, recursively create all parent folders if they are missing
        :type parents: boolean
        :rtype: :class:`DXDataObject`

        Creates a data object with the given fields. Only *project* is
        required, and only if no default project or workspace is set;
        the remaining arguments are optional and have default behavior
        as specified in the API documentation for the ``/new`` method of
        each data object class.
        '''
        if not hasattr(self, '_class'):
            # NOTE(review): the concatenated message below lacks a space
            # between "should" and "be" — left byte-identical here.
            raise NotImplementedError(
                "DXDataObject is an abstract class; a subclass should" + \
                "be initialized instead.")

        dx_hash, remaining_kwargs = self._get_creation_params(kwargs)
        self._new(dx_hash, **remaining_kwargs)

    def set_id(self, dxid):
        '''
        :param dxid: Object ID or a DNAnexus link (a dict with key "$dnanexus_link"); if a project ID is provided in the DNAnexus link, it will also be used to set the project ID
        :type dxid: string or dict

        Equivalent to calling
        :meth:`~dxpy.bindings.DXDataObject.set_ids` with the same
        arguments.
        '''
        self.set_ids(dxid)

    def set_ids(self, dxid, project=None):
        '''
        :param dxid: Object ID or a DNAnexus link (a dict with key "$dnanexus_link"); if a project ID is provided in the DNAnexus link, it will be used as *project* unless *project* has been explictly provided
        :type dxid: string or dict
        :param project: Project ID
        :type project: string

        Discards the currently stored ID and associates the handler with
        *dxid*. Associates the handler with the copy of the object in
        *project* (if no project is explicitly specified, the default
        data container is used).
        '''
        if is_dxlink(dxid):
            # An explicit *project* argument takes precedence over the
            # project embedded in the link.
            dxid, project_from_link = get_dxlink_ids(dxid)
            if project is None:
                project = project_from_link

        if dxid is not None:
            verify_string_dxid(dxid, self._class)
        self._dxid = dxid

        if project is None:
            self._proj = dxpy.WORKSPACE_ID
        elif project is not None:
            verify_string_dxid(project, ['project', 'container'])
            self._proj = project

    def get_proj_id(self):
        '''
        :returns: Project ID of associated object
        :rtype: string

        Returns the project ID, if any, that the handler is currently
        associated with.
        '''
        return self._proj

    def describe(self, incl_properties=False, incl_details=False, fields=None, default_fields=None, **kwargs):
        """
        :param fields: set of fields to include in the output, for
            example ``{'name', 'modified'}``. The field ``id`` is always
            implicitly included. If ``fields`` is specified, the default
            fields are not included (that is, only the fields specified
            here, and ``id``, are included) unless ``default_fields`` is
            additionally set to True.
        :type fields: set or sequence of str
        :param default_fields: if True, include the default fields in
            addition to fields requested in ``fields``, if any; if
            False, only the fields specified in ``fields``, if any, are
            returned (defaults to False if ``fields`` is specified, True
            otherwise)
        :type default_fields: bool
        :param incl_properties: if true, includes the properties of the
            object in the output (deprecated; use
            ``fields={'properties'}, default_fields=True`` instead)
        :type incl_properties: bool
        :param incl_details: if true, includes the details of the object
            in the output (deprecated; use ``fields={'details'},
            default_fields=True`` instead)
        :type incl_details: bool
        :returns: Description of the remote object
        :rtype: dict

        Return a dict with a description of the remote data object.

        The result includes the key-value pairs as specified in the API
        documentation for the ``/describe`` method of each data object
        class. The API defines some default set of fields that will be
        included (at a minimum, "id", "class", etc. should be available,
        and there may be additional fields that vary based on the
        class); the set of fields may be customized using ``fields`` and
        ``default_fields``.

        Any project-specific metadata fields (name, properties, and
        tags) are obtained from the copy of the object in the project
        associated with the handler, if possible.
        """
        if self._dxid is None:
            raise DXError('This {handler} handler has not been initialized with a {_class} ID and cannot be described'.format(
                handler=self.__class__.__name__,
                _class=self._class)
            )
        # The deprecated flags and the newer fields interface are mutually
        # exclusive.
        if (incl_properties or incl_details) and (fields is not None or default_fields is not None):
            raise ValueError('Cannot specify properties or details in conjunction with fields or default_fields')

        if incl_properties or incl_details:
            describe_input = dict(properties=incl_properties, details=incl_details)
        else:
            describe_input = {}

        if default_fields is not None:
            describe_input['defaultFields'] = default_fields

        if fields is not None:
            describe_input['fields'] = {field_name: True for field_name in fields}

        if self._proj is not None:
            describe_input["project"] = self._proj

        # Cache the description so __getattr__ can serve attribute access.
        self._desc = self._describe(self._dxid, describe_input, **kwargs)

        return self._desc

    def add_types(self, types, **kwargs):
        """
        :param types: Types to add to the object
        :type types: list of strings
        :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state

        Adds each of the specified types to the remote object. Takes no
        action for types that are already listed for the object.

        """
        self._add_types(self._dxid, {"types": types}, **kwargs)

    def remove_types(self, types, **kwargs):
        """
        :param types: Types to remove from the object
        :type types: list of strings
        :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state

        Removes each of the specified types from the remote object. Takes
        no action for types that the object does not currently have.

        """
        self._remove_types(self._dxid, {"types": types}, **kwargs)

    def get_details(self, **kwargs):
        """
        Returns the contents of the details of the object.

        :rtype: list or dict
        """
        return self._get_details(self._dxid, **kwargs)

    def set_details(self, details, **kwargs):
        """
        :param details: Details to set for the object
        :type details: dict or list
        :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state

        Sets the details for the remote object with the specified value.
        If the input contains the string ``"$dnanexus_link"`` as a key
        in a hash, it must be the only key in the hash, and its value
        must be a valid ID of an existing object.

        """
        return self._set_details(self._dxid, details, **kwargs)

    def hide(self, **kwargs):
        """
        :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state

        Hides the remote object.

        """
        return self._set_visibility(self._dxid, {"hidden": True}, **kwargs)

    def unhide(self, **kwargs):
        """
        :raises: :class:`~dxpy.exceptions.DXAPIError` if the object is not in the "open" state

        Makes the remote object visible.

        """
        return self._set_visibility(self._dxid, {"hidden": False}, **kwargs)

    def rename(self, name, **kwargs):
        """
        :param name: New name for the object
        :type name: string

        Renames the remote object.

        The name is changed on the copy of the object in the project
        associated with the handler.

        """
        return self._rename(self._dxid, {"project": self._proj,
                                         "name": name}, **kwargs)

    def get_properties(self, **kwargs):
        """
        :returns: Properties given as key-value pairs of strings
        :rtype: dict

        Returns the properties of the object.

        The properties are read from the copy of the object in the
        project associated with the handler.

        """
        return self.describe(incl_properties=True, **kwargs)["properties"]

    def set_properties(self, properties, **kwargs):
        """
        :param properties: Property names and values given as key-value pairs of strings
        :type properties: dict

        Given key-value pairs in *properties* for property names and
        values, the properties are set on the object for the given
        property names. Any property with a value of :const:`None`
        indicates the property will be deleted.

        .. note:: Any existing properties not mentioned in *properties*
           are not modified by this method.

        The properties are written to the copy of the object in the
        project associated with the handler.

        The following example sets the properties for "name" and
        "project" for a remote file::

            dxfile.set_properties({"name": "George", "project": "cancer"})

        Subsequently, the following would delete the property "project"::

            dxfile.set_properties({"project": None})

        """
        self._set_properties(self._dxid, {"project": self._proj,
                                          "properties": properties},
                             **kwargs)

    def add_tags(self, tags, **kwargs):
        """
        :param tags: Tags to add to the object
        :type tags: list of strings

        Adds each of the specified tags to the remote object. Takes no
        action for tags that are already listed for the object.

        The tags are added to the copy of the object in the project
        associated with the handler.

        """
        self._add_tags(self._dxid, {"project": self._proj, "tags": tags},
                       **kwargs)

    def remove_tags(self, tags, **kwargs):
        """
        :param tags: Tags to remove from the object
        :type tags: list of strings

        Removes each of the specified tags from the remote object. Takes
        no action for tags that the object does not currently have.

        The tags are removed from the copy of the object in the project
        associated with the handler.

        """
        self._remove_tags(self._dxid, {"project": self._proj, "tags": tags},
                          **kwargs)

    def close(self, **kwargs):
        """
        Closes the object for further modification to its types,
        details, visibility, and contents.

        """
        return self._close(self._dxid, **kwargs)

    def list_projects(self, **kwargs):
        """
        :rtype: list of strings

        Returns a list of project IDs of the projects that contain this
        object and are visible to the requesting user.

        """
        return self._list_projects(self._dxid, **kwargs)

    def remove(self, **kwargs):
        '''
        :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object

        Permanently removes the associated remote object from the
        associated project.
        '''
        if self._proj is None:
            raise DXError("Remove called when a project ID was not associated with this object handler")

        dxpy.api.project_remove_objects(self._proj, {"objects": [self._dxid]},
                                        **kwargs)

        # Reset internal state
        self._dxid = None
        self._proj = None
        self._desc = {}

    def move(self, folder, **kwargs):
        '''
        :param folder: Folder route to which to move the object
        :type folder: string
        :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object

        Moves the associated remote object to *folder*.
        '''
        if self._proj is None:
            raise DXError("Move called when a project ID was not associated with this object handler")

        dxpy.api.project_move(self._proj, {"objects": [self._dxid],
                                           "destination": folder},
                              **kwargs)

    def clone(self, project, folder="/", include_hidden_links=True,
              **kwargs):
        '''
        :param project: Destination project ID
        :type project: string
        :param folder: Folder route to which to move the object
        :type folder: string
        :param include_hidden_links: If True, hidden objects linked to by this object are also cloned into the destination project
        :type include_hidden_links: boolean
        :raises: :exc:`~dxpy.exceptions.DXError` if no project is associated with the object
        :returns: An object handler for the new cloned object
        :rtype: :class:`DXDataObject`

        Clones the associated remote object to *folder* in *project* and
        returns an object handler for the new object in the destination
        project.
        '''
        if self._proj is None:
            raise DXError("Clone called when a project ID was not associated with this object handler")

        dxpy.api.project_clone(self._proj,
                               {"objects": [self._dxid],
                                "project": project,
                                "destination": folder,
                                "includeHiddenLinks": include_hidden_links},
                               **kwargs)
        # The clone keeps the same object ID; only the project differs.
        cloned_copy = copy.copy(self)
        cloned_copy.set_ids(cloned_copy.get_id(), project)
        return cloned_copy

    def _get_state(self, **kwargs):
        '''
        :returns: State of the remote object
        :rtype: string

        Queries the API server for the object's state.  Returns a string
        in {"open", "closing", "closed"}.

        Note that this function is shorthand for:

            dxclass.describe()["state"]

        '''
        return self.describe(fields={'state'}, **kwargs)["state"]

    def _wait_on_close(self, timeout=3600*24*1, **kwargs):
        """Poll the object's state every 2 seconds until it is "closed".

        :param timeout: maximum seconds to wait (default: one day)
        :raises DXError: on an unexpected state or when *timeout* elapses
        """
        elapsed = 0
        while True:
            state = self._get_state(**kwargs)
            if state == "closed":
                break
            if state != "closing":
                raise DXError("Unexpected state: " + state)

            if elapsed >= timeout or elapsed < 0:
                raise DXError("Reached timeout while waiting for the remote object to close")

            time.sleep(2)
            elapsed += 2
from .dxfile import DXFile, DXFILE_HTTP_THREADS, DEFAULT_BUFFER_SIZE
from .download_all_inputs import download_all_inputs
from .dxfile_functions import open_dxfile, new_dxfile, download_dxfile, upload_local_file, upload_string, list_subfolders, download_folder
from .dxgtable import DXGTable, NULL, DXGTABLE_HTTP_THREADS
from .dxgtable_functions import open_dxgtable, new_dxgtable
from .dxrecord import DXRecord, new_dxrecord
from .dxproject import DXContainer, DXProject
from .dxjob import DXJob, new_dxjob
from .dxanalysis import DXAnalysis
from .dxapplet import DXExecutable, DXApplet
from .dxapp import DXApp
from .dxworkflow import DXWorkflow, new_dxworkflow
from .auth import user_info, whoami
from .dxdataobject_functions import dxlink, is_dxlink, get_dxlink_ids, get_handler, describe, get_details, remove
from .search import (find_data_objects, find_executions, find_jobs, find_analyses, find_projects, find_apps,
find_one_data_object, find_one_project, find_one_app, resolve_data_objects, find_orgs,
org_find_members, org_find_projects, org_find_apps)
|
{
"content_hash": "0f437d6e775b19642108a047840c6c02",
"timestamp": "",
"source": "github",
"line_count": 653,
"max_line_length": 209,
"avg_line_length": 36.64931087289433,
"alnum_prop": 0.6088082901554405,
"repo_name": "jhuttner/dx-toolkit",
"id": "6884eeb6a769784f209dab12b97bdf8c0dc5aaf9",
"size": "24612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/dxpy/bindings/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3198"
},
{
"name": "C",
"bytes": "6957"
},
{
"name": "C++",
"bytes": "1880260"
},
{
"name": "CMake",
"bytes": "26162"
},
{
"name": "Groovy",
"bytes": "8855"
},
{
"name": "Java",
"bytes": "2177401"
},
{
"name": "Makefile",
"bytes": "50221"
},
{
"name": "NSIS",
"bytes": "17861"
},
{
"name": "Perl",
"bytes": "46855"
},
{
"name": "PowerShell",
"bytes": "1442"
},
{
"name": "Python",
"bytes": "2261586"
},
{
"name": "R",
"bytes": "550095"
},
{
"name": "Ruby",
"bytes": "78045"
},
{
"name": "Shell",
"bytes": "58977"
}
],
"symlink_target": ""
}
|
"""Support for UK Met Office weather service."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION, CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE,
CONF_MONITORED_CONDITIONS, CONF_NAME, TEMP_CELSIUS)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_LAST_UPDATE = 'last_update'
ATTR_SENSOR_ID = 'sensor_id'
ATTR_SITE_ID = 'site_id'
ATTR_SITE_NAME = 'site_name'
ATTRIBUTION = "Data provided by the Met Office"
CONDITION_CLASSES = {
'cloudy': ['7', '8'],
'fog': ['5', '6'],
'hail': ['19', '20', '21'],
'lightning': ['30'],
'lightning-rainy': ['28', '29'],
'partlycloudy': ['2', '3'],
'pouring': ['13', '14', '15'],
'rainy': ['9', '10', '11', '12'],
'snowy': ['22', '23', '24', '25', '26', '27'],
'snowy-rainy': ['16', '17', '18'],
'sunny': ['0', '1'],
'windy': [],
'windy-variant': [],
'exceptional': [],
}
DEFAULT_NAME = "Met Office"
VISIBILITY_CLASSES = {
'VP': '<1',
'PO': '1-4',
'MO': '4-10',
'GO': '10-20',
'VG': '20-40',
'EX': '>40'
}
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=35)
# Sensor types are defined like: Name, units
SENSOR_TYPES = {
'name': ['Station Name', None],
'weather': ['Weather', None],
'temperature': ['Temperature', TEMP_CELSIUS],
'feels_like_temperature': ['Feels Like Temperature', TEMP_CELSIUS],
'wind_speed': ['Wind Speed', 'mph'],
'wind_direction': ['Wind Direction', None],
'wind_gust': ['Wind Gust', 'mph'],
'visibility': ['Visibility', None],
'visibility_distance': ['Visibility Distance', 'km'],
'uv': ['UV', None],
'precipitation': ['Probability of Precipitation', '%'],
'humidity': ['Humidity', '%']
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=[]):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Inclusive(CONF_LATITUDE, 'coordinates',
'Latitude and longitude must exist together'): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, 'coordinates',
'Latitude and longitude must exist together'): cv.longitude,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Met Office sensor platform."""
    import datapoint as dp

    api_key = config.get(CONF_API_KEY)
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
    platform_name = config.get(CONF_NAME)
    datapoint = dp.connection(api_key=api_key)

    if None in (latitude, longitude):
        _LOGGER.error("Latitude or longitude not set in Home Assistant config")
        return

    try:
        site = datapoint.get_nearest_site(
            latitude=latitude, longitude=longitude)
    except dp.exceptions.APIException as err:
        _LOGGER.error("Received error from Met Office Datapoint: %s", err)
        return

    if not site:
        _LOGGER.error("Unable to get nearest Met Office forecast site")
        return

    # Prime the shared data handler once; abort if the first fetch failed.
    site_data = MetOfficeCurrentData(hass, datapoint, site)
    site_data.update()
    if site_data.data is None:
        return

    add_entities(
        [MetOfficeCurrentSensor(site, site_data, condition, platform_name)
         for condition in config[CONF_MONITORED_CONDITIONS]],
        True)
class MetOfficeCurrentSensor(Entity):
    """Implementation of a Met Office current sensor."""

    def __init__(self, site, data, condition, name):
        """Initialize the sensor.

        site -- Datapoint forecast site the readings belong to.
        data -- shared MetOfficeCurrentData holding the latest forecast.
        condition -- key into SENSOR_TYPES selecting the reading to expose.
        name -- configured platform name, used as the entity name prefix.
        """
        self._condition = condition
        self.data = data
        self._name = name
        self.site = site

    @property
    def name(self):
        """Return the name of the sensor."""
        return '{} {}'.format(self._name, SENSOR_TYPES[self._condition][0])

    @property
    def state(self):
        """Return the state of the sensor."""
        if (self._condition == 'visibility_distance' and
                hasattr(self.data.data, 'visibility')):
            # Translate the visibility category (e.g. 'GO') into a km range.
            return VISIBILITY_CLASSES.get(self.data.data.visibility.value)
        if hasattr(self.data.data, self._condition):
            variable = getattr(self.data.data, self._condition)
            if self._condition == 'weather':
                # Map the raw weather code onto a Home Assistant condition.
                # next() with a default returns None for codes missing from
                # CONDITION_CLASSES instead of raising IndexError as the
                # previous [...][0] lookup did.
                return next(
                    (k for k, v in CONDITION_CLASSES.items()
                     if self.data.data.weather.value in v), None)
            return variable.value
        return None

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return SENSOR_TYPES[self._condition][1]

    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        attr = {}
        attr[ATTR_ATTRIBUTION] = ATTRIBUTION
        attr[ATTR_LAST_UPDATE] = self.data.data.date
        attr[ATTR_SENSOR_ID] = self._condition
        attr[ATTR_SITE_ID] = self.site.id
        attr[ATTR_SITE_NAME] = self.site.name
        return attr

    def update(self):
        """Update current conditions (delegates to the shared data object)."""
        self.data.update()
class MetOfficeCurrentData:
    """Get data from Datapoint.

    Shared by all MetOfficeCurrentSensor entities of one site; ``data``
    holds the most recent forecast timestep, or None before the first
    successful update and after a failed one.
    """
    def __init__(self, hass, datapoint, site):
        """Initialize the data object."""
        self._datapoint = datapoint
        self._site = site
        self.data = None
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from Datapoint."""
        import datapoint as dp
        try:
            # Fetch the 3-hourly forecast and keep only the current timestep.
            forecast = self._datapoint.get_forecast_for_site(
                self._site.id, '3hourly')
            self.data = forecast.now()
        except (ValueError, dp.exceptions.APIException) as err:
            _LOGGER.error("Check Met Office %s", err.args)
            self.data = None
|
{
"content_hash": "c5250183235bb24d42a6587e6293b0f2",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 79,
"avg_line_length": 31.710526315789473,
"alnum_prop": 0.6144398340248962,
"repo_name": "jnewland/home-assistant",
"id": "ff334823ec6f84970729f463461eeee09928b4f8",
"size": "6025",
"binary": false,
"copies": "7",
"ref": "refs/heads/ci",
"path": "homeassistant/components/metoffice/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "Python",
"bytes": "15240512"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17862"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``screen`` field from the ``message`` model."""
    dependencies = [
        ('webirc', '0010_enterexitevent_message'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='message',
            name='screen',
        ),
    ]
|
{
"content_hash": "736a2d808a82e2f3e9558f9665edd22d",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 50,
"avg_line_length": 19.058823529411764,
"alnum_prop": 0.5925925925925926,
"repo_name": "jawsper/webirc",
"id": "3761748f2bedff340f05d4477b2b712c19048b81",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/webirc/migrations/0011_remove_message_screen.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1079"
},
{
"name": "HTML",
"bytes": "6358"
},
{
"name": "JavaScript",
"bytes": "17924"
},
{
"name": "Python",
"bytes": "38987"
}
],
"symlink_target": ""
}
|
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.core.urlresolvers import reverse_lazy, reverse
from django.shortcuts import redirect, render
from django.views import generic
from groups.models import Group, GroupMember
from .forms import PaymentForm, PaymentProxyForm
from .mixins import PaymentMixin
from .models import Payment, PaymentProxy
class PaymentListView(UserPassesTestMixin, generic.ListView):
    """List all payments of the group named by the URL slug.

    Access is limited to staff users and members of the group (see
    ``test_func``); everyone else is redirected back to the group detail
    page with a warning instead of the mixin's default 403 handling.
    """
    model = Payment
    template_name = 'accounting/payment_list.html'
    def dispatch(self, request, *args, **kwargs):
        # Run the UserPassesTestMixin check manually so a failed check can
        # redirect with a friendly message instead of raising/forbidding.
        user_test_result = self.get_test_func()()
        if not user_test_result:
            messages.warning(request, "You don't have permission to access this page.")
            return redirect('groups:group_detail', kwargs.get('slug'))
        return super(PaymentListView, self).dispatch(request, *args, **kwargs)
    def get_queryset(self, *args, **kwargs):
        # Only payments belonging to the group from the URL.
        return Payment.objects.filter(group=self.get_group())
    def get_group(self, queryset=None):
        """Resolve the Group from the ``slug`` URL kwarg."""
        group_slug = self.kwargs.get('slug')
        return Group.objects.get(slug=group_slug)
    def test_func(self):
        """Allow staff or members of the group."""
        return self.request.user.is_staff or self.request.user in self.get_group().members.all()
class PaymentDetailView(PaymentMixin, generic.DetailView):
    """Display a single Payment."""
    model = Payment
    template_name = 'accounting/payment_detail.html'
class PaymentCreateView(PaymentMixin, generic.CreateView):
    """Create a Payment for the group named by the URL slug.

    The creating user is recorded as the payment's accountant via their
    GroupMember entry for that group.
    """
    model = Payment
    template_name = 'accounting/payment_create_form.html'

    def get_success_url(self):
        """Return to the group's payment list after a successful create."""
        return reverse_lazy('accounting:payment_list', kwargs={'slug': self.get_group().slug})

    def form_valid(self, form):
        """Attach the group and the acting accountant before saving."""
        payment = form.save(commit=False)
        payment.group = self.get_group()
        payment.accountant = GroupMember.objects.get(user=self.request.user, group=self.get_group())
        payment.save()
        messages.success(self.request, 'Awwwsome! Your payment has been successfully created!')
        return super(PaymentCreateView, self).form_valid(form)

    def test_func(self):
        """Allow staff, the payment's accountant, or a group moderator.

        NOTE(review): self.get_object() in a CreateView looks suspect --
        no Payment exists yet while creating one; confirm PaymentMixin
        supplies a sensible get_object() for this view.
        """
        try:
            return self.request.user.is_staff or self.get_object().accountant.user == self.request.user or self.get_object().group.memberships.get(user=self.request.user).is_moderator
        except GroupMember.DoesNotExist:
            messages.error(
                self.request, 'This Payment has nobody assigned, contact site administrator or your group\'s moderator.')
            # test_func must return a boolean.  The old code constructed a
            # redirect here and discarded it, falling through to an implicit
            # None; deny access explicitly instead.
            return False
class PaymentUpdateView(PaymentMixin, generic.UpdateView):
    """Edit an existing Payment, addressed by group slug and payment pk."""
    model = Payment
    template_name = 'accounting/payment_update_form.html'

    def get_object(self, queryset=None):
        """Resolve the Payment from the group slug and payment pk in the URL."""
        group_slug = self.kwargs['slug']
        payment_pk = self.kwargs['pk']
        return Payment.objects.get(group__slug=group_slug, pk=payment_pk)

    def get_success_url(self):
        """Return to the payment's detail page after a successful edit."""
        return reverse_lazy('accounting:payment_detail', kwargs={'slug': self.get_object().group.slug, 'pk': self.get_object().pk})

    def test_func(self):
        """Allow staff, the payment's accountant, or a group moderator."""
        try:
            return self.request.user.is_staff or self.get_object().accountant.user == self.request.user or self.get_object().group.memberships.get(user=self.request.user).is_moderator
        except GroupMember.DoesNotExist:
            messages.error(
                self.request, 'This Payment has nobody assigned, contact site administrator or your group\'s moderator.')
            # test_func must return a boolean.  The old code constructed a
            # redirect here and discarded it, falling through to an implicit
            # None; deny access explicitly instead.
            return False
class PaymentDeleteView(PaymentMixin, generic.DeleteView):
    """Delete a Payment after confirmation."""
    model = Payment
    template_name = 'accounting/payment_confirm_delete.html'

    def get_success_url(self):
        """Return to the group's payment list after deletion."""
        return reverse_lazy('accounting:payment_list', kwargs={'slug': self.get_object().group.slug})

    def test_func(self):
        """Allow staff, the payment's accountant, or a group moderator."""
        try:
            return self.request.user.is_staff or self.get_object().accountant.user == self.request.user or self.get_object().group.memberships.get(user=self.request.user).is_moderator
        except GroupMember.DoesNotExist:
            messages.error(
                self.request, 'This Payment has nobody assigned, contact site administrator or your group\'s moderator.')
            # test_func must return a boolean.  The old code constructed a
            # redirect here and discarded it, falling through to an implicit
            # None; deny access explicitly instead.
            return False
class PaymentProxyUpdateView(LoginRequiredMixin, UserPassesTestMixin, generic.UpdateView):
    """Edit a PaymentProxy attached to one of the group's payments.

    Only staff, the group's moderators, or the accountant of the parent
    payment pass ``test_func``.
    """
    model = PaymentProxy
    login_url = reverse_lazy('accounts:login')
    template_name = 'accounting/pp_update_form.html'
    form_class = PaymentProxyForm
    def get_success_url(self):
        # Jump back to the parent payment's detail page.
        return reverse('accounting:payment_detail', kwargs={'slug': self.get_group().slug, 'pk': self.get_object().payment.pk})
    def get_object(self, queryset=None):
        # The proxy pk comes from a dedicated 'proxy_pk' URL kwarg, not 'pk'.
        proxy_pk = self.kwargs.get('proxy_pk')
        return PaymentProxy.objects.get(pk=proxy_pk)
    def get_group(self, queryset=None):
        """Resolve the Group from the ``slug`` URL kwarg."""
        group_slug = self.kwargs.get('slug')
        return Group.objects.get(slug=group_slug)
    def test_func(self):
        try:
            return self.request.user.is_staff or self.get_group().memberships.get(user=self.request.user).is_moderator or self.get_object().payment.accountant.user == self.request.user
        except GroupMember.DoesNotExist:
            # Requesting user is not a member of the group at all.
            return False
|
{
"content_hash": "0ad01172fa049232b096f509ec7e5339",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 184,
"avg_line_length": 42.4390243902439,
"alnum_prop": 0.6948275862068966,
"repo_name": "MilyMilo/sci-organizer",
"id": "51456e3fa941739ecd43c6d44b7e79931e448975",
"size": "5220",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "accounting/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4152"
},
{
"name": "HTML",
"bytes": "53975"
},
{
"name": "Python",
"bytes": "100619"
},
{
"name": "Shell",
"bytes": "127"
}
],
"symlink_target": ""
}
|
# Demonstration of building lists with explicit loops vs. comprehensions.
fish_tuple = ('blowfish', 'clownfish', 'catfish', 'octopus')

# Explicit loop: collect every fish except 'octopus'.
fish_list = []
for species in fish_tuple:
    if species != 'octopus':
        fish_list.append(species)
# print(fish_list)

# Same result expressed as a single list comprehension.
fish_list = [species for species in fish_tuple if species != 'octopus']
# print(fish_list)

# Multiple conditions: keep numbers divisible by both 3 and 5.
number_list = [n for n in range(100) if n % 3 == 0 and n % 5 == 0]
print(number_list)

list_nums = [1, 2, 3]
# Triple every number in the list.
times_three = [num * 3 for num in list_nums]
print(times_three)
|
{
"content_hash": "7d959d85b3780b1166f3f58b09995f4e",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 65,
"avg_line_length": 26.545454545454547,
"alnum_prop": 0.6541095890410958,
"repo_name": "developerQuinnZ/this_will_work",
"id": "a7db104efd0acedd6ae9849b4fe276e5e5f22a5e",
"size": "656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "student-work/ashley_riehl/week_2/day_1/working_with_csvs/comprehensions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1748"
},
{
"name": "Jupyter Notebook",
"bytes": "9569022"
},
{
"name": "Makefile",
"bytes": "6783"
},
{
"name": "PLpgSQL",
"bytes": "11955896"
},
{
"name": "Python",
"bytes": "290864"
},
{
"name": "Shell",
"bytes": "5873"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
import os
import re
from django.core.serializers.base import DeserializationError
from django.core import management
from django.core.management.base import CommandError
from django.core.management.commands.dumpdata import sort_dependencies
from django.db import transaction, IntegrityError
from django.db.models import signals
from django.test import (TestCase, TransactionTestCase, skipIfDBFeature,
skipUnlessDBFeature)
from django.test.utils import override_settings
from django.utils import six
from django.utils.six import PY3, StringIO
from .models import (Animal, Stuff, Absolute, Parent, Child, Article, Widget,
Store, Person, Book, NKChild, RefToNKChild, Circle1, Circle2, Circle3,
ExternalDependency, Thingy)
class TestFixtures(TestCase):
    """Regression tests for the ``loaddata``/``dumpdata`` management commands."""
    def animal_pre_save_check(self, signal, sender, instance, **kwargs):
        # Signal receiver used by test_field_value_coerce to record which
        # Python types the deserializer produced before the database write.
        self.pre_save_checks.append(
            (
                'Count = %s (%s)' % (instance.count, type(instance.count)),
                'Weight = %s (%s)' % (instance.weight, type(instance.weight)),
            )
        )
    def test_duplicate_pk(self):
        """
        This is a regression test for ticket #3790.
        """
        # Load a fixture that uses PK=1
        management.call_command(
            'loaddata',
            'sequence',
            verbosity=0,
            commit=False
        )
        # Create a new animal. Without a sequence reset, this new object
        # will take a PK of 1 (on Postgres), and the save will fail.
        animal = Animal(
            name='Platypus',
            latin_name='Ornithorhynchus anatinus',
            count=2,
            weight=2.2
        )
        animal.save()
        self.assertGreater(animal.id, 1)
    def test_loaddata_not_found_fields_not_ignore(self):
        """
        Test for ticket #9279 -- Error is raised for entries in
        the serialised data for fields that have been removed
        from the database when not ignored.
        """
        with self.assertRaises(DeserializationError):
            management.call_command(
                'loaddata',
                'sequence_extra',
                verbosity=0
            )
    def test_loaddata_not_found_fields_ignore(self):
        """
        Test for ticket #9279 -- Ignores entries in
        the serialised data for fields that have been removed
        from the database.
        """
        management.call_command(
            'loaddata',
            'sequence_extra',
            ignore=True,
            verbosity=0,
            commit=False
        )
        self.assertEqual(Animal.specimens.all()[0].name, 'Lion')
    @skipIfDBFeature('interprets_empty_strings_as_nulls')
    def test_pretty_print_xml(self):
        """
        Regression test for ticket #4558 -- pretty printing of XML fixtures
        doesn't affect parsing of None values.
        """
        # Load a pretty-printed XML fixture with Nulls.
        management.call_command(
            'loaddata',
            'pretty.xml',
            verbosity=0,
            commit=False
        )
        self.assertEqual(Stuff.objects.all()[0].name, None)
        self.assertEqual(Stuff.objects.all()[0].owner, None)
    @skipUnlessDBFeature('interprets_empty_strings_as_nulls')
    def test_pretty_print_xml_empty_strings(self):
        """
        Regression test for ticket #4558 -- pretty printing of XML fixtures
        doesn't affect parsing of None values.
        """
        # Load a pretty-printed XML fixture with Nulls.
        management.call_command(
            'loaddata',
            'pretty.xml',
            verbosity=0,
            commit=False
        )
        self.assertEqual(Stuff.objects.all()[0].name, '')
        self.assertEqual(Stuff.objects.all()[0].owner, None)
    def test_absolute_path(self):
        """
        Regression test for ticket #6436 --
        os.path.join will throw away the initial parts of a path if it
        encounters an absolute path.
        This means that if a fixture is specified as an absolute path,
        we need to make sure we don't discover the absolute path in every
        fixture directory.
        """
        load_absolute_path = os.path.join(
            os.path.dirname(__file__),
            'fixtures',
            'absolute.json'
        )
        management.call_command(
            'loaddata',
            load_absolute_path,
            verbosity=0,
            commit=False
        )
        self.assertEqual(Absolute.load_count, 1)
    def test_unknown_format(self):
        """
        Test for ticket #4371 -- Loading data of an unknown format should fail
        Validate that error conditions are caught correctly
        """
        with six.assertRaisesRegex(self, management.CommandError,
                "Problem installing fixture 'bad_fixture1': "
                "unkn is not a known serialization format."):
            management.call_command(
                'loaddata',
                'bad_fixture1.unkn',
                verbosity=0,
                commit=False,
            )
    @override_settings(SERIALIZATION_MODULES={'unkn': 'unexistent.path'})
    def test_unimportable_serializer(self):
        """
        Test that failing serializer import raises the proper error
        """
        with six.assertRaisesRegex(self, ImportError,
                r"No module named.*unexistent"):
            management.call_command(
                'loaddata',
                'bad_fixture1.unkn',
                verbosity=0,
                commit=False,
            )
    def test_invalid_data(self):
        """
        Test for ticket #4371 -- Loading a fixture file with invalid data
        using explicit filename.
        Validate that error conditions are caught correctly
        """
        with six.assertRaisesRegex(self, management.CommandError,
                "No fixture data found for 'bad_fixture2'. \(File format may be invalid.\)"):
            management.call_command(
                'loaddata',
                'bad_fixture2.xml',
                verbosity=0,
                commit=False,
            )
    def test_invalid_data_no_ext(self):
        """
        Test for ticket #4371 -- Loading a fixture file with invalid data
        without file extension.
        Validate that error conditions are caught correctly
        """
        with six.assertRaisesRegex(self, management.CommandError,
                "No fixture data found for 'bad_fixture2'. \(File format may be invalid.\)"):
            management.call_command(
                'loaddata',
                'bad_fixture2',
                verbosity=0,
                commit=False,
            )
    def test_empty(self):
        """
        Test for ticket #4371 -- Loading a fixture file with no data returns an error.
        Validate that error conditions are caught correctly
        """
        with six.assertRaisesRegex(self, management.CommandError,
                "No fixture data found for 'empty'. \(File format may be invalid.\)"):
            management.call_command(
                'loaddata',
                'empty',
                verbosity=0,
                commit=False,
            )
    def test_error_message(self):
        """
        (Regression for #9011 - error message is correct)
        """
        with six.assertRaisesRegex(self, management.CommandError,
                "^No fixture data found for 'bad_fixture2'. \(File format may be invalid.\)$"):
            management.call_command(
                'loaddata',
                'bad_fixture2',
                'animal',
                verbosity=0,
                commit=False,
            )
    def test_pg_sequence_resetting_checks(self):
        """
        Test for ticket #7565 -- PostgreSQL sequence resetting checks shouldn't
        ascend to parent models when inheritance is used
        (since they are treated individually).
        """
        management.call_command(
            'loaddata',
            'model-inheritance.json',
            verbosity=0,
            commit=False
        )
        self.assertEqual(Parent.objects.all()[0].id, 1)
        self.assertEqual(Child.objects.all()[0].id, 1)
    def test_close_connection_after_loaddata(self):
        """
        Test for ticket #7572 -- MySQL has a problem if the same connection is
        used to create tables, load data, and then query over that data.
        To compensate, we close the connection after running loaddata.
        This ensures that a new connection is opened when test queries are
        issued.
        """
        management.call_command(
            'loaddata',
            'big-fixture.json',
            verbosity=0,
            commit=False
        )
        articles = Article.objects.exclude(id=9)
        self.assertEqual(
            list(articles.values_list('id', flat=True)),
            [1, 2, 3, 4, 5, 6, 7, 8]
        )
        # Just for good measure, run the same query again.
        # Under the influence of ticket #7572, this will
        # give a different result to the previous call.
        self.assertEqual(
            list(articles.values_list('id', flat=True)),
            [1, 2, 3, 4, 5, 6, 7, 8]
        )
    def test_field_value_coerce(self):
        """
        Test for tickets #8298, #9942 - Field values should be coerced into the
        correct type by the deserializer, not as part of the database write.
        """
        self.pre_save_checks = []
        signals.pre_save.connect(self.animal_pre_save_check)
        try:
            management.call_command(
                'loaddata',
                'animal.xml',
                verbosity=0,
                commit=False,
            )
            self.assertEqual(
                self.pre_save_checks,
                [
                    ("Count = 42 (<%s 'int'>)" % ('class' if PY3 else 'type'),
                     "Weight = 1.2 (<%s 'float'>)" % ('class' if PY3 else 'type'))
                ]
            )
        finally:
            signals.pre_save.disconnect(self.animal_pre_save_check)
    def test_dumpdata_uses_default_manager(self):
        """
        Regression for #11286
        Ensure that dumpdata honors the default manager
        Dump the current contents of the database as a JSON fixture
        """
        management.call_command(
            'loaddata',
            'animal.xml',
            verbosity=0,
            commit=False,
        )
        management.call_command(
            'loaddata',
            'sequence.json',
            verbosity=0,
            commit=False,
        )
        animal = Animal(
            name='Platypus',
            latin_name='Ornithorhynchus anatinus',
            count=2,
            weight=2.2
        )
        animal.save()
        stdout = StringIO()
        management.call_command(
            'dumpdata',
            'fixtures_regress.animal',
            format='json',
            stdout=stdout
        )
        # Output order isn't guaranteed, so check for parts
        data = stdout.getvalue()
        # Get rid of artifacts like '000000002' to eliminate the differences
        # between different Python versions.
        data = re.sub('0{6,}\d', '', data)
        lion_json = '{"pk": 1, "model": "fixtures_regress.animal", "fields": {"count": 3, "weight": 1.2, "name": "Lion", "latin_name": "Panthera leo"}}'
        emu_json = '{"pk": 10, "model": "fixtures_regress.animal", "fields": {"count": 42, "weight": 1.2, "name": "Emu", "latin_name": "Dromaius novaehollandiae"}}'
        platypus_json = '{"pk": %d, "model": "fixtures_regress.animal", "fields": {"count": 2, "weight": 2.2, "name": "Platypus", "latin_name": "Ornithorhynchus anatinus"}}'
        platypus_json = platypus_json % animal.pk
        self.assertEqual(len(data), len('[%s]' % ', '.join([lion_json, emu_json, platypus_json])))
        self.assertTrue(lion_json in data)
        self.assertTrue(emu_json in data)
        self.assertTrue(platypus_json in data)
    def test_proxy_model_included(self):
        """
        Regression for #11428 - Proxy models aren't included when you dumpdata
        """
        stdout = StringIO()
        # Create an instance of the concrete class
        widget = Widget.objects.create(name='grommet')
        management.call_command(
            'dumpdata',
            'fixtures_regress.widget',
            'fixtures_regress.widgetproxy',
            format='json',
            stdout=stdout
        )
        self.assertEqual(
            stdout.getvalue(),
            """[{"pk": %d, "model": "fixtures_regress.widget", "fields": {"name": "grommet"}}]"""
            % widget.pk
        )
    def test_loaddata_works_when_fixture_has_forward_refs(self):
        """
        Regression for #3615 - Forward references cause fixtures not to load in MySQL (InnoDB)
        """
        management.call_command(
            'loaddata',
            'forward_ref.json',
            verbosity=0,
            commit=False
        )
        self.assertEqual(Book.objects.all()[0].id, 1)
        self.assertEqual(Person.objects.all()[0].id, 4)
    def test_loaddata_raises_error_when_fixture_has_invalid_foreign_key(self):
        """
        Regression for #3615 - Ensure data with nonexistent child key references raises error
        """
        with six.assertRaisesRegex(self, IntegrityError,
                "Problem installing fixture"):
            management.call_command(
                'loaddata',
                'forward_ref_bad_data.json',
                verbosity=0,
                commit=False,
            )
    _cur_dir = os.path.dirname(os.path.abspath(__file__))
    @override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, 'fixtures_1'),
                                     os.path.join(_cur_dir, 'fixtures_2')])
    def test_loaddata_forward_refs_split_fixtures(self):
        """
        Regression for #17530 - should be able to cope with forward references
        when the fixtures are not in the same files or directories.
        """
        management.call_command(
            'loaddata',
            'forward_ref_1.json',
            'forward_ref_2.json',
            verbosity=0,
            commit=False
        )
        self.assertEqual(Book.objects.all()[0].id, 1)
        self.assertEqual(Person.objects.all()[0].id, 4)
    def test_loaddata_no_fixture_specified(self):
        """
        Regression for #7043 - Error is quickly reported when no fixtures is provided in the command line.
        """
        with six.assertRaisesRegex(self, management.CommandError,
                "No database fixture specified. Please provide the path of "
                "at least one fixture in the command line."):
            management.call_command(
                'loaddata',
                verbosity=0,
                commit=False,
            )
    def test_loaddata_not_existant_fixture_file(self):
        """A missing fixture is reported on stdout (at verbosity 2), not raised."""
        stdout_output = StringIO()
        management.call_command(
            'loaddata',
            'this_fixture_doesnt_exist',
            verbosity=2,
            commit=False,
            stdout=stdout_output,
        )
        self.assertTrue("No xml fixture 'this_fixture_doesnt_exist' in" in
                        stdout_output.getvalue())
class NaturalKeyFixtureTests(TestCase):
    """Tests for (de)serializing fixtures whose models use natural keys."""
    def test_nk_deserialize(self):
        """
        Test for ticket #13030 - Python based parser version
        natural keys deserialize with fk to inheriting model
        """
        management.call_command(
            'loaddata',
            'model-inheritance.json',
            verbosity=0,
            commit=False
        )
        management.call_command(
            'loaddata',
            'nk-inheritance.json',
            verbosity=0,
            commit=False
        )
        self.assertEqual(
            NKChild.objects.get(pk=1).data,
            'apple'
        )
        self.assertEqual(
            RefToNKChild.objects.get(pk=1).nk_fk.data,
            'apple'
        )
    def test_nk_deserialize_xml(self):
        """
        Test for ticket #13030 - XML version
        natural keys deserialize with fk to inheriting model
        """
        management.call_command(
            'loaddata',
            'model-inheritance.json',
            verbosity=0,
            commit=False
        )
        management.call_command(
            'loaddata',
            'nk-inheritance.json',
            verbosity=0,
            commit=False
        )
        management.call_command(
            'loaddata',
            'nk-inheritance2.xml',
            verbosity=0,
            commit=False
        )
        self.assertEqual(
            NKChild.objects.get(pk=2).data,
            'banana'
        )
        self.assertEqual(
            RefToNKChild.objects.get(pk=2).nk_fk.data,
            'apple'
        )
    def test_nk_on_serialize(self):
        """
        Check that natural key requirements are taken into account
        when serializing models
        """
        management.call_command(
            'loaddata',
            'forward_ref_lookup.json',
            verbosity=0,
            commit=False
        )
        stdout = StringIO()
        management.call_command(
            'dumpdata',
            'fixtures_regress.book',
            'fixtures_regress.person',
            'fixtures_regress.store',
            verbosity=0,
            format='json',
            use_natural_keys=True,
            stdout=stdout,
        )
        self.assertEqual(
            stdout.getvalue(),
            """[{"pk": 2, "model": "fixtures_regress.store", "fields": {"main": null, "name": "Amazon"}}, {"pk": 3, "model": "fixtures_regress.store", "fields": {"main": null, "name": "Borders"}}, {"pk": 4, "model": "fixtures_regress.person", "fields": {"name": "Neal Stephenson"}}, {"pk": 1, "model": "fixtures_regress.book", "fields": {"stores": [["Amazon"], ["Borders"]], "name": "Cryptonomicon", "author": ["Neal Stephenson"]}}]"""
        )
    def test_dependency_sorting(self):
        """
        Now lets check the dependency sorting explicitly
        It doesn't matter what order you mention the models
        Store *must* be serialized before then Person, and both
        must be serialized before Book.
        """
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Book, Person, Store])]
        )
        self.assertEqual(
            sorted_deps,
            [Store, Person, Book]
        )
    # The following variants permute the input order of the same models; the
    # sorted result must be identical in every case.
    def test_dependency_sorting_2(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Book, Store, Person])]
        )
        self.assertEqual(
            sorted_deps,
            [Store, Person, Book]
        )
    def test_dependency_sorting_3(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Store, Book, Person])]
        )
        self.assertEqual(
            sorted_deps,
            [Store, Person, Book]
        )
    def test_dependency_sorting_4(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Store, Person, Book])]
        )
        self.assertEqual(
            sorted_deps,
            [Store, Person, Book]
        )
    def test_dependency_sorting_5(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Person, Book, Store])]
        )
        self.assertEqual(
            sorted_deps,
            [Store, Person, Book]
        )
    def test_dependency_sorting_6(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Person, Store, Book])]
        )
        self.assertEqual(
            sorted_deps,
            [Store, Person, Book]
        )
    def test_dependency_sorting_dangling(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Person, Circle1, Store, Book])]
        )
        self.assertEqual(
            sorted_deps,
            [Circle1, Store, Person, Book]
        )
    def test_dependency_sorting_tight_circular(self):
        self.assertRaisesMessage(
            CommandError,
            """Can't resolve dependencies for fixtures_regress.Circle1, fixtures_regress.Circle2 in serialized app list.""",
            sort_dependencies,
            [('fixtures_regress', [Person, Circle2, Circle1, Store, Book])],
        )
    def test_dependency_sorting_tight_circular_2(self):
        self.assertRaisesMessage(
            CommandError,
            """Can't resolve dependencies for fixtures_regress.Circle1, fixtures_regress.Circle2 in serialized app list.""",
            sort_dependencies,
            [('fixtures_regress', [Circle1, Book, Circle2])],
        )
    def test_dependency_self_referential(self):
        self.assertRaisesMessage(
            CommandError,
            """Can't resolve dependencies for fixtures_regress.Circle3 in serialized app list.""",
            sort_dependencies,
            [('fixtures_regress', [Book, Circle3])],
        )
    def test_dependency_sorting_long(self):
        self.assertRaisesMessage(
            CommandError,
            """Can't resolve dependencies for fixtures_regress.Circle1, fixtures_regress.Circle2, fixtures_regress.Circle3 in serialized app list.""",
            sort_dependencies,
            [('fixtures_regress', [Person, Circle2, Circle1, Circle3, Store, Book])],
        )
    def test_dependency_sorting_normal(self):
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [Person, ExternalDependency, Book])]
        )
        self.assertEqual(
            sorted_deps,
            [Person, Book, ExternalDependency]
        )
    def test_normal_pk(self):
        """
        Check that normal primary keys still work
        on a model with natural key capabilities
        """
        management.call_command(
            'loaddata',
            'non_natural_1.json',
            verbosity=0,
            commit=False
        )
        management.call_command(
            'loaddata',
            'forward_ref_lookup.json',
            verbosity=0,
            commit=False
        )
        management.call_command(
            'loaddata',
            'non_natural_2.xml',
            verbosity=0,
            commit=False
        )
        books = Book.objects.all()
        self.assertEqual(
            books.__repr__(),
            """[<Book: Cryptonomicon by Neal Stephenson (available at Amazon, Borders)>, <Book: Ender's Game by Orson Scott Card (available at Collins Bookstore)>, <Book: Permutation City by Greg Egan (available at Angus and Robertson)>]"""
        )
class TestTicket11101(TransactionTestCase):
    """Fixture data must be rollback-able in a manually managed transaction."""
    def ticket_11101(self):
        # Helper executed under transaction.commit_manually by the test below.
        management.call_command(
            'loaddata',
            'thingy.json',
            verbosity=0,
            commit=False
        )
        self.assertEqual(Thingy.objects.count(), 1)
        transaction.rollback()
        self.assertEqual(Thingy.objects.count(), 0)
        transaction.commit()
    @skipUnlessDBFeature('supports_transactions')
    def test_ticket_11101(self):
        """Test that fixtures can be rolled back (ticket #11101)."""
        ticket_11101 = transaction.commit_manually(self.ticket_11101)
        ticket_11101()
|
{
"content_hash": "c16c391cdf5e97f7def051898951b44f",
"timestamp": "",
"source": "github",
"line_count": 681,
"max_line_length": 435,
"avg_line_length": 34.177679882525695,
"alnum_prop": 0.5539419978517723,
"repo_name": "chrisfranzen/django",
"id": "55363bc5b76c0ee88d6b3c6dbbf01fad308ac970",
"size": "23325",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/regressiontests/fixtures_regress/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42663"
},
{
"name": "HTML",
"bytes": "95024"
},
{
"name": "JavaScript",
"bytes": "94313"
},
{
"name": "Python",
"bytes": "8216479"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from flask import url_for
from sqlalchemy import *
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.orm import relationship, backref, exc
from sqlalchemy.sql.expression import between
from sqlalchemy.dialects.mysql import INTEGER as Integer
import rfk.database
from rfk.database import Base, UTCDateTime
from rfk.types import ENUM, SET
from rfk.helper import now
class Show(Base):
    """A radio show: a (possibly open-ended) time span with metadata, tags
    and associated users (see UserShow)."""
    __tablename__ = 'shows'
    show = Column(Integer(unsigned=True), primary_key=True, autoincrement=True)
    series_id = Column("series", Integer(unsigned=True), ForeignKey('series.series',
                                                                    onupdate="CASCADE",
                                                                    ondelete="RESTRICT"))
    series = relationship("Series", backref=backref('shows'))
    logo = Column(String(255))
    begin = Column(UTCDateTime, default=now)
    # end is NULL while the show is still running (see end_show()).
    end = Column(UTCDateTime)
    updated = Column(UTCDateTime, default=now)
    name = Column(String(50))
    description = Column(Text)
    # Bitmask of FLAGS values; combined with bitwise AND below.
    flags = Column(Integer(unsigned=True), default=0)
    FLAGS = SET(['DELETED', 'PLANNED', 'UNPLANNED', 'RECORD'])
    @hybrid_property
    def length(self):
        # Duration as a timedelta (or SQL expression on the class level).
        return self.end - self.begin
    @hybrid_method
    def contains(self, point):
        # True if `point` falls within [begin, end).
        return (self.begin <= point) & (point < self.end)
    @hybrid_method
    def intersects(self, other):
        # NOTE(review): only checks whether other's endpoints fall inside
        # this show; a show fully enclosing this one would not match —
        # confirm that is the intended semantics.
        return self.contains(other.begin) | self.contains(other.end)
    def end_show(self):
        """ends the Show
        raises exception if the show is planned since it doesn't need to be ended"""
        if self.flags & Show.FLAGS.PLANNED:
            raise Exception
        self.end = now()
        rfk.database.session.flush()
    def add_tags(self, tags):
        """adds a list of Tags to the Show"""
        for tag in tags:
            self.add_tag(tag=tag)
    def sync_tags(self, tags):
        # Make the show's tag set exactly `tags`: add missing ones and
        # delete associations that are no longer wanted.
        old_tags = []
        for tag in self.tags:
            old_tags.append(tag.tag)
        for tag in tags:
            if tag in old_tags:
                old_tags.remove(tag)
            self.add_tag(tag=tag)
        for tag in old_tags:
            ShowTag.query.filter(ShowTag.show == self,
                                 ShowTag.tag == tag).delete()
        rfk.database.session.flush()
    def add_tag(self, tag=None, name=None):
        """adds a Tag to the Show either by object or by identifier

        Returns True if the tag was newly attached, False if it was already
        present or could not be resolved by name."""
        assert tag or name
        if tag is None:
            tag = Tag.get_tag(name)
            if tag is None:
                return False
        try:
            ShowTag.query.filter(ShowTag.show == self,
                                 ShowTag.tag == tag).one()
            return False
        except exc.NoResultFound:
            self.tags.append(ShowTag(tag))
            rfk.database.session.flush()
            return True
    def add_user(self, user, role=None):
        # Associate `user` with this show (default role: 'host'); updates
        # the role of an existing association instead of duplicating it.
        if role is None:
            role = Role.get_role('host')
        try:
            us = UserShow.query.filter(UserShow.user == user,
                                       UserShow.show == self).one()
            if us.role != role:
                us.role = role
                rfk.database.session.flush()
            return us
        except exc.NoResultFound:
            us = UserShow(show=self, user=user, role=role)
            rfk.database.session.add(us)
            rfk.database.session.flush()
            return us
    def remove_user(self, user):
        """removes the association to user"""
        UserShow.query.filter(UserShow.user == user,
                              UserShow.show == self).delete()
    def get_usershow(self, user):
        # Return the UserShow association for `user`, or None.
        try:
            return UserShow.query.filter(UserShow.user == user,
                                         UserShow.show == self).one()
        except exc.NoResultFound:
            return None
    @staticmethod
    def get_current_show(user=None, only_planned=False):
        """returns the current show"""
        clauses = []
        # NOTE(review): uses naive datetime.utcnow() while the rest of the
        # module uses the now() helper — confirm both produce comparable
        # (UTC) values for the UTCDateTime columns.
        clauses.append((between(datetime.utcnow(), Show.begin, Show.end)) | (Show.end == None))
        clauses.append(UserShow.user == user)
        if only_planned:
            # NOTE(review): equality test, unlike the bitwise AND used
            # elsewhere — only matches shows whose flags are exactly
            # PLANNED; confirm intended.
            clauses.append(Show.flags == Show.FLAGS.PLANNED)
        shows = Show.query.join(UserShow).filter(*clauses).all()
        if len(shows) == 1:
            return shows[0]
        elif len(shows) > 1:
            # Prefer a planned show when several overlap.
            for show in shows:
                if show.flags & Show.FLAGS.PLANNED:
                    return show
            return shows[0]
        else:
            return None
    @staticmethod
    def get_active_show():
        # The show somebody is currently streaming for, or None.
        try:
            return Show.query.join(UserShow).filter(UserShow.status == UserShow.STATUS.STREAMING).one()
        except exc.NoResultFound:
            return None
    def get_active_user(self):
        # The user currently streaming this show, or None.
        try:
            return UserShow.query.filter(UserShow.show == self,
                                         UserShow.status == UserShow.STATUS.STREAMING).one().user
        except exc.NoResultFound:
            return None
    def get_logo(self):
        """return the logourl for this show
        falls back to serieslogo if set"""
        if self.logo is not None:
            return self.logo
        elif self.series is not None:
            return self.series.logo
    def get_duration(self):
        # Length in seconds; 0 while the show has no end yet.
        if self.end is not None:
            return (self.end - self.begin).total_seconds()
        else:
            return 0
    @property
    def link(self):
        # URL of this show's detail page.
        return url_for('show.show_view', show=self.show)
    def is_fulfilled(self):
        """ this function returns True under the following circumstances:
        somebody streamed or is streaming, or
        this show has not ended yet
        """
        if self.end > now():
            return True
        for user in self.users:
            if user.status > UserShow.STATUS.UNKNOWN:
                return True
        return False
    def __repr__(self):
        return "<rfk.database.show.Show id=%d flags=%s name=%s >" % (self.show, Show.FLAGS.name(self.flags), self.name)
"""Show Indices"""
Index('show_begin_idx', Show.begin)
Index('show_end_idx', Show.end)
class UserShow(Base):
    """connection between users and show"""
    __tablename__ = 'user_shows'
    # surrogate primary key
    userShow = Column(Integer(unsigned=True), primary_key=True, autoincrement=True)
    # deleting a user cascades and removes their show associations
    user_id = Column("user", Integer(unsigned=True), ForeignKey('users.user',
                                                                onupdate="CASCADE",
                                                                ondelete="CASCADE"), nullable=False)
    user = relationship("User", backref=backref('shows', cascade="all, delete-orphan"))
    # deleting a show cascades and removes its user associations
    show_id = Column("show", Integer(unsigned=True),
                     ForeignKey('shows.show',
                                onupdate="CASCADE",
                                ondelete="CASCADE"), nullable=False)
    show = relationship("Show", backref=backref('users', cascade="all, delete-orphan"))
    # roles are shared rows; RESTRICT prevents deleting a role still in use
    role_id = Column("role", Integer(unsigned=True),
                     ForeignKey('roles.role',
                                onupdate="CASCADE",
                                ondelete="RESTRICT"), nullable=False)
    role = relationship("Role")
    # streaming state of this user for this show; compared with < / > elsewhere
    # (see Show.is_fulfilled), so the enum order is significant
    status = Column(Integer(unsigned=True), default=0)
    STATUS = ENUM(['UNKNOWN', 'STREAMING', 'STREAMED'])
class Tag(Base):
    # A free-form label that can be attached to shows (via ShowTag).
    __tablename__ = 'tags'
    tag = Column(Integer(unsigned=True), primary_key=True, autoincrement=True)
    name = Column(String(25), nullable=False, unique=True)
    icon = Column(String(30))
    description = Column(Text, nullable=False)
    @staticmethod
    def get_tag(name):
        """Return the Tag with the given name, creating it if it is missing.

        A newly created tag uses its name as the description.
        """
        try:
            return Tag.query.filter(Tag.name == name).one()
        except exc.NoResultFound:
            tag = Tag(name=name, description=name)
            rfk.database.session.add(tag)
            rfk.database.session.flush()
            return tag
    @staticmethod
    def parse_tags(tags):
        """parses a space separated list of tags and returns a list of Tag objects"""
        def unique(seq):
            # order-preserving de-duplication (seen_add returns None, so the
            # ``not seen_add(x)`` clause is always true and only mutates seen)
            seen = set()
            seen_add = seen.add
            return [x for x in seq if x not in seen and not seen_add(x)]
        r = []
        if tags is not None and len(tags) > 0:
            for str_tag in unique(tags.strip().split(' ')):
                # multiple consecutive spaces yield empty fragments; skip them
                if str_tag == '':
                    continue
                tag = Tag.get_tag(str_tag)
                r.append(tag)
        return r
class ShowTag(Base):
    """connection between Shows and Tags"""
    __tablename__ = 'show_tags'
    show_tag = Column(Integer(unsigned=True), primary_key=True, autoincrement=True)
    # deleting a show removes its tag links
    show_id = Column("show", Integer(unsigned=True),
                     ForeignKey('shows.show',
                                onupdate="CASCADE",
                                ondelete="CASCADE"), nullable=False)
    show = relationship("Show", backref=backref('tags', cascade="all, delete-orphan"))
    # RESTRICT prevents deleting a tag that is still attached to a show
    tag_id = Column("tag", Integer(unsigned=True),
                    ForeignKey('tags.tag',
                               onupdate="CASCADE",
                               ondelete="RESTRICT"), nullable=False)
    tag = relationship("Tag", backref=backref('shows', cascade="all, delete-orphan"))
    def __init__(self, tag):
        # convenience constructor: the show side is set by appending this
        # object to Show.tags (see Show.add_tag)
        self.tag = tag
class Role(Base):
    # A named role a user can have on a show (e.g. 'host', see Show.add_user).
    __tablename__ = 'roles'
    role = Column(Integer(unsigned=True), primary_key=True, autoincrement=True)
    name = Column(String(50))
    @staticmethod
    def get_role(name):
        """Return the Role with the given name, creating it if it is missing."""
        try:
            return Role.query.filter(Role.name == name).one()
        except exc.NoResultFound:
            role = Role(name=name)
            rfk.database.session.add(role)
            rfk.database.session.flush()
            return role
class Series(Base):
    # A recurring series that shows can belong to; provides a fallback logo
    # (see Show.get_logo).
    __tablename__ = 'series'
    series = Column(Integer(unsigned=True), primary_key=True, autoincrement=True)
    # owner; SET NULL keeps the series alive when the user is deleted
    user_id = Column("user", Integer(unsigned=True), ForeignKey('users.user',
                                                                onupdate="CASCADE",
                                                                ondelete="SET NULL"))
    user = relationship("User", backref=backref('series'))
    public = Column(Boolean)
    name = Column(String(50))
    description = Column(String(255))
    logo = Column(String(255))
|
{
"content_hash": "1b021b8225afe20db7f38b6c6eb8702b",
"timestamp": "",
"source": "github",
"line_count": 294,
"max_line_length": 119,
"avg_line_length": 35.697278911564624,
"alnum_prop": 0.5541686517389233,
"repo_name": "buckket/weltklang",
"id": "9debf5b6255fc0e8eeebcc62f8a2426c8076ca82",
"size": "10495",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/rfk/database/show.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42171"
},
{
"name": "HTML",
"bytes": "217875"
},
{
"name": "JavaScript",
"bytes": "24784"
},
{
"name": "Makefile",
"bytes": "90"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "278815"
},
{
"name": "Shell",
"bytes": "263"
}
],
"symlink_target": ""
}
|
class Credentials(object):
    """Plain value object holding OpenStack Swift authentication settings.

    Every constructor argument is stored verbatim as an attribute of the
    same name; the class performs no validation or processing.
    """

    _FIELDS = ('authurl', 'user', 'password', 'tenant_name', 'region',
               'endpoint_type', 'auth_version', 'domain_id', 'domain_name',
               'tenant_id', 'user_id', 'user_domain_id', 'user_domain_name',
               'project_id', 'project_name', 'project_domain_id',
               'project_domain_name')

    def __init__(self, authurl, user, password, tenant_name, region,
                 endpoint_type, auth_version, domain_id, domain_name, tenant_id,
                 user_id, user_domain_id, user_domain_name, project_id,
                 project_name, project_domain_id, project_domain_name):
        # Copy each named argument onto the instance, one attribute per field.
        values = locals()
        for field in self._FIELDS:
            setattr(self, field, values[field])
|
{
"content_hash": "09f633640bb5e6157b22db6075b9b1ba",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 75,
"avg_line_length": 43.86363636363637,
"alnum_prop": 0.6269430051813472,
"repo_name": "wal-e/wal-e",
"id": "9c84d6ce993c7ac5440fd80d41de3097a6cc3fed",
"size": "965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wal_e/blobstore/swift/credentials.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "986"
},
{
"name": "Python",
"bytes": "381281"
}
],
"symlink_target": ""
}
|
"""The Generalized Normal (Generalized Gaussian) distribution class."""
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import random as tfp_random
from tensorflow_probability.python.bijectors import identity as identity_bijector
from tensorflow_probability.python.bijectors import softplus as softplus_bijector
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import gamma
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import parameter_properties
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import samplers
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.math import special
__all__ = [
'GeneralizedNormal',
]
class GeneralizedNormal(distribution.AutoCompositeTensorDistribution):
  """The Generalized Normal distribution.
  The Generalized Normal (or Generalized Gaussian) generalizes the Normal
  distribution with an additional shape parameter. It is parameterized by
  location `loc`, scale `scale` and shape `power`.
  #### Mathematical details
  The probability density function (pdf) is,
  ```none
  pdf(x; loc, scale, power) = 1 / (2 * scale * Gamma(1 + 1 / power)) *
                              exp(-(|x - loc| / scale) ^ power)
  ```
  where `loc` is the mean, `scale` is the scale, and, `power` is the shape
  parameter. If the power is above two, the distribution becomes platykurtic.
  A power equal to two results in a Normal distribution. A power smaller than
  two produces a leptokurtic (heavy-tailed) distribution. Mean and scale behave
  the same way as in the equivalent Normal distribution.
  See
  https://en.wikipedia.org/w/index.php?title=Generalized_normal_distribution&oldid=954254464
  for the definitions used here, including CDF, variance and entropy. See
  https://sccn.ucsd.edu/wiki/Generalized_Gaussian_Probability_Density_Function
  for the sampling method used here.
  #### Examples
  ```python
  import tensorflow_probability as tfp
  tfd = tfp.distributions
  dist = tfd.GeneralizedNormal(loc=3.0, scale=2.0, power=1.0)
  dist2 = tfd.GeneralizedNormal(loc=0, scale=[3.0, 4.0], power=[[2.0], [3.0]])
  ```
  """
  def __init__(self,
               loc,
               scale,
               power,
               validate_args=False,
               allow_nan_stats=True,
               name='GeneralizedNormal'):
    """Construct Generalized Normal distributions.
    The Generalized Normal is parametrized with mean `loc`, scale
    `scale` and shape parameter `power`. The parameters must be shaped
    in a way that supports broadcasting (e.g. `loc + scale` is a valid
    operation).
    Args:
      loc: Floating point tensor; the means of the distribution(s).
      scale: Floating point tensor; the scale of the
        distribution(s). Must contain only positive values.
      power: Floating point tensor; the shape parameter of the distribution(s).
        Must contain only positive values. `loc`, `scale` and `power` must have
        compatible shapes for broadcasting.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        statistics (e.g., mean, mode, variance) use the value "`NaN`" to
        indicate the result is undefined. When `False`, an exception is raised
        if one or more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    Raises:
      TypeError: if `loc`, `scale`, and `power` have different `dtype`.
    """
    parameters = dict(locals())
    with tf.name_scope(name) as name:
      # All three parameters share one common dtype (float32 by default).
      dtype = dtype_util.common_dtype([loc, scale, power],
                                      dtype_hint=tf.float32)
      # convert_nonref_to_tensor keeps tf.Variables as references so that
      # parameter updates are picked up by later calls.
      self._loc = tensor_util.convert_nonref_to_tensor(
          loc, dtype=dtype, name='loc')
      self._scale = tensor_util.convert_nonref_to_tensor(
          scale, dtype=dtype, name='scale')
      self._power = tensor_util.convert_nonref_to_tensor(
          power, dtype=dtype, name='power')
      super(GeneralizedNormal, self).__init__(
          dtype=dtype,
          reparameterization_type=reparameterization.FULLY_REPARAMETERIZED,
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          name=name)
  @classmethod
  def _parameter_properties(cls, dtype, num_classes=None):
    # pylint: disable=g-long-lambda
    # scale and power must be strictly positive, hence the Softplus
    # constraining bijector bounded away from zero by the dtype's epsilon.
    return dict(
        loc=parameter_properties.ParameterProperties(),
        scale=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype)))),
        power=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype)))))
    # pylint: enable=g-long-lambda
  @property
  def loc(self):
    """Distribution parameter for the mean."""
    return self._loc
  @property
  def scale(self):
    """Distribution parameter for scale."""
    return self._scale
  @property
  def power(self):
    """Distribution parameter for shape."""
    return self._power
  def _event_shape_tensor(self):
    # Scalar event shape: this is a univariate distribution.
    return tf.constant([], dtype=tf.int32)
  def _event_shape(self):
    return tf.TensorShape([])
  def _sample_n(self, n, seed=None, name=None):
    n = ps.convert_to_shape_tensor(n, name='num', dtype=tf.int32)
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    power = tf.convert_to_tensor(self.power)
    batch_shape = self._batch_shape_tensor(loc=loc, scale=scale, power=power)
    result_shape = ps.concat([[n], batch_shape], axis=0)
    ipower = tf.broadcast_to(tf.math.reciprocal(power), batch_shape)
    # Sampling scheme: draw G ~ Gamma(1/power, 1); then G^(1/power) has the
    # magnitude distribution of a standard generalized normal, and a
    # Rademacher (+/-1) draw supplies the sign (the density is symmetric).
    gamma_dist = gamma.Gamma(ipower, 1.)
    rademacher_seed, gamma_seed = samplers.split_seed(seed, salt='GenNormal')
    gamma_sample = gamma_dist.sample(n, seed=gamma_seed)
    binary_sample = tfp_random.rademacher(result_shape, dtype=self.dtype,
                                          seed=rademacher_seed)
    sampled = (binary_sample * tf.math.pow(tf.abs(gamma_sample), ipower))
    return loc + scale * sampled
  def _log_prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    power = tf.convert_to_tensor(self.power)
    one = tf.constant(1., dtype=self.dtype)
    two = tf.constant(2., dtype=self.dtype)
    # log Z = log(2 * scale * Gamma(1 + 1/power)); see class docstring pdf.
    log_normalization = (tf.math.log(two) + tf.math.log(scale) +
                         tf.math.lgamma(one + tf.math.reciprocal(power)))
    log_unnormalized = -tf.pow(tf.abs(x - loc) / scale, power)
    return log_unnormalized - log_normalization
  def _cdf_zero_mean(self, x):
    # CDF of the loc=0 distribution, expressed via the regularized upper
    # incomplete gamma function of |x/scale|^power.
    scale = tf.convert_to_tensor(self.scale)
    power = tf.convert_to_tensor(self.power)
    zero = tf.constant(0., dtype=self.dtype)
    half = tf.constant(0.5, dtype=self.dtype)
    one = tf.constant(1., dtype=self.dtype)
    # Double tf.where to avoid incorrect gradient at x == 0.
    x_is_zero = tf.equal(x, zero)
    safe_x = tf.where(x_is_zero, one, x)
    half_gamma = half * tf.math.igammac(
        tf.math.reciprocal(power),
        tf.pow(tf.abs(safe_x) / scale, power))
    return tf.where(
        x_is_zero,
        half,
        tf.where(x > zero, one - half_gamma, half_gamma),
    )
  def _cdf(self, x):
    loc = tf.convert_to_tensor(self.loc)
    return self._cdf_zero_mean(x - loc)
  def _survival_function(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # sf(x) = cdf(-x) for loc == 0, because distribution is symmetric.
    return self._cdf_zero_mean(loc - x)
  def _quantile(self, p):
    loc = tf.convert_to_tensor(self.loc)
    scale = tf.convert_to_tensor(self.scale)
    power = tf.convert_to_tensor(self.power)
    ipower = tf.math.reciprocal(power)
    # Invert the CDF separately on each side of the median: the lower tail
    # uses the inverse of igammac (matching _cdf_zero_mean), the upper tail
    # the inverse of igamma, exploiting symmetry about loc.
    quantile = tf.where(
        p < 0.5,
        loc - tf.math.pow(
            special.igammacinv(ipower, 2. * p), ipower) * scale,
        loc + tf.math.pow(
            special.igammainv(ipower, 2. * p - 1.), ipower) * scale)
    return quantile
  def _entropy(self):
    # entropy = 1/power + log(2 * scale * Gamma(1 + 1/power)).
    scale = tf.convert_to_tensor(self.scale)
    power = tf.convert_to_tensor(self.power)
    ipower = tf.math.reciprocal(power)
    one = tf.constant(1., dtype=self.dtype)
    logtwo = tf.constant(np.log(2.), dtype=self.dtype)
    entropy = ipower + (logtwo + tf.math.log(scale) +
                        tf.math.lgamma(one + ipower))
    return tf.broadcast_to(entropy,
                           self._batch_shape_tensor(scale=scale, power=power))
  def _mean(self):
    loc = tf.convert_to_tensor(self.loc)
    return tf.broadcast_to(loc, self._batch_shape_tensor(loc=loc))
  def _variance(self):
    ipower = tf.math.reciprocal(tf.convert_to_tensor(self.power))
    two = tf.constant(2., dtype=self.dtype)
    three = tf.constant(3., dtype=self.dtype)
    # Computed in log space for numerical stability:
    # var = scale^2 * Gamma(3/power) / Gamma(1/power).
    log_var = (two * tf.math.log(self.scale) +
               tf.math.lgamma(three * ipower) - tf.math.lgamma(ipower))
    var = tf.math.exp(log_var)
    return tf.broadcast_to(var, self._batch_shape_tensor())
  # The density is symmetric and unimodal about loc, so mode == mean.
  _mode = _mean
  def _default_event_space_bijector(self):
    return identity_bijector.Identity(validate_args=self.validate_args)
  def _parameter_control_dependencies(self, is_init):
    assertions = []
    if is_init:
      # _batch_shape() will raise error if it can statically prove that `loc`,
      # `scale`, and `power` have incompatible shapes.
      try:
        self._batch_shape()
      except ValueError:
        raise ValueError(
            'Arguments `loc`, `scale` and `power` must have compatible shapes; '
            'loc.shape={}, scale.shape={}, power.shape={}.'.format(
                self.loc.shape, self.scale.shape, self.power.shape))
      # We don't bother checking the shapes in the dynamic case because
      # all member functions access the three arguments anyway.
    if not self.validate_args:
      assert not assertions  # Should never happen.
      return []
    if is_init != tensor_util.is_ref(self.scale):
      assertions.append(assert_util.assert_positive(
          self.scale, message='Argument `scale` must be positive.'))
    if is_init != tensor_util.is_ref(self.power):
      assertions.append(assert_util.assert_positive(
          self.power, message='Argument `power` must be positive.'))
    return assertions
|
{
"content_hash": "e1e125f84bc51d44c26fb12b70969275",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 92,
"avg_line_length": 39.57090909090909,
"alnum_prop": 0.666421613673957,
"repo_name": "tensorflow/probability",
"id": "79c245a24f3e757503ba7ce20641daccf1c4d0d7",
"size": "11560",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_probability/python/distributions/generalized_normal.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "55552121"
},
{
"name": "Python",
"bytes": "17339674"
},
{
"name": "Shell",
"bytes": "24852"
},
{
"name": "Starlark",
"bytes": "663851"
}
],
"symlink_target": ""
}
|
"""
These meta-datasources operate on :class:`revscoring.Datasource`'s that returns
a list of strings (i.e. "tokens") and produces a list of ngram/skipgram
sequences.
.. autoclass:: revscoring.datasources.meta.gramming.gram
"""
from ..datasource import Datasource
class gram(Datasource):
    """
    Datasource that turns a sequence of items into ngram/skipgram sequences.

    :Parameters:
        items_datasource : :class:`revscoring.Datasource`
            A datasource that generates a list of some item
        grams : `list` ( `tuple` ( `int` ) )
            A list of ngram and/or skipgram offset tuples to produce
        name : `str`
            A name for the datasource.
    """
    def __init__(self, items_datasource, grams=[(0,)], name=None):
        resolved_name = self._format_name(name, [items_datasource, grams])
        super().__init__(resolved_name, self.process,
                         depends_on=[items_datasource])
        self.grams = grams

    def process(self, tokens):
        # Materialize the generator so downstream consumers get a list.
        return [sequence for sequence in gram_tokens(tokens, grams=self.grams)]
def gram_tokens(items, grams=None):
    """Yield ngram/skipgram sequences drawn from *items*.

    Each tuple in *grams* lists index offsets relative to the current
    position: ``(0,)`` yields the item itself (not wrapped in a tuple);
    any other tuple yields a tuple of the items at those offsets, skipped
    when the largest offset would run past the end of *items*.

    :Parameters:
        items : `list`
            The token sequence to expand.
        grams : `list` ( `tuple` ( `int` ) )
            Offset tuples to produce; defaults to ``[(0,)]`` (unigrams).

    FIX: the default was the mutable literal ``[(0,)]`` in the signature;
    use a ``None`` sentinel instead so the default cannot be shared/mutated
    across calls.
    """
    if grams is None:
        grams = [(0,)]
    for i in range(len(items)):
        for offsets in grams:
            if offsets == (0,):
                # Plain unigram: yield the bare item, not a 1-tuple.
                yield items[i]
            elif len(items) > i + max(offsets):
                yield tuple(items[i + offset] for offset in offsets)
|
{
"content_hash": "8ec4b42795c64d1ecefc0f0246c450e9",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 79,
"avg_line_length": 32.05,
"alnum_prop": 0.6006240249609984,
"repo_name": "yafeunteun/wikipedia-spam-classifier",
"id": "84a2bb1b8185dbf6ba1717b5133a29d64473cdc4",
"size": "1282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "revscoring/revscoring/datasources/meta/gramming.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7262"
},
{
"name": "Jupyter Notebook",
"bytes": "971575"
},
{
"name": "Makefile",
"bytes": "7446"
},
{
"name": "Python",
"bytes": "796831"
},
{
"name": "Shell",
"bytes": "132"
}
],
"symlink_target": ""
}
|
from .models_BioCyc_io import models_BioCyc_io
from .models_BioCyc_dependencies import models_BioCyc_dependencies
from .models_COBRA_query import models_COBRA_query
from .models_BioCyc_dependencies import models_BioCyc_dependencies
import copy
class models_BioCyc_execute(models_BioCyc_io):
    def execute_convertAndMap_BioCycRegulation2COBRA(
        self,
        ):
        '''Placeholder: convert and map BioCyc regulation to COBRA model ids.

        TODO(review): not implemented; related logic lives in
        convertAndMap_BioCycTranscriptionFactor2COBRA and the join_* methods.
        '''
        pass;
def join_BioCyc2COBRAregulationWithCOBRAinteractions(self,
BioCyc2COBRA_regulation,
COBRA_interaction,
BioCyc_alt_id = {},
COBRA_alt_id = {},
COBRA_alt_id2 = {},
deformat_met_id_I = True
):
'''
return a list mapped and unmapped components
INPUT:
BioCyc2COBRA_regulation = [{left:[string],right:[string],mode:[string],
parent_classes:[string],mechanism:[string]},
{left_EcoCyc:[string],right_EcoCyc:[string]]
COBRA_interaction = [{left:[string],right:[string],mode:[string],
parent_classes:[string],mechanism:[string]}]
BioCyc_alt_id = {name:{'synonym':[],'common_name':[],'accession_1':[],'accession_2':[]}}
output from get_alternativeGeneIdentifiers_modelsBioCycPolymerSegments
COBRA_alt_id = {rxn_id:'pathways':[],'stoichiometry':[]}}
output from get_rowsDict_modelID_dataStage02PhysiologyModelPathways
convert_netRxnDict2rxnNetRxnDict
COBRA_alt_id2 = {bnumber:'bnumber':'','gene_name':[]}}
OUTPUT:
data_O = [{left:[string],right:[string],mode:[string],parent_classes:[string]}]
'''
from .models_COBRA_dependencies import models_COBRA_dependencies
COBRA_dependencies = models_COBRA_dependencies();
def deformatAndConvert_metID(met_id_I):
met_id_O = None;
if '_c' in met_id_I:
met_id_O = COBRA_dependencies.deformat_metid(met_id_I)\
.replace('13dpg','23dpg')\
.replace('3pg','Pool_2pg_3pg')\
.replace('glycogen','adpglc')\
.replace('uacgam','udpglcur');
return met_id_O;
data_tmp = []
#BioCyc
for row in BioCyc2COBRA_regulation:
if not row['used_']: continue;
unique = {
#'left':row['left'],
#'right':row['right'],
'mode':row['mode'],
'mechanism':row['mechanism'],
#'name':row['name'],
'parent_classes':row['parent_classes']
};
#BioCyc Left identifiers
left_ids=[];
if type(row['left'])!=type([]) and row['left'] in BioCyc_alt_id.keys():
left_alt_ids = list(set(BioCyc_alt_id[row['left']]['common_name']+\
BioCyc_alt_id[row['left']]['synonym']+\
[row['left']]))
left_ids.extend(left_alt_ids)
elif type(row['left_EcoCyc'])!=type([]) and row['left_EcoCyc'] in BioCyc_alt_id.keys():
left_alt_ids = list(set(BioCyc_alt_id[row['left_EcoCyc']]['common_name']+\
BioCyc_alt_id[row['left_EcoCyc']]['synonym']+\
[row['left_EcoCyc']]))
left_ids.extend(left_alt_ids)
elif type(row['left'])!=type([]) and row['left'] in COBRA_alt_id.keys():
left_ids.append(row['left'])
left_ids.extend(COBRA_alt_id[row['left']]['pathways'])
elif row['left']:
left_ids.append(row['left'])
if row['left']:
met_id_left = deformatAndConvert_metID(row['left'])
if met_id_left:
left_ids.append(met_id_left)
#BioCyc Right identifiers
right_ids=[];
if type(row['right'])!=type([]) and row['right'] in BioCyc_alt_id.keys():
right_alt_ids = list(set(BioCyc_alt_id[row['right']]['common_name']+\
BioCyc_alt_id[row['right']]['synonym']+\
[row['right']]))
right_ids.extend(right_alt_ids)
elif type(row['right_EcoCyc'])!=type([]) and row['right_EcoCyc'] in BioCyc_alt_id.keys():
right_alt_ids = list(set(BioCyc_alt_id[row['right_EcoCyc']]['common_name']+\
BioCyc_alt_id[row['right_EcoCyc']]['synonym']+\
[row['right_EcoCyc']]))
right_ids.extend(right_alt_ids)
elif type(row['right'])!=type([]) and row['right'] in COBRA_alt_id.keys():
right_ids.append(row['right'])
right_ids.extend(COBRA_alt_id[row['right']]['pathways'])
elif row['right']:
right_ids.append(row['right'])
if row['right']:
met_id_right = deformatAndConvert_metID(row['right'])
if met_id_right:
right_ids.append(met_id_right)
#Flatten left and right identifiers
for l in left_ids:
for r in right_ids:
tmp = {}
tmp['left'] = l;
tmp['right'] = r;
tmp.update(unique);
data_tmp.append(tmp);
#COBRA
for row in COBRA_interaction:
unique = {
#'left':row['left'],
#'right':row['right'],
'mode':row['mode'],
'mechanism':row['mechanism'],
#'name':'',
'parent_classes':row['parent_classes']
};
left_ids=[];
left_ids.append(row['left'])
if row['left'] in COBRA_alt_id2.keys():
left_alt_ids = list(set(COBRA_alt_id2[row['left']]['gene_name']))
left_ids.extend(left_alt_ids)
met_id_left = deformatAndConvert_metID(row['left'])
if met_id_left:
left_ids.append(met_id_left)
right_ids=[];
right_ids.append(row['right'])
if row['right'] in COBRA_alt_id2.keys():
left_alt_ids = list(set(COBRA_alt_id2[row['right']]['gene_name']))
met_id_right = deformatAndConvert_metID(row['right'])
if met_id_right:
right_ids.append(met_id_right)
#Flatten left and right identifiers
for l in left_ids:
for r in right_ids:
tmp = {}
tmp['left'] = l;
tmp['right'] = r;
tmp.update(unique);
data_tmp.append(tmp);
#remove duplicate entries
#(NOTE: only works because each dictionary is constructed identically)
data_O = [];
for row in data_tmp:
if not row in data_O:
data_O.append(row);
return data_O;
def update_BioCyc2COBRAregulation_mappings(self,
BioCyc2COBRA_regulation_all,
BioCyc2COBRA_met_mappings,
BioCyc2COBRA_rxn_mappings,
BioCyc_exclusion_names=[]
):
'''Update the listDict of BioCyc2COBRA_regulation with
manually mapped entries
INPUT:
BioCyc2COBRA_regulation_all = [{}]
BioCyc2COBRA_met_mappings = [{'BioCyc':[string],'BiGG':[string],'used_':[boolean],'comment_':[string]}]
BioCyc2COBRA_rxn_mappings = [{'BioCyc':[string],'BiGG':[string],'used_':[boolean],'comment_':[string]}]
BioCyc_exclusion_names = []
OUTPUT:
BioCyc2COBRA_regulation_mapped = [{}]
'''
#add in metabolite mappings
BioCyc2COBRA_regulation_all_1 = [];
for d in BioCyc2COBRA_regulation_all:
if d['name'] in BioCyc_exclusion_names: continue;
d['used_']=True;
d['comment_']=None;
#metabolites
if d['left_EcoCyc'] in BioCyc2COBRA_met_mappings.keys():
for row in BioCyc2COBRA_met_mappings[d['left_EcoCyc']]:
tmp = copy.copy(d)
#check for null mappings
if row['used_'] is None or row['used_'] == "":
if d not in BioCyc2COBRA_regulation_all_1:
BioCyc2COBRA_regulation_all_1.append(d);
#check for false mappings
elif row['BiGG']==tmp['left'] and \
(row['used_'] == "FALSE" or not row['used_']):
tmp['used_']=row['used_']
tmp['comment_']=row['comment_']
BioCyc2COBRA_regulation_all_1.append(tmp);
#add in true mappings
elif row['used_'] == "TRUE" or row['used_']:
tmp['left']=row['BiGG']
tmp['comment_']=row['comment_']
BioCyc2COBRA_regulation_all_1.append(tmp);
else:
if d not in BioCyc2COBRA_regulation_all_1:
BioCyc2COBRA_regulation_all_1.append(d);
else:
BioCyc2COBRA_regulation_all_1.append(d);
#add in reaction mappings
BioCyc2COBRA_regulation_all_2 = [];
for d in BioCyc2COBRA_regulation_all_1:
#reactions
if d['right_EcoCyc'] in BioCyc2COBRA_rxn_mappings.keys():
for row in BioCyc2COBRA_rxn_mappings[d['right_EcoCyc']]:
tmp = copy.copy(d)
#check for null mappings
if row['used_'] is None or row['used_'] == "":
if d not in BioCyc2COBRA_regulation_all_2:
BioCyc2COBRA_regulation_all_2.append(d);
#check for false mappings
elif row['BiGG']==tmp['right'] and \
(row['used_'] == "FALSE" or not row['used_']):
tmp['used_']=row['used_']
tmp['comment_']=row['comment_']
BioCyc2COBRA_regulation_all_2.append(tmp);
#add in true mappings
elif row['used_'] == "TRUE" or row['used_']:
tmp['right']=row['BiGG']
tmp['comment_']=row['comment_']
BioCyc2COBRA_regulation_all_2.append(tmp);
else:
if d not in BioCyc2COBRA_regulation_all_2:
BioCyc2COBRA_regulation_all_2.append(d);
else:
BioCyc2COBRA_regulation_all_2.append(d);
#remove duplicate entries
#(NOTE: only works because each dictionary is constructed identically)
BioCyc2COBRA_regulation_all = [];
for row in BioCyc2COBRA_regulation_all_2:
if not row in BioCyc2COBRA_regulation_all:
BioCyc2COBRA_regulation_all.append(row);
return BioCyc2COBRA_regulation_all;
    def join_BioCyc2COBRA_regulationAndTranscriptionFactors(
        self,
        BioCyc2COBRA_regulation_I,
        BioCyc2COBRA_TFs_I
        ):
        '''Join converted and mapped BioCyc regulation and Transcription factor reactions
        INPUT:
        BioCyc2COBRA_regulation_I = output from convertAndMap_BioCycRegulation2COBRA
        BioCyc2COBRA_TFs_I = output from convertAndMap_BioCycTranscriptionFactor2COBRA
            (dict keyed by regulator name -> list of TF records with keys
            'ligands', 'genes', 'tu', 'mode' -- inferred from usage below;
            TODO confirm against the producing method)
        OUTPUT:
        de-duplicated list of regulation dicts with keys left/left_EcoCyc,
        right/right_EcoCyc, regulator, regulated_entity, mode, mechanism,
        parent_classes, name
        '''
        BioCyc2COBRA_regulation_all = [];
        #iterate through each row of regulation
        for row in BioCyc2COBRA_regulation_I:
            unique = {
                'regulator':row['regulator'],
                'regulated_entity':row['regulated_entity'],
                'mode':row['mode'],
                'mechanism':row['mechanism'],
                'name':row['name'],
                'parent_classes':row['parent_classes']
                }
            # NOTE(review): this initial tmp is overwritten in every branch
            # below before use; it appears to be dead initialization.
            tmp = {
                'left_EcoCyc':[],
                'left':[],
                'right_EcoCyc':[],
                'right':[],
                }
            if row['regulator'] in BioCyc2COBRA_TFs_I.keys():
                # regulator is a known TF: expand into four relation types
                for reg in BioCyc2COBRA_TFs_I[row['regulator']]:
                    # (1) ligand -> transcription unit (protein-ligand binding)
                    for i in range(len(reg['ligands']['BioCyc_name'])):
                        tmp = {
                            'left_EcoCyc':reg['ligands']['BioCyc_name'][i],
                            'left':reg['ligands']['COBRA_met_id'][i],
                            'right_EcoCyc':reg['tu'],
                            'right':None,
                            'regulator':row['regulator'],
                            'regulated_entity':row['regulated_entity'],
                            'mode':reg['mode'],
                            'mechanism':row['mechanism'],
                            'parent_classes':'("Protein-Ligand-Binding-Reactions")',
                            'name':row['name']
                            };
                        BioCyc2COBRA_regulation_all.append(tmp);
                    # (2) gene -> transcription unit
                    for i in range(len(reg['genes'])):
                        tmp = {
                            'left_EcoCyc':reg['genes'][i],
                            'left':reg['genes'][i],
                            'right_EcoCyc':reg['tu'],
                            'right':None,
                            'regulator':row['regulator'],
                            'regulated_entity':row['regulated_entity'],
                            'mode':reg['mode'],
                            'mechanism':row['mechanism'],
                            'parent_classes':'("DNA-to-Protein")',
                            'name':row['name']
                            };
                        BioCyc2COBRA_regulation_all.append(tmp);
                    # (3) ligand -> regulated entity; note the TF mode and the
                    # regulation mode strings are concatenated here
                    for i in range(len(reg['ligands']['BioCyc_name'])):
                        tmp = {
                            'left_EcoCyc':reg['ligands']['BioCyc_name'][i],
                            'left':reg['ligands']['COBRA_met_id'][i],
                            'right_EcoCyc':row['regulated_entities_EcoCyc'],
                            'right':row['regulated_entities_COBRA'],
                            'regulator':row['regulator'],
                            'regulated_entity':row['regulated_entity'],
                            'mode':reg['mode']+row['mode'],
                            'mechanism':row['mechanism'],
                            'parent_classes':'("Protein-Ligand-DNA-Binding-Reactions")',
                            'name':row['name']
                            };
                        BioCyc2COBRA_regulation_all.append(tmp);
                    # (4) gene -> regulated entity
                    for i in range(len(reg['genes'])):
                        tmp = {
                            'left_EcoCyc':reg['genes'][i],
                            'left':reg['genes'][i],
                            'right_EcoCyc':row['regulated_entities_EcoCyc'],
                            'right':row['regulated_entities_COBRA'],
                            'regulator':row['regulator'],
                            'regulated_entity':row['regulated_entity'],
                            'mechanism':row['mechanism'],
                            'mode':row['mode'],
                            'parent_classes':'("DNA-to-Protein-DNA-Binding-Reactions")',
                            'name':row['name']
                            };
                        BioCyc2COBRA_regulation_all.append(tmp);
                # the original regulation row itself is also kept
                tmp = {
                    'left_EcoCyc':row['regulators_EcoCyc'],
                    'left':row['regulators_COBRA'],
                    'right_EcoCyc':row['regulated_entities_EcoCyc'],
                    'right':row['regulated_entities_COBRA'],
                    };
                tmp.update(unique);
                BioCyc2COBRA_regulation_all.append(tmp);
            else:
                # regulator is not a TF: pass the regulation row through
                tmp = {
                    'left_EcoCyc':row['regulators_EcoCyc'],
                    'left':row['regulators_COBRA'],
                    'right_EcoCyc':row['regulated_entities_EcoCyc'],
                    'right':row['regulated_entities_COBRA'],
                    };
                tmp.update(unique);
                BioCyc2COBRA_regulation_all.append(tmp);
        #remove duplicate entries
        #(NOTE: only works because each dictionary is constructed identically)
        data_O = [];
        for row in BioCyc2COBRA_regulation_all:
            if not row in data_O:
                data_O.append(row);
        return data_O;
def convertAndMap_BioCycTranscriptionFactor2COBRA(
self,
BioCyc2COBRA_regulation_I,
BioCyc_polymerSegments_I = None,
BioCyc_compounds_I = None,
COBRA_metabolites_I = None,
chebi2inchi_I = None,
):
'''Convert and map BioCyc Transcription factor (ligand-binding) reactions
to COBRA model ids
INPUT:
BioCyc2COBRA_regulation_I = output from convertAndMap_BioCycRegulation2COBRA
BioCyc_polymerSegments_I = (TODO) listDict of models_BioCyc_polymerSegments
BioCyc_compounds_I = listDict of models_BioCyc_compounds
COBRA_metabolites_I = listDict of models_COBRA_metabolites
chebi2inchi_I = listDict of CHEBI_ID to InCHI
OUTPUT:
'''
from SBaaS_models.models_BioCyc_dependencies import models_BioCyc_dependencies
BioCyc_dependencies = models_BioCyc_dependencies();
if not BioCyc2COBRA_regulation_I is None and BioCyc2COBRA_regulation_I:
BioCyc2COBRA_regulators = list(set([r['regulator'] for r in BioCyc2COBRA_regulation_I \
if 'DNA-binding transcriptional dual regulator' in r['regulator']]));
else:
BioCyc2COBRA_regulators=BioCyc2COBRA_regulation_I;
if not chebi2inchi_I is None and chebi2inchi_I:
chebi2inchi_dict_I = {r['CHEBI_ID']:r['InChI'] for r in chebi2inchi_I}
else:
chebi2inchi_dict_I=chebi2inchi_I;
if not BioCyc_compounds_I is None and BioCyc_compounds_I:
#BioCyc_compounds_dict_I = {r['name']:r for r in BioCyc_compounds_I}
BioCyc_compounds_dict_I = {}
for row in BioCyc_compounds_I:
keys = [];
keys.append(row['name'])
keys = list(set([k for k in keys if k!='']))
for k in keys:
if not k in BioCyc_compounds_dict_I.keys():
BioCyc_compounds_dict_I[k]=[];
if not row in BioCyc_compounds_dict_I[k]:
BioCyc_compounds_dict_I[k].append(row);
else:
BioCyc_compounds_dict_I=BioCyc_compounds_I;
if not BioCyc_polymerSegments_I is None and BioCyc_polymerSegments_I:
BioCyc_polymerSegments_dict_I = {}
for r in BioCyc_polymerSegments_I:
products = models_BioCyc_dependencies.convert_bioCycList2List(r['product'])
for p in products:
if not p in BioCyc_polymerSegments_dict_I.keys():
BioCyc_polymerSegments_dict_I[p]=[];
else:
BioCyc_polymerSegments_dict_I[p].append(r);
else:
BioCyc_polymerSegments_dict_I = BioCyc_polymerSegments_I
BioCyc2COBRA_regulators_O = {}
for e in BioCyc2COBRA_regulators:
BioCyc2COBRA_regulators_O[e]=[];
#spot checks:
if e == 'Cra DNA-binding transcriptional dual regulator':
#error mapping fdp_c
print('check');
elif e == 'GalR DNA-binding transcriptional dual regulator':
#gene is being identified as a TU
print('check');
elif e == 'β-D-galactose':
#not a transcription factor
print('check');
tmp = self.get_rows_substratesAndParentClassesAndDatabase_modelsBioCycReactions(
e,
database_I='ECOLI',
query_I={},
output_O='listDict',
dictColumn_I=None
);
for t in tmp:
ligands = {'COBRA_met_id': [],
'BioCyc_name': []};
genes = [];
tus = [];
#parse left and right
left = BioCyc_dependencies.convert_bioCycList2List(t['left'])
right = BioCyc_dependencies.convert_bioCycList2List(t['right'])
#check for tus
if e in left:
tus.append(e);
mode = '("-")';
elif e in right:
tus.append(e);
mode = '("+")';
else:
continue;
#query proteins to look up the gene
#query compounds to look up the ligands
for l in left:
proteins,compounds = [],[];
proteins = self.get_rows_nameAndDatabase_modelsBioCycProteins(
l,database_I = 'ECOLI'
);
compounds = self.get_rows_nameAndDatabase_modelsBioCycCompounds(
l,database_I = 'ECOLI'
);
if proteins:
for p in proteins:
#1. parse genes directly
genes.extend(BioCyc_dependencies.convert_bioCycList2List(p['gene']));
#2. if genes are not specified (i.e., protein complex) query and parse polymerSegments
names = BioCyc_dependencies.convert_bioCycList2List(p['names'])
for n in names:
##TODO: test
#if n in BioCyc_polymerSegments_dict_I.keys():
# for row in BioCyc_polymerSegments_dict_I[n]:
# genes.append(row['name'])
rows = self.get_rows_productAndDatabase_modelsBioCycPolymerSegments(
n,database_I = 'ECOLI');
genes.extend(r['name'] for r in rows);
elif compounds:
#map the ligand names...
original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
compounds,
#[c['name'] for c in compounds],
BioCyc_components_dict_I=BioCyc_compounds_dict_I,
BioCyc2COBRA_func_I=BioCyc_dependencies.map_BioCycCompound2COBRA,
BioCyc2COBRA_params_I={
'COBRA_metabolites_I':COBRA_metabolites_I,
'chebi2inchi_dict_I':chebi2inchi_dict_I,
}
);
ligands['BioCyc_name'].extend([c['name'] for c in original])
#ligands['BioCyc_name'].extend(original)
ligands['COBRA_met_id'].extend(converted)
for r in right:
proteins,compounds = [],[];
proteins = self.get_rows_nameAndDatabase_modelsBioCycProteins(
r,database_I = 'ECOLI'
);
compounds = self.get_rows_nameAndDatabase_modelsBioCycCompounds(
r,database_I = 'ECOLI'
);
if proteins:
for p in proteins:
#1. parse genes directly
genes.extend(BioCyc_dependencies.convert_bioCycList2List(p['gene']));
#2. if genes are not specified (i.e., protein complex) query and parse polymerSegments
names = BioCyc_dependencies.convert_bioCycList2List(p['names'])
for n in names:
##TODO: test
#if n in BioCyc_polymerSegments_dict_I.keys():
# for row in BioCyc_polymerSegments_dict_I[n]:
# genes.append(row['name'])
rows = self.get_rows_productAndDatabase_modelsBioCycPolymerSegments(
n,database_I = 'ECOLI');
genes.extend(r['name'] for r in rows);
elif compounds:
original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
compounds,
#[c['name'] for c in compounds],
BioCyc_components_dict_I=BioCyc_compounds_dict_I,
BioCyc2COBRA_func_I=BioCyc_dependencies.map_BioCycCompound2COBRA,
BioCyc2COBRA_params_I={
'COBRA_metabolites_I':COBRA_metabolites_I,
'chebi2inchi_dict_I':chebi2inchi_dict_I,
}
);
ligands['BioCyc_name'].extend([c['name'] for c in original])
#ligands['BioCyc_name'].extend(original)
ligands['COBRA_met_id'].extend(converted)
genes = list(set([g for g in genes if g!='']))
#check that there is only 1 tu:
assert(len(tus)==1); #only 1 tu
tu = tus[0];
#NOTE: there can be multiple ligands/genes associated with the tu
BioCyc2COBRA_regulators_O[e].append({
'ligands':ligands,
'genes':genes,
'tu':tu,
'regulator':e,
'mode':mode,
});
return BioCyc2COBRA_regulators_O;
    def convertAndMap_BioCycRegulation2COBRA(
        self,
        BioCyc_regulation_I,
        BioCyc_reactions_I = None,
        BioCyc_enzymaticReactions2PolymerSegments_I = None,
        BioCyc_compounds_I = None,
        COBRA_reactions_I = None,
        COBRA_metabolites_I = None,
        chebi2inchi_I = None,
        #chebi2database_I = None,
        MetaNetX_reactions_I = None,
        MetaNetX_metabolites_I = None,):
        '''Convert and map BioCyc Regulation
        to COBRA model ids
        INPUT:
        BioCyc_regulation_I = listDict
        BioCyc_reactions_I = listDict of models_BioCyc_reactions
        BioCyc_enzymaticReactions2PolymerSegments_I = listDict of
            join between models_BioCyc_enzymaticReactions and
            models_BioCyc_polymerSegments
            (getJoin_genes_namesAndDatabase_modelsBioCycEnzymaticReactionsAndPolymerSegments)
        BioCyc_compounds_I = listDict of models_BioCyc_compounds
        COBRA_reactions_I = listDict of models_COBRA_reactions
        COBRA_metabolites_I = listDict of models_COBRA_metabolites
        chebi2inchi_I = listDict of CHEBI_ID to InCHI
        MetaNetX_reactions_I = listDict of MetaNetX reaction xrefs
        MetaNetX_metabolites_I = listDict of MetaNetX chemical xrefs
        OUTPUT:
        listDict of de-duplicated regulator/regulated-entity pairs carrying
        both the original EcoCyc identifiers and the mapped COBRA identifiers
        (pairs that could not be mapped on either side are dropped)
        '''
        BioCyc_dependencies = models_BioCyc_dependencies();
        #reformat input into a dict for fast traversal
        #each *_dict_I below indexes the corresponding listDict by a lookup key;
        #when the input is None/empty the raw input is passed through unchanged
        if not chebi2inchi_I is None and chebi2inchi_I:
            chebi2inchi_dict_I = {r['CHEBI_ID']:r['InChI'] for r in chebi2inchi_I}
        else:
            chebi2inchi_dict_I=chebi2inchi_I;
        #if not chebi2database_I is None and chebi2database_I:
        #    chebi2database_dict_I = {r['CHEBI_ID']:r['InChI'] for r in chebi2database_I}
        #else:
        #    chebi2database_dict_I=chebi2database_I;
        if not BioCyc_compounds_I is None and BioCyc_compounds_I:
            #BioCyc_compounds_dict_I = {r['name']:r for r in BioCyc_compounds_I}
            #index compounds by name; a name can map to multiple compound rows
            BioCyc_compounds_dict_I = {}
            for row in BioCyc_compounds_I:
                keys = [];
                keys.append(row['name'])
                keys = list(set([k for k in keys if k!='']))
                for k in keys:
                    if not k in BioCyc_compounds_dict_I.keys():
                        BioCyc_compounds_dict_I[k]=[];
                    if not row in BioCyc_compounds_dict_I[k]:
                        BioCyc_compounds_dict_I[k].append(row);
        else:
            BioCyc_compounds_dict_I=BioCyc_compounds_I;
        if not BioCyc_reactions_I is None and BioCyc_reactions_I:
            #index reactions by common name AND by each enzymatic reaction name
            BioCyc_reactions_dict_I = {}
            for row in BioCyc_reactions_I:
                keys = [];
                keys.append(row['common_name'])
                keys.extend(BioCyc_dependencies.convert_bioCycList2List(row['enzymatic_reaction']))
                keys = list(set([k for k in keys if k!='']))
                for k in keys:
                    if not k in BioCyc_reactions_dict_I.keys():
                        BioCyc_reactions_dict_I[k]=[];
                    if not row in BioCyc_reactions_dict_I[k]:
                        BioCyc_reactions_dict_I[k].append(row);
        else:
            BioCyc_reactions_dict_I=BioCyc_reactions_I;
        if not BioCyc_enzymaticReactions2PolymerSegments_I is None and BioCyc_enzymaticReactions2PolymerSegments_I:
            #accumulate enzymes/genes/accessions per enzymatic-reaction name
            BioCyc_enzymaticReactions_dict_I = {}
            for row in BioCyc_enzymaticReactions2PolymerSegments_I:
                try:
                    if not row['name'] in BioCyc_enzymaticReactions_dict_I.keys():
                        BioCyc_enzymaticReactions_dict_I[row['name']]={
                            'name':'',
                            'enzyme':[],
                            'gene_ids':[],
                            'accession_1':[],
                        }
                    BioCyc_enzymaticReactions_dict_I[row['name']]['name']=row['name'];
                    BioCyc_enzymaticReactions_dict_I[row['name']]['enzyme'].append(row['enzyme']);
                    BioCyc_enzymaticReactions_dict_I[row['name']]['gene_ids'].extend(row['gene_ids']);
                    BioCyc_enzymaticReactions_dict_I[row['name']]['accession_1'].extend(row['accession_1']);
                except Exception as e:
                    print(e)
        else:
            BioCyc_enzymaticReactions_dict_I=BioCyc_enzymaticReactions2PolymerSegments_I;
        if not MetaNetX_reactions_I is None and MetaNetX_reactions_I:
            #index xrefs by MNX_ID; each '#XREF' is "namespace:id" — TODO confirm
            #that ids never contain ':' (split(':') keeps only the first two parts)
            MetaNetX_reactions_dict_I = {}
            for row in MetaNetX_reactions_I:
                try:
                    if not row['MNX_ID'] in MetaNetX_reactions_dict_I.keys():
                        MetaNetX_reactions_dict_I[row['MNX_ID']]={}
                    key_value = row['#XREF'].split(':')
                    MetaNetX_reactions_dict_I[row['MNX_ID']][key_value[0]]=key_value[1];
                except Exception as e:
                    print(e)
                    #print(row)
        else:
            MetaNetX_reactions_dict_I=MetaNetX_reactions_I;
        if not MetaNetX_metabolites_I is None and MetaNetX_metabolites_I:
            MetaNetX_metabolites_dict_I = {}
            for row in MetaNetX_metabolites_I:
                try:
                    if not row['MNX_ID'] in MetaNetX_metabolites_dict_I.keys():
                        MetaNetX_metabolites_dict_I[row['MNX_ID']]={}
                    key_value = row['#XREF'].split(':')
                    MetaNetX_metabolites_dict_I[row['MNX_ID']][key_value[0]]=key_value[1];
                except Exception as e:
                    print(e)
                    #print(row)
        else:
            MetaNetX_metabolites_dict_I=MetaNetX_metabolites_I;
        regulation_O = [];
        for i,reg in enumerate(BioCyc_regulation_I):
            #if reg['name'] == 'Regulation of galSp by GalR DNA-binding transcriptional dual regulator':
            #    print('check')
            #elif reg['name'] == 'Regulation of ribonucleoside-diphosphate reductase by dATP':
            #    print('check');
            #elif reg['regulated_entity_enzymaticReaction'] == 'formate dehydrogenase':
            #    print('check');
            #fields copied verbatim into every flattened output row
            unique = {
                'regulator':reg['regulator'],
                'regulated_entity':reg['regulated_entity'],
                'mode':reg['mode'],
                'mechanism':reg['mechanism'],
                'name':reg['name'],
                'parent_classes':reg['parent_classes']
                }
            tmp = {
                'regulators_EcoCyc':[],
                'regulators_COBRA':[],
                'regulated_entities_EcoCyc':[],
                'regulated_entities_COBRA':[],
                }
            #convert the regulators
            #exactly one regulator category is used, checked in priority order:
            #gene > protein > RNA > compound
            if reg['regulator_gene']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulator_gene'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulators_EcoCyc']=original;
                tmp['regulators_COBRA']=converted;
            elif reg['regulator_protein']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulator_protein'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulators_EcoCyc']=original;
                tmp['regulators_COBRA']=converted;
            elif reg['regulator_RNA']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulator_RNA'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulators_EcoCyc']=original;
                tmp['regulators_COBRA']=converted;
            elif reg['regulator_compound']:
                #compounds are mapped to COBRA metabolite ids via the compound
                #index plus CHEBI/MetaNetX cross-references
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulator_compound'],
                    BioCyc_components_dict_I=BioCyc_compounds_dict_I,
                    BioCyc2COBRA_func_I=BioCyc_dependencies.map_BioCycCompound2COBRA,
                    BioCyc2COBRA_params_I={
                        'COBRA_metabolites_I':COBRA_metabolites_I,
                        'chebi2inchi_dict_I':chebi2inchi_dict_I,
                        'MetaNetX_metabolites_dict_I':MetaNetX_metabolites_dict_I,
                        }
                    );
                tmp['regulators_EcoCyc']=original;
                tmp['regulators_COBRA']=converted;
            #convert the regulated_entities
            #priority order: gene > enzymaticReaction > promoter > product > protein
            if reg['regulated_entity_gene']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulated_entity_gene'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulated_entities_EcoCyc']=original;
                tmp['regulated_entities_COBRA']=converted;
            elif reg['regulated_entity_enzymaticReaction']:
                #enzymatic reactions are mapped to COBRA reaction ids via the
                #reaction index plus MetaNetX xrefs and the gene join
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulated_entity_enzymaticReaction'],
                    BioCyc_components_dict_I=BioCyc_reactions_dict_I,
                    BioCyc2COBRA_func_I=BioCyc_dependencies.map_BioCycReaction2COBRA,
                    BioCyc2COBRA_params_I={
                        'COBRA_reactions_I':COBRA_reactions_I,
                        'MetaNetX_reactions_dict_I':MetaNetX_reactions_dict_I,
                        'BioCyc_reaction2Genes_dict_I':BioCyc_enzymaticReactions_dict_I,
                        }
                    );
                tmp['regulated_entities_EcoCyc']=original;
                tmp['regulated_entities_COBRA']=converted;
            elif reg['regulated_entity_promoter']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulated_entity_promoter'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulated_entities_EcoCyc']=original;
                tmp['regulated_entities_COBRA']=converted;
            elif reg['regulated_entity_product']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulated_entity_product'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulated_entities_EcoCyc']=original;
                tmp['regulated_entities_COBRA']=converted;
            elif reg['regulated_entity_protein']:
                original,converted = BioCyc_dependencies.map_BioCyc2COBRA(
                    reg['regulated_entity_protein'],
                    BioCyc2COBRA_func_I=None,
                    BioCyc2COBRA_params_I={}
                    );
                tmp['regulated_entities_EcoCyc']=original;
                tmp['regulated_entities_COBRA']=converted;
            #check that mappings/conversions took place
            #(rows with an unmapped side are silently skipped)
            if not tmp['regulators_EcoCyc'] or not tmp['regulated_entities_EcoCyc'] or \
                not tmp['regulators_COBRA'] or not tmp['regulated_entities_COBRA']:
                continue;
            #flatten
            #cross-multiply regulators x regulated entities into row-wise pairs;
            #EcoCyc and COBRA flattenings are index-aligned by construction
            EcoCyc_flattened = BioCyc_dependencies.crossMultiple_2lists(
                tmp['regulators_EcoCyc'],
                tmp['regulated_entities_EcoCyc'],
                'regulators_EcoCyc',
                'regulated_entities_EcoCyc',
                )
            COBRA_flattened = BioCyc_dependencies.crossMultiple_2lists(
                tmp['regulators_COBRA'],
                tmp['regulated_entities_COBRA'],
                'regulators_COBRA',
                'regulated_entities_COBRA',
                )
            for i in range(len(EcoCyc_flattened)):
                tmp1 = {};
                tmp1.update(EcoCyc_flattened[i])
                tmp1.update(COBRA_flattened[i])
                tmp1.update(unique)
                regulation_O.append(tmp1);
        #remove duplicate entries
        #(NOTE: only works because each dictionary is constructed identically)
        data_O = [];
        for row in regulation_O:
            if not row in data_O:
                data_O.append(row);
        return data_O;
def execute_getBiomassProducingPathwayComponents_BioCycAndCOBRA(
self,
biomassProducingPathways,
):
''' '''
pathways = [d['Subpathway'] for d in biomassProducingPathways if d['used_']=="TRUE"]
#reorganize into subpathways
subpathways_dict = {};
for d in biomassProducingPathways:
if d['used_'] and d['used_']=="TRUE":
if not d['Pathway'] in subpathways_dict.keys():
subpathways_dict[d['Pathway']]={};
if not d['Pathway'] in subpathways_dict[d['Pathway']].keys():
subpathways_dict[d['Pathway']][d['Pathway']]=[];
if not d['Subpathway'] in subpathways_dict[d['Pathway']].keys():
subpathways_dict[d['Pathway']][d['Subpathway']]=[];
subpathways_dict[d['Pathway']][d['Subpathway']].append(d['Subpathway']);
subpathways_dict[d['Pathway']][d['Pathway']].append(d['Subpathway']);
subpathways_dict['all']={'all':pathways};
#map the subpathway components
subpathways2components_dict = {};
for k1,v1 in subpathways_dict.items():
subpathways2components_dict[k1]={};
for k2,v2 in v1.items():
gene_ids,rxn_ids,met_ids,met_ids_deformated = \
self.get_geneIDsAndRxnIDsAndMetIDs_modelsBioCycAndModelsCOBRA(v2)
subpathways2components_dict[k1][k2]={
'gene_ids':gene_ids,
'rxn_ids':rxn_ids,
'met_ids':met_ids,
'met_ids_deformated':met_ids_deformated,
};
return subpathways2components_dict;
def get_geneIDsAndRxnIDsAndMetIDs_modelsBioCycAndModelsCOBRA(
self,
pathways):
#initialize supporting objects
cobra01 = models_COBRA_query(self.session,self.engine,self.settings);
cobra01.initialize_supportedTables();
cobra_dependencies = models_BioCyc_dependencies();
#query the pathways
biocyc_pathways = self.getParsed_genesAndPathwaysAndReactions_namesAndDatabase_modelsBioCycPathways(
names_I=pathways,
database_I='ECOLI',
query_I={},
output_O='listDict',
dictColumn_I=None);
genes = list(set([g['gene'] for g in biocyc_pathways if g['gene']!='']));
#join list of genes with alternative identifiers
biocyc_genes = self.getParsed_genesAndAccessionsAndSynonyms_namesAndParentClassesAndDatabase_modelsBioCycPolymerSegments(
names_I=genes,
database_I='ECOLI',
parent_classes_I=None,
query_I={},
output_O='listDict',
dictColumn_I=None);
gene_ids = list(set(genes + [g['synonym'] for g in biocyc_genes if g['synonym']]));
accession_1 = list(set([g['accession_1'] for g in biocyc_genes if g['accession_1']!='']));
#Join accession_1 with COBRA reactions
cobra_rxnIDs = cobra01.get_rows_modelIDAndOrderedLocusNames_dataStage02PhysiologyModelReactions(
model_id_I='150526_iDM2015',
ordered_locus_names_I=accession_1,
query_I={},
output_O='listDict',
dictColumn_I=None)
rxn_ids = list(set([g['rxn_id'].replace('_reverse','') for g in cobra_rxnIDs if g['rxn_id']!='']));
#COBRA metabolites
met_ids = list(set([p for g in cobra_rxnIDs if g['products_ids'] for p in g['products_ids']]+\
[p for g in cobra_rxnIDs if g['reactants_ids'] for p in g['reactants_ids']]));
#deformat met_ids
from SBaaS_models.models_COBRA_dependencies import models_COBRA_dependencies
cobra_dependencies = models_COBRA_dependencies();
met_ids_deformated = list(set([cobra_dependencies.deformat_metid(m).replace('13dpg','23dpg')\
.replace('3pg','Pool_2pg_3pg')\
.replace('glycogen','adpglc')\
.replace('uacgam','udpglcur') for m in met_ids]));
#return values
return gene_ids,rxn_ids,met_ids,met_ids_deformated;
|
{
"content_hash": "7df009150c0f0f369ec826b3d1209f5b",
"timestamp": "",
"source": "github",
"line_count": 885,
"max_line_length": 129,
"avg_line_length": 48.3683615819209,
"alnum_prop": 0.4999766387889548,
"repo_name": "dmccloskey/SBaaS_models",
"id": "842da436f731b8f66109b22d0bbcf85f595621d5",
"size": "42808",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SBaaS_models/models_BioCyc_execute.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "634512"
}
],
"symlink_target": ""
}
|
class SimdType(object):
    """Base class for all SIMD Types."""

    def codegen(self, indent=0):
        """Render this type to source text via the SIMD code generator."""
        # imported locally (presumably to avoid an import cycle — confirm)
        from ctree.simd.codegen import SimdCodeGen
        generator = SimdCodeGen()
        return generator.visit(self)
class m256d(SimdType):
    """SIMD type; name matches Intel's __m256d (256-bit vector of doubles) — TODO confirm."""
    pass
class m256(SimdType):
    """SIMD type; name matches Intel's __m256 (256-bit vector of floats) — TODO confirm."""
    pass
class m512(SimdType):
    """SIMD type; name matches Intel's __m512 (512-bit vector of floats) — TODO confirm."""
    pass
|
{
"content_hash": "9e22243222fc5f2e9d998f03f7a6af07",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 50,
"avg_line_length": 17.058823529411764,
"alnum_prop": 0.6586206896551724,
"repo_name": "ucb-sejits/ctree",
"id": "88f36c93c810011fab505e5a05cb5b1037644790",
"size": "290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ctree/simd/types.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Mako",
"bytes": "820"
},
{
"name": "Python",
"bytes": "249654"
},
{
"name": "Shell",
"bytes": "1396"
}
],
"symlink_target": ""
}
|
import os
import json
import re
import time
import datetime
import base64
import decimal
import operator
import logging
import copy
import uuid
import urllib
import functools
from logging import handlers as logging_handlers
from gevent import wsgi
from geventhttpclient import HTTPClient
from geventhttpclient.url import URL
import flask
import jsonrpc
from jsonrpc import dispatcher
import pymongo
from bson import json_util
from bson.son import SON
from lib import config, siofeeds, util, blockchain, util_bitcoin
from lib.components import betting, rps, assets_trading, dex
PREFERENCES_MAX_LENGTH = 100000 #in bytes, as expressed in JSON
API_MAX_LOG_SIZE = 10 * 1024 * 1024 #max log size of 10 MB before rotation (make configurable later)
API_MAX_LOG_COUNT = 10 #number of rotated log files to keep
#8 significant digits with banker's rounding for all Decimal arithmetic
decimal.setcontext(decimal.Context(prec=8, rounding=decimal.ROUND_HALF_EVEN))
D = decimal.Decimal #shorthand constructor used throughout
def serve_api(mongo_db, redis_client):
# Preferneces are just JSON objects... since we don't force a specific form to the wallet on
# the server side, this makes it easier for 3rd party wallets (i.e. not Counterwallet) to fully be able to
# use counterblockd to not only pull useful data, but also load and store their own preferences, containing
# whatever data they need
DEFAULT_COUNTERPARTYD_API_CACHE_PERIOD = 60 #in seconds
app = flask.Flask(__name__)
tx_logger = logging.getLogger("transaction_log") #get transaction logger
@dispatcher.add_method
def is_ready():
"""this method used by the client to check if the server is alive, caught up, and ready to accept requests.
If the server is NOT caught up, a 525 error will be returned actually before hitting this point. Thus,
if we actually return data from this function, it should always be true. (may change this behaviour later)"""
blockchainInfo = blockchain.getinfo()
ip = flask.request.headers.get('X-Real-Ip', flask.request.remote_addr)
country = config.GEOIP.country_code_by_addr(ip)
return {
'caught_up': util.is_caught_up_well_enough_for_government_work(),
'last_message_index': config.LAST_MESSAGE_INDEX,
'block_height': blockchainInfo['info']['blocks'],
'testnet': config.TESTNET,
'ip': ip,
'country': country
}
@dispatcher.add_method
def get_reflected_host_info():
"""Allows the requesting host to get some info about itself, such as its IP. Used for troubleshooting."""
ip = flask.request.headers.get('X-Real-Ip', flask.request.remote_addr)
country = config.GEOIP.country_code_by_addr(ip)
return {
'ip': ip,
'cookie': flask.request.headers.get('Cookie', ''),
'country': country
}
@dispatcher.add_method
def get_messagefeed_messages_by_index(message_indexes):
messages = util.call_jsonrpc_api("get_messages_by_index", {'message_indexes': message_indexes}, abort_on_error=True)['result']
events = []
for m in messages:
events.append(util.decorate_message_for_feed(m))
return events
@dispatcher.add_method
def get_chain_block_height():
data = blockchain.getinfo()
return data['info']['blocks']
    @dispatcher.add_method
    def get_chain_address_info(addresses, with_uxtos=True, with_last_txn_hashes=4, with_block_height=False):
        """Return blockchain-level info (and optionally UTXOs and recent confirmed txn hashes) for each address."""
        if not isinstance(addresses, list):
            raise Exception("addresses must be a list of addresses, even if it just contains one address")
        results = []
        if with_block_height:
            block_height_response = blockchain.getinfo()
            block_height = block_height_response['info']['blocks'] if block_height_response else None
        for address in addresses:
            info = blockchain.getaddressinfo(address)
            txns = info['transactions']
            del info['transactions']
            result = {}
            result['addr'] = address
            result['info'] = info
            if with_block_height: result['block_height'] = block_height
            #^ yeah, hacky...it will be the same block height for each address (we do this to avoid an extra API call to get_block_height)
            if with_uxtos:
                result['uxtos'] = blockchain.listunspent(address)
            if with_last_txn_hashes:
                #with last_txns, only show CONFIRMED txns (so skip the first info['unconfirmedTxApperances'] # of txns, if not 0
                #NOTE(review): 'unconfirmedTxApperances' spelling appears to come from the backend response — do not "fix" it here
                result['last_txns'] = txns[info['unconfirmedTxApperances']:with_last_txn_hashes+info['unconfirmedTxApperances']]
            results.append(result)
        return results
@dispatcher.add_method
def get_chain_txns_status(txn_hashes):
if not isinstance(txn_hashes, list):
raise Exception("txn_hashes must be a list of txn hashes, even if it just contains one hash")
results = []
for tx_hash in txn_hashes:
tx_info = blockchain.gettransaction(tx_hash);
if tx_info:
assert tx_info['txid'] == tx_hash
results.append({
'tx_hash': tx_info['txid'],
'blockhash': tx_info.get('blockhash', None), #not provided if not confirmed on network
'confirmations': tx_info.get('confirmations', 0), #not provided if not confirmed on network
'blocktime': tx_info.get('time', None),
})
return results
    @dispatcher.add_method
    def get_normalized_balances(addresses):
        """
        This call augments counterpartyd's get_balances with a normalized_quantity field. It also will include any owned
        assets for an address, even if their balance is zero.
        NOTE: Does not retrieve DOGE balance. Use get_address_info for that.
        """
        if not isinstance(addresses, list):
            raise Exception("addresses must be a list of addresses, even if it just contains one address")
        if not len(addresses):
            raise Exception("Invalid address list supplied")
        filters = []
        for address in addresses:
            filters.append({'field': 'address', 'op': '==', 'value': address})
        results = []
        offset = 0
        limit = 1000
        #page through get_balances `limit` rows at a time until a short page
        while True:
            result = util.call_jsonrpc_api("get_balances",
                {'filters': filters, 'filterop': 'or', 'offset': offset, 'limit': limit}, abort_on_error=True)['result']
            results += result
            if len(result) >= limit:
                offset += len(result)
            else:
                break
        #index owned assets by (owner address + asset) for O(1) lookups below
        isowner = {}
        owned_assets = mongo_db.tracked_assets.find( { '$or': [{'owner': a } for a in addresses] }, { '_history': 0, '_id': 0 } )
        for o in owned_assets:
            isowner[o['owner'] + o['asset']] = o
        data = []
        mappings = {}
        for d in results:
            if not d['quantity'] and ((d['address'] + d['asset']) not in isowner):
                continue #don't include balances with a zero asset value
            asset_info = mongo_db.tracked_assets.find_one({'asset': d['asset']})
            d['normalized_quantity'] = util_bitcoin.normalize_quantity(d['quantity'], asset_info['divisible'])
            d['owner'] = (d['address'] + d['asset']) in isowner
            mappings[d['address'] + d['asset']] = d
            data.append(d)
        #include any owned assets for each address, even if their balance is zero
        for key in isowner:
            if key not in mappings:
                o = isowner[key]
                data.append({
                    'address': o['owner'],
                    'asset': o['asset'],
                    'quantity': 0,
                    'normalized_quantity': 0,
                    'owner': True,
                })
        return data
def _get_address_history(address, start_block=None, end_block=None):
address_dict = {}
address_dict['balances'] = util.call_jsonrpc_api("get_balances",
{ 'filters': [{'field': 'address', 'op': '==', 'value': address},],
}, abort_on_error=True)['result']
address_dict['debits'] = util.call_jsonrpc_api("get_debits",
{ 'filters': [{'field': 'address', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['credits'] = util.call_jsonrpc_api("get_credits",
{ 'filters': [{'field': 'address', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['burns'] = util.call_jsonrpc_api("get_burns",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['sends'] = util.call_jsonrpc_api("get_sends",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address}, {'field': 'destination', 'op': '==', 'value': address}],
'filterop': 'or',
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
#^ with filterop == 'or', we get all sends where this address was the source OR destination
address_dict['orders'] = util.call_jsonrpc_api("get_orders",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['order_matches'] = util.call_jsonrpc_api("get_order_matches",
{ 'filters': [{'field': 'tx0_address', 'op': '==', 'value': address}, {'field': 'tx1_address', 'op': '==', 'value': address},],
'filterop': 'or',
'order_by': 'tx0_block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['btcpays'] = util.call_jsonrpc_api("get_btcpays",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address}, {'field': 'destination', 'op': '==', 'value': address}],
'filterop': 'or',
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['issuances'] = util.call_jsonrpc_api("get_issuances",
{ 'filters': [{'field': 'issuer', 'op': '==', 'value': address}, {'field': 'source', 'op': '==', 'value': address}],
'filterop': 'or',
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['broadcasts'] = util.call_jsonrpc_api("get_broadcasts",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['bets'] = util.call_jsonrpc_api("get_bets",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['bet_matches'] = util.call_jsonrpc_api("get_bet_matches",
{ 'filters': [{'field': 'tx0_address', 'op': '==', 'value': address}, {'field': 'tx1_address', 'op': '==', 'value': address},],
'filterop': 'or',
'order_by': 'tx0_block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['dividends'] = util.call_jsonrpc_api("get_dividends",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['cancels'] = util.call_jsonrpc_api("get_cancels",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['callbacks'] = util.call_jsonrpc_api("get_callbacks",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['bet_expirations'] = util.call_jsonrpc_api("get_bet_expirations",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['order_expirations'] = util.call_jsonrpc_api("get_order_expirations",
{ 'filters': [{'field': 'source', 'op': '==', 'value': address},],
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['bet_match_expirations'] = util.call_jsonrpc_api("get_bet_match_expirations",
{ 'filters': [{'field': 'tx0_address', 'op': '==', 'value': address}, {'field': 'tx1_address', 'op': '==', 'value': address},],
'filterop': 'or',
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
address_dict['order_match_expirations'] = util.call_jsonrpc_api("get_order_match_expirations",
{ 'filters': [{'field': 'tx0_address', 'op': '==', 'value': address}, {'field': 'tx1_address', 'op': '==', 'value': address},],
'filterop': 'or',
'order_by': 'block_index',
'order_dir': 'asc',
'start_block': start_block,
'end_block': end_block,
}, abort_on_error=True)['result']
return address_dict
@dispatcher.add_method
def get_last_n_messages(count=100):
if count > 1000:
raise Exception("The count is too damn high")
message_indexes = range(max(config.LAST_MESSAGE_INDEX - count, 0) + 1, config.LAST_MESSAGE_INDEX+1)
messages = util.call_jsonrpc_api("get_messages_by_index",
{ 'message_indexes': message_indexes }, abort_on_error=True)['result']
for i in xrange(len(messages)):
messages[i] = util.decorate_message_for_feed(messages[i])
return messages
@dispatcher.add_method
def get_raw_transactions(address, start_ts=None, end_ts=None, limit=500):
"""Gets raw transactions for a particular address
@param address: A single address string
@param start_ts: The starting date & time. Should be a unix epoch object. If passed as None, defaults to 60 days before the end_date
@param end_ts: The ending date & time. Should be a unix epoch object. If passed as None, defaults to the current date & time
@param limit: the maximum number of transactions to return; defaults to ten thousand
@return: Returns the data, ordered from newest txn to oldest. If any limit is applied, it will cut back from the oldest results
"""
def get_asset_cached(asset, asset_cache):
if asset in asset_cache:
return asset_cache[asset]
asset_data = mongo_db.tracked_assets.find_one({'asset': asset})
asset_cache[asset] = asset_data
return asset_data
asset_cache = {} #ghetto cache to speed asset lookups within the scope of a function call
now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
if not end_ts: #default to current datetime
end_ts = now_ts
if not start_ts: #default to 60 days before the end date
start_ts = end_ts - (60 * 24 * 60 * 60)
start_block_index, end_block_index = util.get_block_indexes_for_dates(
start_dt=datetime.datetime.utcfromtimestamp(start_ts),
end_dt=datetime.datetime.utcfromtimestamp(end_ts) if now_ts != end_ts else None)
#make API call to counterpartyd to get all of the data for the specified address
txns = []
d = _get_address_history(address, start_block=start_block_index, end_block=end_block_index)
#mash it all together
for category, entries in d.iteritems():
if category in ['balances',]:
continue
for e in entries:
e['_category'] = category
e = util.decorate_message(e, for_txn_history=True) #DRY
txns += entries
txns = util.multikeysort(txns, ['-_block_time', '-_tx_index'])
txns = txns[0:limit] #TODO: we can trunk before sorting. check if we can use the messages table and use sql order and limit
#^ won't be a perfect sort since we don't have tx_indexes for cancellations, but better than nothing
#txns.sort(key=operator.itemgetter('block_index'))
return txns
@dispatcher.add_method
def get_base_quote_asset(asset1, asset2):
    """Given two arbitrary assets, returns the base asset and the quote asset.

    @return: dict with 'base_asset', 'quote_asset' and 'pair_name' ("BASE/QUOTE")
    @raise Exception: if either asset is not a tracked asset
    """
    base, quote = util.assets_to_asset_pair(asset1, asset2)
    base_info = mongo_db.tracked_assets.find_one({'asset': base})
    quote_info = mongo_db.tracked_assets.find_one({'asset': quote})
    if not base_info or not quote_info:
        raise Exception("Invalid asset(s)")
    return {
        'base_asset': base,
        'quote_asset': quote,
        'pair_name': "%s/%s" % (base, quote)
    }
@dispatcher.add_method
def get_market_price_summary(asset1, asset2, with_last_trades=0):
    """Fetch the market price summary for the given asset pair.

    Returns False instead of None when no summary is available, due to a
    current bug in our jsonrpc stack when None is returned.
    """
    summary = assets_trading.get_market_price_summary(asset1, asset2, with_last_trades)
    if summary is None:
        return False
    return summary
@dispatcher.add_method
def get_market_cap_history(start_ts=None, end_ts=None):
    """Return hourly-averaged market cap history for all assets, denominated
    in both XCP and BTC units.

    @param start_ts: Unix epoch start time; defaults to 30 days before end_ts
    @param end_ts: Unix epoch end time; defaults to the current time
    @return: dict keyed by the denomination asset, each value being a list of
    {'name': asset, 'data': [[interval_time_ms, market_cap], ...]} entries
    """
    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 30 days before the end date
        start_ts = end_ts - (30 * 24 * 60 * 60)

    data = {}
    results = {}
    #^ format is results[market_cap_as] = [{'name': asset, 'data': [[time_ms, cap], ...]}, ...]
    for denom in (config.XCP, config.BTC):
        #leave the range open-ended when end_ts is "now"
        if end_ts == now_ts:
            time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts)}
        else:
            time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts),
                           "$lte": datetime.datetime.utcfromtimestamp(end_ts)}
        caps = mongo_db.asset_marketcap_history.aggregate([
            {"$match": {"market_cap_as": denom, "block_time": time_filter}},
            {"$project": {
                "year":  {"$year": "$block_time"},
                "month": {"$month": "$block_time"},
                "day":   {"$dayOfMonth": "$block_time"},
                "hour":  {"$hour": "$block_time"},
                "asset": 1,
                "market_cap": 1,
            }},
            {"$sort": {"block_time": pymongo.ASCENDING}},
            {"$group": {
                "_id": {"asset": "$asset", "year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
                "market_cap": {"$avg": "$market_cap"}, #use the average marketcap during the interval
            }},
        ])
        entries = caps['result'] if caps['ok'] else []
        data[denom] = {}
        for entry in entries:
            gid = entry['_id']
            interval_time = int(time.mktime(datetime.datetime(
                gid['year'], gid['month'], gid['day'], gid['hour']).timetuple()) * 1000)
            data[denom].setdefault(gid['asset'], []).append([interval_time, entry['market_cap']])
        results[denom] = []
        for asset_name in data[denom]:
            results[denom].append({'name': asset_name,
                'data': sorted(data[denom][asset_name], key=operator.itemgetter(0))})
    return results
@dispatcher.add_method
def get_market_info(assets):
    """Return market info records for the given assets, decorated with any
    processed extended (JSON-file) info that exists for each asset.

    Assets with no processed extended info get empty-string extended_* fields.
    """
    market_info = list(mongo_db.asset_market_info.find({'asset': {'$in': assets}}, {'_id': 0}))
    #index extended info by asset name, skipping records marked disabled
    ext_by_asset = {}
    for rec in mongo_db.asset_extended_info.find({'asset': {'$in': assets}}):
        if not rec.get('disabled', False):
            ext_by_asset[rec['asset']] = rec
    for entry in market_info:
        ext = ext_by_asset.get(entry['asset'])
        if ext is not None and ext.get('processed', False):
            entry['extended_image'] = bool(ext.get('image', ''))
            entry['extended_description'] = ext.get('description', '')
            entry['extended_website'] = ext.get('website', '')
            entry['extended_pgpsig'] = ext.get('pgpsig', '')
        else:
            entry['extended_image'] = entry['extended_description'] = entry['extended_website'] = entry['extended_pgpsig'] = ''
    return market_info
@dispatcher.add_method
def get_market_info_leaderboard(limit=100):
    """returns market leaderboard data for both the XDP and DOGE markets

    @param limit: maximum number of assets per market (default 100)
    @return: dict keyed by lowercase XCP/BTC asset name, each value being the
    top assets (by market cap in that denomination) that have a nonzero price,
    decorated with extended info where available
    """
    #do two queries because we limit by our sorted results, and we might miss an asset with a high DOGE trading value
    # but with little or no XDP trading activity, for instance if we just did one query
    assets_market_info_xcp = list(mongo_db.asset_market_info.find({}, {'_id': 0}).sort('market_cap_in_{}'.format(config.XCP.lower()), pymongo.DESCENDING).limit(limit))
    assets_market_info_btc = list(mongo_db.asset_market_info.find({}, {'_id': 0}).sort('market_cap_in_{}'.format(config.BTC.lower()), pymongo.DESCENDING).limit(limit))
    #drop assets with a falsy (zero/missing) price in the respective denomination
    assets_market_info = {
        config.XCP.lower(): [a for a in assets_market_info_xcp if a['price_in_{}'.format(config.XCP.lower())]],
        config.BTC.lower(): [a for a in assets_market_info_btc if a['price_in_{}'.format(config.BTC.lower())]]
    }
    #throw on extended info, if it exists for a given asset
    assets = list(set([a['asset'] for a in assets_market_info[config.XCP.lower()]] + [a['asset'] for a in assets_market_info[config.BTC.lower()]]))
    extended_asset_info = mongo_db.asset_extended_info.find({'asset': {'$in': assets}})
    extended_asset_info_dict = {}
    for e in extended_asset_info:
        if not e.get('disabled', False): #skip assets marked disabled
            extended_asset_info_dict[e['asset']] = e
    for r in (assets_market_info[config.XCP.lower()], assets_market_info[config.BTC.lower()]):
        for a in r:
            if a['asset'] in extended_asset_info_dict:
                extended_info = extended_asset_info_dict[a['asset']]
                #NOTE(review): this "processed yet?" guard tests key presence on the market-info
                # record `a`, unlike get_market_info which tests the extended record's 'processed'
                # flag — confirm this is intentional, as the keys then stay absent on skip
                if 'extended_image' not in a or 'extended_description' not in a or 'extended_website' not in a:
                    continue #asset has been recognized as having a JSON file description, but has not been successfully processed yet
                a['extended_image'] = bool(extended_info.get('image', ''))
                a['extended_description'] = extended_info.get('description', '')
                a['extended_website'] = extended_info.get('website', '')
            else:
                a['extended_image'] = a['extended_description'] = a['extended_website'] = ''
    return assets_market_info
@dispatcher.add_method
def get_market_price_history(asset1, asset2, start_ts=None, end_ts=None, as_dict=False):
    """Return aggregated market history data for the specified asset pair, within the specified date range.

    Aggregates on an hourly basis.
    @param start_ts: Unix epoch start time; defaults to 180 days before end_ts
    @param end_ts: Unix epoch end time; defaults to the current time
    @return: False if there is no trade data for the pair/range. Otherwise:
     * If as_dict is False, a list of lists, each having 8 elements:
       [interval time (epoch in ms), open, high, low, close, volume, # trades in interval, midline]
     * If as_dict is True, a list of dicts, each with the keys:
       interval_time (epoch in ms), open, high, low, close, vol, count, midline
    """
    def _interval_time_ms(group_id):
        #convert an hourly aggregation group _id into an epoch timestamp in milliseconds
        return int(time.mktime(datetime.datetime(
            group_id['year'], group_id['month'], group_id['day'], group_id['hour']).timetuple()) * 1000)

    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 180 days before the end date
        start_ts = end_ts - (180 * 24 * 60 * 60)
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)

    #get ticks -- open, high, low, close, volume
    result = mongo_db.trades.aggregate([
        {"$match": {
            "base_asset": base_asset,
            "quote_asset": quote_asset,
            #leave the range open-ended when end_ts is "now"
            "block_time": {
                "$gte": datetime.datetime.utcfromtimestamp(start_ts)
            } if end_ts == now_ts else {
                "$gte": datetime.datetime.utcfromtimestamp(start_ts),
                "$lte": datetime.datetime.utcfromtimestamp(end_ts)
            }
        }},
        {"$project": {
            "year":  {"$year": "$block_time"},
            "month": {"$month": "$block_time"},
            "day":   {"$dayOfMonth": "$block_time"},
            "hour":  {"$hour": "$block_time"},
            "block_index": 1,
            "unit_price": 1,
            "base_quantity_normalized": 1 #to derive volume
        }},
        {"$group": {
            "_id":   {"year": "$year", "month": "$month", "day": "$day", "hour": "$hour"},
            "open":  {"$first": "$unit_price"},
            "high":  {"$max": "$unit_price"},
            "low":   {"$min": "$unit_price"},
            "close": {"$last": "$unit_price"},
            "vol":   {"$sum": "$base_quantity_normalized"},
            "count": {"$sum": 1},
        }},
        {"$sort": SON([("_id.year", pymongo.ASCENDING), ("_id.month", pymongo.ASCENDING), ("_id.day", pymongo.ASCENDING), ("_id.hour", pymongo.ASCENDING)])},
    ])
    if not result['ok'] or not len(result['result']):
        return False
    result = result['result']

    midline = [((r['high'] + r['low']) / 2.0) for r in result]
    if as_dict:
        for i in xrange(len(result)):
            result[i]['interval_time'] = _interval_time_ms(result[i]['_id'])
            result[i]['midline'] = midline[i]
            del result[i]['_id']
        return result
    else:
        list_result = []
        for i in xrange(len(result)):
            list_result.append([
                _interval_time_ms(result[i]['_id']),
                result[i]['open'], result[i]['high'], result[i]['low'], result[i]['close'], result[i]['vol'],
                result[i]['count'], midline[i]
            ])
        return list_result
@dispatcher.add_method
def get_trade_history(asset1=None, asset2=None, start_ts=None, end_ts=None, limit=50):
    """
    Gets last N of trades within a specific date range (normally, for a specified asset pair, but this can
    be left blank to get any/all trades).

    @param limit: maximum number of trades to return (capped at 500)
    @return: trades newest-first, or False if there were no matching trades
    """
    assert (asset1 and asset2) or (not asset1 and not asset2) #cannot have one asset, but not the other
    if limit > 500:
        raise Exception("Requesting history of too many trades")

    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 30 days before the end date
        start_ts = end_ts - (30 * 24 * 60 * 60)

    #leave the range open-ended when end_ts is "now"
    if end_ts == now_ts:
        time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts)}
    else:
        time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts),
                       "$lte": datetime.datetime.utcfromtimestamp(end_ts)}
    query = {"block_time": time_filter}
    if asset1 and asset2:
        base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
        query["base_asset"] = base_asset
        query["quote_asset"] = quote_asset

    cursor = mongo_db.trades.find(query, {'_id': 0}).sort("block_time", pymongo.DESCENDING).limit(limit)
    if not cursor.count():
        return False #no suitable trade data to form a market price
    return list(cursor)
def _get_order_book(base_asset, quote_asset,
    bid_book_min_pct_fee_provided=None, bid_book_min_pct_fee_required=None, bid_book_max_pct_fee_required=None,
    ask_book_min_pct_fee_provided=None, ask_book_min_pct_fee_required=None, ask_book_max_pct_fee_required=None):
    """Gets the current order book for a specified asset pair.

    The *_pct_fee_* parameters only take effect when one side of the pair is DOGE; each book is
    then pruned to orders whose remaining DOGE fee provided/required, expressed as a fraction of
    the order's give/get quantity, falls within the given bounds. A bound of None means "no limit".

    @param bid_book_min_pct_fee_provided: min fraction of DOGE fee provided for bid-book orders
    @param bid_book_min_pct_fee_required: min fraction of DOGE fee required for bid-book orders
    @param bid_book_max_pct_fee_required: max fraction of DOGE fee required for bid-book orders
    @param ask_book_min_pct_fee_provided: min fraction of DOGE fee provided for ask-book orders
    @param ask_book_min_pct_fee_required: min fraction of DOGE fee required for ask-book orders
    @param ask_book_max_pct_fee_required: max fraction of DOGE fee required for ask-book orders
    @return: dict with the aggregated bid/ask books ('base_bid_book'/'base_ask_book'), running
    depth totals, bid/ask spread and median, the raw (unaggregated) orders, and the pair's assets
    @raise Exception: if either asset is not a tracked asset
    """
    base_asset_info = mongo_db.tracked_assets.find_one({'asset': base_asset})
    quote_asset_info = mongo_db.tracked_assets.find_one({'asset': quote_asset})
    if not base_asset_info or not quote_asset_info:
        raise Exception("Invalid asset(s)")

    #TODO: limit # results to 8 or so for each book (we have to sort as well to limit)
    #bids buy the base asset with the quote asset; asks sell the base asset for the quote asset
    base_bid_filters = [
        {"field": "get_asset", "op": "==", "value": base_asset},
        {"field": "give_asset", "op": "==", "value": quote_asset},
    ]
    base_ask_filters = [
        {"field": "get_asset", "op": "==", "value": quote_asset},
        {"field": "give_asset", "op": "==", "value": base_asset},
    ]
    if base_asset == config.BTC or quote_asset == config.BTC:
        extra_filters = [
            {'field': 'give_remaining', 'op': '>', 'value': 0}, #don't show empty DOGE orders
            {'field': 'get_remaining', 'op': '>', 'value': 0}, #don't show empty DOGE orders
            {'field': 'fee_required_remaining', 'op': '>=', 'value': 0},
            {'field': 'fee_provided_remaining', 'op': '>=', 'value': 0},
        ]
        base_bid_filters += extra_filters
        base_ask_filters += extra_filters

    base_bid_orders = util.call_jsonrpc_api("get_orders", {
        'filters': base_bid_filters,
        'show_expired': False,
        'status': 'open',
        'order_by': 'block_index',
        'order_dir': 'asc',
    }, abort_on_error=True)['result']

    base_ask_orders = util.call_jsonrpc_api("get_orders", {
        'filters': base_ask_filters,
        'show_expired': False,
        'status': 'open',
        'order_by': 'block_index',
        'order_dir': 'asc',
    }, abort_on_error=True)['result']

    def get_o_pct(o):
        """Return (pct_fee_provided, pct_fee_required) for an order; each is None unless the
        corresponding side of the order is DOGE."""
        if o['give_asset'] == config.BTC: #NB: fee_provided could be zero here
            pct_fee_provided = float(( D(o['fee_provided_remaining']) / D(o['give_quantity']) ))
        else: pct_fee_provided = None
        if o['get_asset'] == config.BTC: #NB: fee_required could be zero here
            pct_fee_required = float(( D(o['fee_required_remaining']) / D(o['get_quantity']) ))
        else: pct_fee_required = None
        return pct_fee_provided, pct_fee_required

    #filter results by pct_fee_provided and pct_fee_required for DOGE pairs as appropriate
    filtered_base_bid_orders = []
    filtered_base_ask_orders = []
    if base_asset == config.BTC or quote_asset == config.BTC:
        for o in base_bid_orders:
            pct_fee_provided, pct_fee_required = get_o_pct(o)
            addToBook = True
            if bid_book_min_pct_fee_provided is not None and pct_fee_provided is not None and pct_fee_provided < bid_book_min_pct_fee_provided:
                addToBook = False
            if bid_book_min_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required < bid_book_min_pct_fee_required:
                addToBook = False
            if bid_book_max_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required > bid_book_max_pct_fee_required:
                addToBook = False
            if addToBook: filtered_base_bid_orders.append(o)
        for o in base_ask_orders:
            pct_fee_provided, pct_fee_required = get_o_pct(o)
            addToBook = True
            if ask_book_min_pct_fee_provided is not None and pct_fee_provided is not None and pct_fee_provided < ask_book_min_pct_fee_provided:
                addToBook = False
            if ask_book_min_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required < ask_book_min_pct_fee_required:
                addToBook = False
            if ask_book_max_pct_fee_required is not None and pct_fee_required is not None and pct_fee_required > ask_book_max_pct_fee_required:
                addToBook = False
            if addToBook: filtered_base_ask_orders.append(o)
    else: #no DOGE in the pair -- no fee-based filtering applies
        filtered_base_bid_orders += base_bid_orders
        filtered_base_ask_orders += base_ask_orders

    def make_book(orders, isBidBook):
        """Aggregate raw orders into price levels: total remaining base quantity and order count
        at each unit price, sorted descending for the bid book and ascending for the ask book."""
        book = {}
        for o in orders:
            if o['give_asset'] == base_asset:
                if base_asset == config.BTC and o['give_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF:
                    continue #filter dust orders, if necessary

                give_quantity = util_bitcoin.normalize_quantity(o['give_quantity'], base_asset_info['divisible'])
                get_quantity = util_bitcoin.normalize_quantity(o['get_quantity'], quote_asset_info['divisible'])
                unit_price = float(( D(get_quantity) / D(give_quantity) ))
                remaining = util_bitcoin.normalize_quantity(o['give_remaining'], base_asset_info['divisible'])
            else:
                if quote_asset == config.BTC and o['give_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF:
                    continue #filter dust orders, if necessary

                give_quantity = util_bitcoin.normalize_quantity(o['give_quantity'], quote_asset_info['divisible'])
                get_quantity = util_bitcoin.normalize_quantity(o['get_quantity'], base_asset_info['divisible'])
                unit_price = float(( D(give_quantity) / D(get_quantity) ))
                remaining = util_bitcoin.normalize_quantity(o['get_remaining'], base_asset_info['divisible'])
            id = "%s_%s_%s" % (base_asset, quote_asset, unit_price)
            #^ key = {base}_{bid}_{unit_price}, values ref entries in book
            book.setdefault(id, {'unit_price': unit_price, 'quantity': 0, 'count': 0})
            book[id]['quantity'] += remaining #base quantity outstanding
            book[id]['count'] += 1 #num orders at this price level
        book = sorted(book.itervalues(), key=operator.itemgetter('unit_price'), reverse=isBidBook)
        #^ convert to list and sort -- bid book = descending, ask book = ascending
        return book

    #compile into a single book, at volume tiers
    base_bid_book = make_book(filtered_base_bid_orders, True)
    base_ask_book = make_book(filtered_base_ask_orders, False)

    #get stats like the spread and median
    if base_bid_book and base_ask_book:
        #don't do abs(), as this is "the amount by which the ask price exceeds the bid", so I guess it could be negative
        # if there is overlap in the book (right?)
        bid_ask_spread = float(( D(base_ask_book[0]['unit_price']) - D(base_bid_book[0]['unit_price']) ))

        bid_ask_median = float(( D( max(base_ask_book[0]['unit_price'], base_bid_book[0]['unit_price']) ) - (D(abs(bid_ask_spread)) / 2) ))
    else:
        bid_ask_spread = 0
        bid_ask_median = 0

    #compose depth and round out quantities
    #depth at each level = cumulative base quantity at this price and all better prices
    bid_depth = D(0)
    for o in base_bid_book:
        o['quantity'] = float(D(o['quantity']))
        bid_depth += D(o['quantity'])
        o['depth'] = float(D(bid_depth))
    bid_depth = float(D(bid_depth))
    ask_depth = D(0)
    for o in base_ask_book:
        o['quantity'] = float(D(o['quantity']))
        ask_depth += D(o['quantity'])
        o['depth'] = float(D(ask_depth))
    ask_depth = float(D(ask_depth))

    #compose raw orders
    orders = filtered_base_bid_orders + filtered_base_ask_orders
    for o in orders:
        #add in the blocktime to help makes interfaces more user-friendly (i.e. avoid displaying block
        # indexes and display datetimes instead)
        o['block_time'] = time.mktime(util.get_block_time(o['block_index']).timetuple()) * 1000

    #for orders where DOGE is the give asset, also return online status of the user
    for o in orders:
        if o['give_asset'] == config.BTC:
            r = mongo_db.btc_open_orders.find_one({'order_tx_hash': o['tx_hash']})
            o['_is_online'] = (r['wallet_id'] in siofeeds.onlineClients) if r else False
        else:
            o['_is_online'] = None #does not apply in this case

    result = {
        'base_bid_book': base_bid_book,
        'base_ask_book': base_ask_book,
        'bid_depth': bid_depth,
        'ask_depth': ask_depth,
        'bid_ask_spread': bid_ask_spread,
        'bid_ask_median': bid_ask_median,
        'raw_orders': orders,
        'base_asset': base_asset,
        'quote_asset': quote_asset
    }
    return result
@dispatcher.add_method
def get_order_book_simple(asset1, asset2, min_pct_fee_provided=None, max_pct_fee_required=None):
    """Get the order book for an asset pair, applying the same DOGE fee bounds
    to both the bid book and the ask book."""
    base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
    return _get_order_book(
        base_asset, quote_asset,
        bid_book_min_pct_fee_provided=min_pct_fee_provided,
        bid_book_max_pct_fee_required=max_pct_fee_required,
        ask_book_min_pct_fee_provided=min_pct_fee_provided,
        ask_book_max_pct_fee_required=max_pct_fee_required)
@dispatcher.add_method
def get_order_book_buysell(buy_asset, sell_asset, pct_fee_provided=None, pct_fee_required=None):
    """Get the order book for a purchase of buy_asset with sell_asset, from the perspective of
    the caller placing that order.

    When DOGE is one side of the pair, the caller's fee level is used to prune both books so
    that only orders compatible with (or competing at) that fee level are shown; otherwise all
    fee bounds stay None. raw_orders is then filtered down to only open orders selling buy_asset.

    @param pct_fee_provided: fraction of the DOGE quantity the caller would provide as a fee (when selling DOGE)
    @param pct_fee_required: fraction of the DOGE quantity the caller would require as a fee (when buying DOGE)
    """
    base_asset, quote_asset = util.assets_to_asset_pair(buy_asset, sell_asset)
    bid_book_min_pct_fee_provided = None
    bid_book_min_pct_fee_required = None
    bid_book_max_pct_fee_required = None
    ask_book_min_pct_fee_provided = None
    ask_book_min_pct_fee_required = None
    ask_book_max_pct_fee_required = None
    if base_asset == config.BTC:
        if buy_asset == config.BTC:
            #if DOGE is base asset and we're buying it, we're buying the BASE. we require a DOGE fee (we're on the bid (bottom) book and we want a lower price)
            # - show BASE buyers (bid book) that require a DOGE fee >= what we require (our side of the book)
            # - show BASE sellers (ask book) that provide a DOGE fee >= what we require
            bid_book_min_pct_fee_required = pct_fee_required #my competition at the given fee required
            ask_book_min_pct_fee_provided = pct_fee_required
        elif sell_asset == config.BTC:
            #if DOGE is base asset and we're selling it, we're selling the BASE. we provide a DOGE fee (we're on the ask (top) book and we want a higher price)
            # - show BASE buyers (bid book) that provide a DOGE fee >= what we provide
            # - show BASE sellers (ask book) that require a DOGE fee <= what we provide (our side of the book)
            bid_book_max_pct_fee_required = pct_fee_provided
            ask_book_min_pct_fee_provided = pct_fee_provided #my competition at the given fee provided
    elif quote_asset == config.BTC:
        assert base_asset == config.XCP #only time when this is the case
        if buy_asset == config.BTC:
            #if DOGE is quote asset and we're buying it, we're selling the BASE. we require a DOGE fee (we're on the ask (top) book and we want a higher price)
            # - show BASE buyers (bid book) that provide a DOGE fee >= what we require
            # - show BASE sellers (ask book) that require a DOGE fee >= what we require (our side of the book)
            bid_book_min_pct_fee_provided = pct_fee_required
            ask_book_min_pct_fee_required = pct_fee_required #my competition at the given fee required
        elif sell_asset == config.BTC:
            #if DOGE is quote asset and we're selling it, we're buying the BASE. we provide a DOGE fee (we're on the bid (bottom) book and we want a lower price)
            # - show BASE buyers (bid book) that provide a DOGE fee >= what we provide (our side of the book)
            # - show BASE sellers (ask book) that require a DOGE fee <= what we provide
            bid_book_min_pct_fee_provided = pct_fee_provided #my competition at the given fee provided
            ask_book_max_pct_fee_required = pct_fee_provided

    result = _get_order_book(base_asset, quote_asset,
        bid_book_min_pct_fee_provided=bid_book_min_pct_fee_provided,
        bid_book_min_pct_fee_required=bid_book_min_pct_fee_required,
        bid_book_max_pct_fee_required=bid_book_max_pct_fee_required,
        ask_book_min_pct_fee_provided=ask_book_min_pct_fee_provided,
        ask_book_min_pct_fee_required=ask_book_min_pct_fee_required,
        ask_book_max_pct_fee_required=ask_book_max_pct_fee_required)

    #filter down raw_orders to be only open sell orders for what the caller is buying
    open_sell_orders = []
    for o in result['raw_orders']:
        if o['give_asset'] == buy_asset:
            open_sell_orders.append(o)
    result['raw_orders'] = open_sell_orders
    return result
@dispatcher.add_method
def get_transaction_stats(start_ts=None, end_ts=None):
    """Return daily transaction counts broken out by message category, for the
    given date range (defaults to the last 360 days).

    @return: list of {'name': category, 'data': [[day_ms, count], ...]} entries,
    with every category zero-filled for every day that appears in any category
    """
    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 360 days before the end date
        start_ts = end_ts - (360 * 24 * 60 * 60)

    #leave the range open-ended when end_ts is "now"
    if end_ts == now_ts:
        time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts)}
    else:
        time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts),
                       "$lte": datetime.datetime.utcfromtimestamp(end_ts)}
    stats = mongo_db.transaction_stats.aggregate([
        {"$match": {"block_time": time_filter}},
        {"$project": {
            "year":  {"$year": "$block_time"},
            "month": {"$month": "$block_time"},
            "day":   {"$dayOfMonth": "$block_time"},
            "category": 1,
        }},
        {"$group": {
            "_id": {"year": "$year", "month": "$month", "day": "$day", "category": "$category"},
            "count": {"$sum": 1},
        }}
    ])
    rows = stats['result'] if stats['ok'] else []

    #collect the set of day timestamps seen, and per-category counts keyed by day
    seen_days = {}
    per_category = {}
    for row in rows:
        rid = row['_id']
        day_ms = int(time.mktime(datetime.datetime(rid['year'], rid['month'], rid['day']).timetuple()) * 1000)
        seen_days[day_ms] = True
        per_category.setdefault(rid['category'], {})[day_ms] = row['count']
    sorted_days = sorted(seen_days.keys())

    #give every category a data point for every day seen, zero-filling the gaps
    categories_list = []
    for category in per_category:
        counts = per_category[category]
        series = [[day_ms, counts.get(day_ms, 0)] for day_ms in sorted_days]
        categories_list.append({'name': category, 'data': series})
    return categories_list
@dispatcher.add_method
def get_wallet_stats(start_ts=None, end_ts=None):
    """Return total wallet counts per network, plus daily login/active/new-wallet
    time series for mainnet and testnet over the given date range (defaults to
    the last 360 days)."""
    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 360 days before the end date
        start_ts = end_ts - (360 * 24 * 60 * 60)

    num_wallets_mainnet = mongo_db.preferences.find({'network': 'mainnet'}).count()
    num_wallets_testnet = mongo_db.preferences.find({'network': 'testnet'}).count()
    num_wallets_unknown = mongo_db.preferences.find({'network': None}).count()

    #leave the range open-ended when end_ts is "now"
    if end_ts == now_ts:
        when_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts)}
    else:
        when_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts),
                       "$lte": datetime.datetime.utcfromtimestamp(end_ts)}

    wallet_stats = []
    for net in ['mainnet', 'testnet']:
        cursor = mongo_db.wallet_stats.find({"when": when_filter, 'network': net}).sort('when', pymongo.ASCENDING)
        new_wallet_counts = []
        login_counts = []
        distinct_login_counts = []
        for rec in cursor:
            day_ms = int(time.mktime(datetime.datetime(
                rec['when'].year, rec['when'].month, rec['when'].day).timetuple()) * 1000)
            if 'distinct_login_count' in rec:
                distinct_login_counts.append([day_ms, rec['distinct_login_count']])
            if 'login_count' in rec:
                login_counts.append([day_ms, rec['login_count']])
            if 'new_count' in rec:
                new_wallet_counts.append([day_ms, rec['new_count']])
        wallet_stats.append({'name': '%s: Logins' % net.capitalize(), 'data': login_counts})
        wallet_stats.append({'name': '%s: Active Wallets' % net.capitalize(), 'data': distinct_login_counts})
        wallet_stats.append({'name': '%s: New Wallets' % net.capitalize(), 'data': new_wallet_counts})

    return {
        'num_wallets_mainnet': num_wallets_mainnet,
        'num_wallets_testnet': num_wallets_testnet,
        'num_wallets_unknown': num_wallets_unknown,
        'wallet_stats': wallet_stats}
@dispatcher.add_method
def get_owned_assets(addresses):
    """Gets a list of owned assets for one or more addresses"""
    cursor = mongo_db.tracked_assets.find(
        {'owner': {"$in": addresses}}, {"_id": 0}).sort("asset", pymongo.ASCENDING)
    return list(cursor)
@dispatcher.add_method
def get_asset_pair_market_info(asset1=None, asset2=None, limit=50):
    """Return market info for a specific asset pair (pass both assets), or the
    top asset pairs by completed trade count (pass neither asset)."""
    assert (asset1 and asset2) or (asset1 is None and asset2 is None)
    if asset1 and asset2:
        base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2)
        cursor = mongo_db.asset_pair_market_info.find(
            {'base_asset': base_asset, 'quote_asset': quote_asset}, {'_id': 0})
    else:
        #sort by completed trade count for now; may want to sort by a market_cap value in the future
        cursor = mongo_db.asset_pair_market_info.find({}, {'_id': 0}).sort(
            'completed_trades_count', pymongo.DESCENDING).limit(limit)
    return list(cursor) or []
@dispatcher.add_method
def get_asset_extended_info(asset):
    """Return the extended info record for an asset, or False if none exists."""
    record = mongo_db.asset_extended_info.find_one({'asset': asset}, {'_id': 0})
    if not record:
        return False
    return record
@dispatcher.add_method
def get_asset_history(asset, reverse=False):
    """
    Returns a list of changes for the specified asset, from its inception to the current time.

    @param asset: The asset to retrieve a history on
    @param reverse: By default, the history is returned in the order of oldest to newest. Set this parameter to True
    to return items in the order of newest to oldest.

    @return:
    Changes are returned as a list of dicts, with each dict having the following format:
    * type: One of 'created', 'issued_more', 'changed_description', 'locked', 'transferred', 'called_back'
    * 'at_block': The block number this change took effect
    * 'at_block_time': The block time this change took effect

    * IF type = 'created': Has the following fields, as specified when the asset was initially created:
      * owner, description, divisible, locked, total_issued, total_issued_normalized
    * IF type = 'issued_more':
      * 'additional': The additional quantity issued (raw)
      * 'additional_normalized': The additional quantity issued (normalized)
      * 'total_issued': The total issuance after this change (raw)
      * 'total_issued_normalized': The total issuance after this change (normalized)
    * IF type = 'changed_description':
      * 'prev_description': The old description
      * 'new_description': The new description
    * IF type = 'locked': NO EXTRA FIELDS
    * IF type = 'transferred':
      * 'prev_owner': The address the asset was transferred from
      * 'new_owner': The address the asset was transferred to
    * IF type = 'called_back':
      * 'percentage': The percentage of the asset called back (between 0 and 100)
    """
    asset = mongo_db.tracked_assets.find_one({'asset': asset}, {"_id": 0})
    if not asset:
        raise Exception("Unrecognized asset")

    #run down through _history and compose a diff log
    history = []
    raw = asset['_history'] + [asset,] #oldest to newest. add on the current state
    prev = None
    for i in xrange(len(raw)): #oldest to newest
        if i == 0: #oldest entry is always the creation of the asset
            assert raw[i]['_change_type'] == 'created'
            history.append({
                'type': 'created',
                'owner': raw[i]['owner'],
                'description': raw[i]['description'],
                'divisible': raw[i]['divisible'],
                'locked': raw[i]['locked'],
                'total_issued': raw[i]['total_issued'],
                'total_issued_normalized': raw[i]['total_issued_normalized'],
                'at_block': raw[i]['_at_block'],
                'at_block_time': time.mktime(raw[i]['_at_block_time'].timetuple()) * 1000,
            })
            prev = raw[i]
            continue

        assert prev
        if raw[i]['_change_type'] == 'locked':
            history.append({
                'type': 'locked',
                'at_block': raw[i]['_at_block'],
                'at_block_time': time.mktime(raw[i]['_at_block_time'].timetuple()) * 1000,
            })
        elif raw[i]['_change_type'] == 'transferred':
            history.append({
                'type': 'transferred',
                'at_block': raw[i]['_at_block'],
                'at_block_time': time.mktime(raw[i]['_at_block_time'].timetuple()) * 1000,
                'prev_owner': prev['owner'],
                'new_owner': raw[i]['owner'],
            })
        elif raw[i]['_change_type'] == 'changed_description':
            history.append({
                'type': 'changed_description',
                'at_block': raw[i]['_at_block'],
                'at_block_time': time.mktime(raw[i]['_at_block_time'].timetuple()) * 1000,
                'prev_description': prev['description'],
                'new_description': raw[i]['description'],
            })
        else: #issue additional
            assert raw[i]['total_issued'] - prev['total_issued'] > 0
            history.append({
                'type': 'issued_more',
                'at_block': raw[i]['_at_block'],
                'at_block_time': time.mktime(raw[i]['_at_block_time'].timetuple()) * 1000,
                'additional': raw[i]['total_issued'] - prev['total_issued'],
                'additional_normalized': raw[i]['total_issued_normalized'] - prev['total_issued_normalized'],
                'total_issued': raw[i]['total_issued'],
                'total_issued_normalized': raw[i]['total_issued_normalized'],
            })
        prev = raw[i]

    #get callbacks externally via the cpd API, and merge in with the asset history we composed
    callbacks = util.call_jsonrpc_api("get_callbacks",
        {'filters': {'field': 'asset', 'op': '==', 'value': asset['asset']}}, abort_on_error=True)['result']

    def _make_callback_entry(callback):
        #compose a history entry for a single callback record
        block_time = util.get_block_time(callback['block_index'])
        assert block_time
        return {
            'type': 'called_back',
            'at_block': callback['block_index'],
            'at_block_time': time.mktime(block_time.timetuple()) * 1000,
            'percentage': callback['fraction'] * 100,
        }

    #merge the block-ordered callbacks into the history. (The previous implementation indexed
    # callbacks[0] unconditionally -- IndexError once the list was drained -- dropped the history
    # entry whenever a callback preceded it, and discarded callbacks occurring after the last
    # history entry; this merge handles all three cases.)
    final_history = []
    for e in history: #history goes from earliest to latest
        #flush all callbacks that took effect before this history entry
        while callbacks and callbacks[0]['block_index'] < e['at_block']:
            final_history.append(_make_callback_entry(callbacks.pop(0)))
        final_history.append(e)
    while callbacks: #callbacks that took effect after the last history entry
        final_history.append(_make_callback_entry(callbacks.pop(0)))

    if reverse: final_history.reverse()
    return final_history
@dispatcher.add_method
def record_btc_open_order(wallet_id, order_tx_hash):
    """Records an association between a wallet ID and order TX ID for a trade where DOGE is being SOLD, to allow
    buyers to see which sellers of the DOGE are "online" (which can lead to a better result as a BTCpay will be required
    to complete any trades where DOGE is involved, and the seller (or at least their wallet) must be online for this to happen"""
    #the wallet ID must refer to an existing wallet
    if not mongo_db.preferences.find_one({"wallet_id": wallet_id}):
        raise Exception("WalletID does not exist")
    mongo_db.btc_open_orders.insert({
        'order_tx_hash': order_tx_hash,
        'wallet_id': wallet_id,
        'when_created': datetime.datetime.utcnow()
    })
    return True
@dispatcher.add_method
def cancel_btc_open_order(wallet_id, order_tx_hash):
    """Remove a previously-recorded DOGE open order association.

    wallet_id is matched as well (used more for security, so random folks
    can't remove orders from this collection just by tx hash).
    """
    mongo_db.btc_open_orders.remove({'wallet_id': wallet_id, 'order_tx_hash': order_tx_hash})
    return True
@dispatcher.add_method
def get_balance_history(asset, addresses, normalize=True, start_ts=None, end_ts=None):
    """Retrieves the ordered balance history for a list of addresses and a given asset, within the specified date range.

    @param normalize: If set to True, return quantities that (if the asset is divisible) have been divided by 100M (satoshi).
    @return: A list of {'name': address, 'data': [(block_time_ms, new_balance), ...]} entries, one per address.
    """
    if not isinstance(addresses, list):
        raise Exception("addresses must be a list of addresses, even if it just contains one address")
    asset_info = mongo_db.tracked_assets.find_one({'asset': asset})
    if not asset_info:
        raise Exception("Asset does not exist.")

    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 30 days before the end date
        start_ts = end_ts - (30 * 24 * 60 * 60)

    #leave the range open-ended when end_ts is "now"
    if end_ts == now_ts:
        time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts)}
    else:
        time_filter = {"$gte": datetime.datetime.utcfromtimestamp(start_ts),
                       "$lte": datetime.datetime.utcfromtimestamp(end_ts)}

    history = []
    for addr in addresses:
        changes = mongo_db.balance_changes.find({
            'address': addr,
            'asset': asset,
            "block_time": time_filter
        }).sort("block_time", pymongo.ASCENDING)
        points = []
        for change in changes:
            balance = change['new_balance_normalized'] if normalize else change['new_balance']
            points.append((time.mktime(change['block_time'].timetuple()) * 1000, balance))
        history.append({'name': addr, 'data': points})
    return history
@dispatcher.add_method
def get_num_users_online():
    """Return the number of clients currently attached to the chat feed."""
    #gets the current number of users attached to the server's chat feed
    return len(siofeeds.onlineClients)
@dispatcher.add_method
def is_chat_handle_in_use(handle):
    """Return True if ``handle`` is already taken (case-insensitive match)."""
    #escape the handle before embedding it in the regex: otherwise regex
    # metacharacters in user input (e.g. ".*") would match unrelated handles
    results = mongo_db.chat_handles.find({ 'handle': { '$regex': '^%s$' % re.escape(handle), '$options': 'i' } })
    return True if results.count() else False
@dispatcher.add_method
def get_chat_handle(wallet_id):
    """Look up the chat handle record for a wallet; returns False if none exists."""
    record = mongo_db.chat_handles.find_one({"wallet_id": wallet_id})
    if not record:
        return False  # no handle stored for this wallet
    # Refresh the last-touched timestamp on every lookup.
    record['last_touched'] = time.mktime(time.gmtime())
    mongo_db.chat_handles.save(record)
    data = {
        'handle': re.sub('[^\sA-Za-z0-9_-]', "", record['handle']),
        'is_op': record.get('is_op', False),
        'last_updated': record.get('last_updated', None),
    }
    banned_until = record.get('banned_until', None)
    if banned_until is None or banned_until == -1:
        data['banned_until'] = banned_until  # pass -1 or None through unchanged
    else:
        data['banned_until'] = int(time.mktime(banned_until.timetuple())) * 1000  # epoch ts in ms
    return data
@dispatcher.add_method
def store_chat_handle(wallet_id, handle):
    """Set or update a chat handle.

    Raises on a bad type/syntax or when the handle is claimed by another
    wallet; returns True on success (including re-saving this wallet's own
    handle).
    """
    if not isinstance(handle, basestring):
        raise Exception("Invalid chat handle: bad data type")
    if not re.match(r'^[\sA-Za-z0-9_-]{4,12}$', handle):
        raise Exception("Invalid chat handle: bad syntax/length")
    #see if this handle already exists (case insensitive); escape the handle
    # so it is always treated literally inside the regex
    results = mongo_db.chat_handles.find({ 'handle': { '$regex': '^%s$' % re.escape(handle), '$options': 'i' } })
    if results.count():
        if results[0]['wallet_id'] == wallet_id:
            return True #handle already saved for this wallet ID
        else:
            raise Exception("Chat handle already is in use")
    #NOTE(review): the exists-check and the upsert below are not atomic; two
    # concurrent calls could both pass the check -- confirm a unique index backs this
    mongo_db.chat_handles.update(
        {'wallet_id': wallet_id},
        {"$set": {
            'wallet_id': wallet_id,
            'handle': handle,
            'last_updated': time.mktime(time.gmtime()),
            'last_touched': time.mktime(time.gmtime())
        }
        }, upsert=True)
    #^ last_updated MUST be in UTC, as it will be compared against other servers
    return True
@dispatcher.add_method
def get_chat_history(start_ts=None, end_ts=None, handle=None, limit=1000):
    """Return up to ``limit`` chat lines (newest first) within the time range,
    optionally filtered to a single handle. Returns False if nothing matched."""
    now_ts = time.mktime(datetime.datetime.utcnow().timetuple())
    if not end_ts: #default to current datetime
        end_ts = now_ts
    if not start_ts: #default to 30 days before the end date
        start_ts = end_ts - (30 * 24 * 60 * 60)
    if limit >= 5000:
        raise Exception("Requesting too many lines (limit too high)")
    filters = {
        "when": {
            "$gte": datetime.datetime.utcfromtimestamp(start_ts)
        } if end_ts == now_ts else {
            "$gte": datetime.datetime.utcfromtimestamp(start_ts),
            "$lte": datetime.datetime.utcfromtimestamp(end_ts)
        }
    }
    if handle:
        filters['handle'] = handle
    chat_history = mongo_db.chat_history.find(filters, {'_id': 0}).sort("when", pymongo.DESCENDING).limit(limit)
    if not chat_history.count():
        return False #no chat history matched the requested filters
    chat_history = list(chat_history)
    return chat_history
@dispatcher.add_method
def is_wallet_online(wallet_id):
    """Return True if the wallet is currently attached to the socket.io feed."""
    return wallet_id in siofeeds.onlineClients
@dispatcher.add_method
def get_preferences(wallet_id, for_login=False, network=None):
    """Gets stored wallet preferences
    @param network: only required if for_login is specified. One of: 'mainnet' or 'testnet'
    @return: dict with the decoded preferences and last-updated timestamp,
        or False if no preferences are stored for this wallet
    """
    if network not in (None, 'mainnet', 'testnet'):
        raise Exception("Invalid network parameter setting")
    if for_login and network is None:
        raise Exception("network parameter required if for_login is set")
    result = mongo_db.preferences.find_one({"wallet_id": wallet_id})
    if not result: return False #doesn't exist
    #(removed an unused local that converted last_touched to a date and discarded it)
    now = datetime.datetime.utcnow()
    if for_login: #record user login
        ip = flask.request.headers.get('X-Real-Ip', flask.request.remote_addr)
        ua = flask.request.headers.get('User-Agent', '')
        mongo_db.login_history.insert({'wallet_id': wallet_id, 'when': now, 'network': network, 'action': 'login', 'ip': ip, 'ua': ua})
    #touch the record so we know when this wallet was last seen
    result['last_touched'] = time.mktime(time.gmtime())
    mongo_db.preferences.save(result)
    return {
        'preferences': json.loads(result['preferences']),
        'last_updated': result.get('last_updated', None)
    }
@dispatcher.add_method
def store_preferences(wallet_id, preferences, for_login=False, network=None, referer=None):
    """Stores freeform wallet preferences
    @param network: only required if for_login is specified. One of: 'mainnet' or 'testnet'
    """
    if network not in (None, 'mainnet', 'testnet'):
        raise Exception("Invalid network parameter setting")
    if for_login and network is None:
        raise Exception("network parameter required if for_login is set")
    if not isinstance(preferences, dict):
        raise Exception("Invalid preferences object")
    try:
        preferences_json = json.dumps(preferences)
    except (TypeError, ValueError): #narrowed from a bare except, which would also trap SystemExit et al
        raise Exception("Cannot dump preferences to JSON")
    now = datetime.datetime.utcnow()
    #sanity check around max size
    if len(preferences_json) >= PREFERENCES_MAX_LENGTH:
        raise Exception("Preferences object is too big.")
    if for_login: #mark this as a new signup IF the wallet doesn't exist already
        existing_record = mongo_db.login_history.find({'wallet_id': wallet_id, 'network': network, 'action': 'create'})
        if existing_record.count() == 0:
            ip = flask.request.headers.get('X-Real-Ip', flask.request.remote_addr)
            ua = flask.request.headers.get('User-Agent', '')
            mongo_db.login_history.insert({'wallet_id': wallet_id, 'when': now,
                'network': network, 'action': 'create', 'referer': referer, 'ip': ip, 'ua': ua})
            mongo_db.login_history.insert({'wallet_id': wallet_id, 'when': now,
                'network': network, 'action': 'login', 'ip': ip, 'ua': ua}) #also log a wallet login
    now_ts = time.mktime(time.gmtime())
    mongo_db.preferences.update(
        {'wallet_id': wallet_id},
        {'$set': {
            'wallet_id': wallet_id,
            'preferences': preferences_json,
            'last_updated': now_ts,
            'last_touched': now_ts },
         '$setOnInsert': {'when_created': now_ts, 'network': network}
        }, upsert=True)
    #^ last_updated MUST be in GMT, as it will be compared against other servers
    return True
@dispatcher.add_method
def proxy_to_counterpartyd(method='', params=None):
    """Forward a JSON-RPC call to counterpartyd, caching results via redis when enabled.

    @param method: counterpartyd API method name ('sql' is explicitly disallowed)
    @param params: parameters for the proxied call (defaults to an empty list)
    @return: the 'result' member of the proxied JSON-RPC response
    """
    if params is None:
        params = [] #avoid a shared mutable default argument
    if method=='sql': raise Exception("Invalid method")
    result = None
    cache_key = None
    if redis_client: #check for a precached result and send that back instead
        cache_key = method + '||' + base64.b64encode(json.dumps(params).encode()).decode()
        #^ must use encoding (e.g. base64) since redis doesn't allow spaces in its key names
        # (also shortens the hashing key for better performance)
        result = redis_client.get(cache_key)
        if result:
            try:
                result = json.loads(result)
            except Exception as e:
                logging.warn("Error loading JSON from cache: %s, cached data: '%s'" % (e, result))
                result = None #skip from reading from cache and just make the API call
    if result is None: #cache miss or cache disabled
        result = util.call_jsonrpc_api(method, params)
        if redis_client: #cache miss
            redis_client.setex(cache_key, DEFAULT_COUNTERPARTYD_API_CACHE_PERIOD, json.dumps(result))
            #^TODO: we may want to have different cache periods for different types of data
    if 'error' in result:
        if result['error'].get('data', None):
            errorMsg = result['error']['data'].get('message', result['error']['message'])
        else:
            errorMsg = json.dumps(result['error'])
        raise Exception(errorMsg.encode('ascii','ignore'))
        #decode out unicode for now (json-rpc lib was made for python 3.3 and does str(errorMessage) internally,
        # which messes up w/ unicode under python 2.x)
    return result['result']
@dispatcher.add_method
def get_bets(bet_type, feed_address, deadline, target_value=None, leverage=5040):
    """Find bets matching the given type/feed/deadline criteria."""
    bets = betting.find_bets(bet_type, feed_address, deadline, target_value=target_value, leverage=leverage)
    return bets
@dispatcher.add_method
def get_user_bets(addresses = [], status="open"):
    """Find bets placed by the given addresses with the given status.

    NOTE(review): ``addresses`` is a mutable default argument; safe only as
    long as callees never mutate it -- TODO confirm.
    """
    bets = betting.find_user_bets(mongo_db, addresses, status)
    return bets
@dispatcher.add_method
def get_feed(address_or_url = ''):
    """Look up a single betting feed by source address or URL."""
    feed = betting.find_feed(mongo_db, address_or_url)
    return feed
@dispatcher.add_method
def get_feeds_by_source(addresses = []):
    """Look up betting feeds published by any of the given source addresses."""
    feed = betting.get_feeds_by_source(mongo_db, addresses)
    return feed
@dispatcher.add_method
def parse_base64_feed(base64_feed):
    """Decode and parse a base64-encoded feed description."""
    feed = betting.parse_base64_feed(base64_feed)
    return feed
@dispatcher.add_method
def get_open_rps_count(possible_moves = 3, exclude_addresses = []):
    """Count open rock-paper-scissors games with the given number of possible moves."""
    return rps.get_open_rps_count(possible_moves, exclude_addresses)
@dispatcher.add_method
def get_user_rps(addresses):
    """List rock-paper-scissors games involving any of the given addresses."""
    return rps.get_user_rps(addresses)
@dispatcher.add_method
def get_users_pairs(addresses=[], max_pairs=12):
    """Return up to ``max_pairs`` market pairs the given addresses participate in."""
    return dex.get_users_pairs(addresses, max_pairs)
@dispatcher.add_method
def get_market_orders(asset1, asset2, addresses=[], min_fee_provided=0.95, max_fee_required=0.95):
    """List open DEX orders for the asset pair, optionally filtered by address."""
    return dex.get_market_orders(asset1, asset2, addresses, None, min_fee_provided, max_fee_required)
@dispatcher.add_method
def get_market_trades(asset1, asset2, addresses=[], limit=100):
    """List up to ``limit`` completed trades for the asset pair."""
    return dex.get_market_trades(asset1, asset2, addresses, limit)
@dispatcher.add_method
def get_markets_list():
    """Return summary data for the known markets."""
    return dex.get_markets_list(mongo_db)
@dispatcher.add_method
def get_market_details(asset1, asset2, min_fee_provided=0.95, max_fee_required=0.95):
    """Return detailed market data for the asset pair.

    Exceptions are caught and logged (with traceback); None is returned in
    that case, mirroring the previous implicit fall-through behavior.
    """
    try:
        return dex.get_market_details(asset1, asset2, min_fee_provided, max_fee_required, mongo_db)
    except Exception as e:
        #logging.exception preserves the traceback that logging.error(e) discarded
        logging.exception("get_market_details failed for pair %s/%s" % (asset1, asset2))
        return None #make the implicit failure result explicit
@dispatcher.add_method
def get_pubkey_for_address(address):
    """Return the public key for an address, or False when it cannot be derived."""
    #returns None if the address has made 0 transactions (as we wouldn't be able to get the public key)
    return blockchain.get_pubkey_for_address(address) or False
@dispatcher.add_method
def create_armory_utx(unsigned_tx_hex, public_key_hex):
    """Serialize an unsigned tx into Armory's ASCII transfer format via armory_utxsvr."""
    if not config.ARMORY_UTXSVR_ENABLE:
        raise Exception("Support for this feature is not enabled on this system")
    port = config.ARMORY_UTXSVR_PORT_TESTNET if config.TESTNET else config.ARMORY_UTXSVR_PORT_MAINNET
    endpoint = "http://127.0.0.1:%s/" % port
    payload = {'unsigned_tx_hex': unsigned_tx_hex, 'public_key_hex': public_key_hex}
    response = util.call_jsonrpc_api("serialize_unsigned_tx", params=payload, endpoint=endpoint, abort_on_error=True)
    return response['result']
@dispatcher.add_method
def convert_armory_signedtx_to_raw_hex(signed_tx_ascii):
    """Convert an Armory ASCII signed transaction into raw hex via armory_utxsvr."""
    if not config.ARMORY_UTXSVR_ENABLE:
        raise Exception("Support for this feature is not enabled on this system")
    port = config.ARMORY_UTXSVR_PORT_TESTNET if config.TESTNET else config.ARMORY_UTXSVR_PORT_MAINNET
    endpoint = "http://127.0.0.1:%s/" % port
    payload = {'signed_tx_ascii': signed_tx_ascii}
    response = util.call_jsonrpc_api("convert_signed_tx_to_raw_hex", params=payload, endpoint=endpoint, abort_on_error=True)
    return response['result']
@dispatcher.add_method
def create_support_case(name, from_email, problem, screenshot=None, addtl_info=''):
    """create an email with the information received
    @param screenshot: The base64 text of the screenshot itself, prefixed with data=image/png ...,
    @param addtl_info: A JSON-encoded string of a dict with additional information to include in the support request
    """
    import smtplib
    import email.utils
    from email.header import Header
    from email.MIMEMultipart import MIMEMultipart
    from email.MIMEBase import MIMEBase
    from email.MIMEText import MIMEText
    from email.mime.image import MIMEImage
    if not config.SUPPORT_EMAIL:
        raise Exception("Sending of support emails are disabled on the server: no SUPPORT_EMAIL address set")
    if not email.utils.parseaddr(from_email)[1]: #should have been validated in the form
        raise Exception("Invalid support email address")
    try:
        if screenshot:
            screenshot_data = screenshot.split(',', 1)[1]
            screenshot_data_decoded = base64.b64decode(screenshot_data)
    except Exception: #narrowed from a bare except (which would also trap SystemExit et al)
        raise Exception("screenshot data format unexpected")
    try:
        addtl_info = json.loads(addtl_info)
        addtl_info = json.dumps(addtl_info, indent=1, sort_keys=False)
    except (TypeError, ValueError): #only JSON encode/decode problems map to this error
        raise Exception("addtl_info data format unexpected")
    from_email_formatted = email.utils.formataddr((name, from_email))
    msg = MIMEMultipart()
    msg['Subject'] = Header((problem[:75] + '...') if len(problem) > 75 else problem, 'utf-8')
    msg['From'] = from_email_formatted
    msg['Reply-to'] = from_email_formatted
    msg['To'] = config.SUPPORT_EMAIL
    msg['Date'] = email.utils.formatdate(localtime=True)
    msg_text = MIMEText("""Problem: %s\n\nAdditional Info:\n%s""" % (problem, addtl_info))
    msg.attach(msg_text)
    if screenshot:
        image = MIMEImage(screenshot_data_decoded, name="screenshot.png")
        msg.attach(image)
    server = smtplib.SMTP(config.EMAIL_SERVER)
    try:
        server.sendmail(from_email, config.SUPPORT_EMAIL, msg.as_string())
    finally:
        server.quit() #always close the SMTP connection, even if sendmail raises
    return True
def _set_cors_headers(response):
    """Attach permissive CORS headers to ``response`` when enabled in config."""
    if not config.RPC_ALLOW_CORS:
        return
    cors_headers = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
        'Access-Control-Allow-Headers': 'DNT,X-Mx-ReqToken,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type',
    }
    for header, value in cors_headers.items():
        response.headers[header] = value
@app.route('/', methods=["OPTIONS",])
@app.route('/api/', methods=["OPTIONS",])
def handle_options():
    """Answer CORS preflight requests with an empty 204 response."""
    response = flask.Response('', 204)
    _set_cors_headers(response)
    return response
@app.route('/', methods=["GET",])
@app.route('/api/', methods=["GET",])
def handle_get():
    """Health-check endpoint: pings counterpartyd and this counterblockd instance.

    Also accepts CSP violation reports (Content-Type application/csp-report),
    which are logged and acknowledged with an empty 200 response.
    """
    if flask.request.headers.get("Content-Type", None) == 'application/csp-report':
        try:
            data_json = flask.request.get_data().decode('utf-8')
            data = json.loads(data_json)
            assert 'csp-report' in data
        except Exception as e:
            obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(data="Invalid JSON-RPC 2.0 request format")
            return flask.Response(obj_error.json.encode(), 200, mimetype='application/json')
        tx_logger.info("***CSP SECURITY --- %s" % data_json)
        return flask.Response('', 200)
    #"ping" counterpartyd to test
    cpd_s = time.time()
    cpd_result_valid = True
    try:
        cpd_status = util.call_jsonrpc_api("get_running_info", abort_on_error=True)['result']
    except Exception: #narrowed from a bare except; the ping is still best-effort
        cpd_result_valid = False
    cpd_e = time.time()
    #"ping" counterblockd to test, as well
    cbd_s = time.time()
    cbd_result_valid = True
    cbd_result_error_code = None
    payload = {
        "id": 0,
        "jsonrpc": "2.0",
        "method": "is_ready",
        "params": [],
    }
    client = None #so the finally clause is safe even if HTTPClient creation fails
    try:
        url = URL("http://127.0.0.1:%s/api/" % config.RPC_PORT)
        client = HTTPClient.from_url(url)
        r = client.post(url.request_uri, body=json.dumps(payload), headers={'content-type': 'application/json'})
    except Exception as e:
        cbd_result_valid = False
        cbd_result_error_code = "GOT EXCEPTION: %s" % e
    else:
        if r.status_code != 200:
            cbd_result_valid = False
            cbd_result_error_code = "GOT STATUS %s" % r.status_code if r else 'COULD NOT CONTACT'
        cbd_result = json.loads(r.read())
        if 'error' in cbd_result: #check the parsed JSON-RPC body, not the HTTP response object
            cbd_result_valid = False
            cbd_result_error_code = "GOT ERROR: %s" % cbd_result['error']
    finally:
        if client is not None:
            client.close()
    cbd_e = time.time()
    response_code = 200
    if not cpd_result_valid or not cbd_result_valid:
        response_code = 500
    result = {
        'counterpartyd': 'OK' if cpd_result_valid else 'NOT OK',
        'counterblockd': 'OK' if cbd_result_valid else 'NOT OK',
        'counterblockd_error': cbd_result_error_code,
        'counterpartyd_ver': '%s.%s.%s' % (
            cpd_status['version_major'], cpd_status['version_minor'], cpd_status['version_revision']) if cpd_result_valid else '?',
        'counterblockd_ver': config.VERSION,
        'counterpartyd_last_block': cpd_status['last_block'] if cpd_result_valid else '?',
        'counterpartyd_last_message_index': cpd_status['last_message_index'] if cpd_result_valid else '?',
        'counterpartyd_check_elapsed': cpd_e - cpd_s,
        'counterblockd_check_elapsed': cbd_e - cbd_s,
        'local_online_users': len(siofeeds.onlineClients),
    }
    return flask.Response(json.dumps(result), response_code, mimetype='application/json')
@app.route('/', methods=["POST",])
@app.route('/api/', methods=["POST",])
def handle_post():
    """Main JSON-RPC 2.0 endpoint: validate, dispatch, log and answer a request."""
    def error_response(obj_error, status_code=200):
        #wrap a jsonrpc error object in a flask response with CORS headers applied
        response = flask.Response(obj_error.json.encode(), status_code, mimetype='application/json')
        _set_cors_headers(response)
        return response
    #don't do anything if we're not caught up
    if not util.is_caught_up_well_enough_for_government_work():
        #^ 525 is a custom response code we use for this one purpose
        return error_response(
            jsonrpc.exceptions.JSONRPCServerError(data="Server is not caught up. Please try again later."), 525)
    try:
        request_json = flask.request.get_data().decode('utf-8')
        request_data = json.loads(request_json)
        assert 'id' in request_data and request_data['jsonrpc'] == "2.0" and request_data['method']
        # params may be omitted
    except:
        return error_response(jsonrpc.exceptions.JSONRPCInvalidRequest(data="Invalid JSON-RPC 2.0 request format"))
    #only arguments passed as a dict are supported
    if request_data.get('params', None) and not isinstance(request_data['params'], dict):
        return error_response(jsonrpc.exceptions.JSONRPCInvalidRequest(
            data='Arguments must be passed as a JSON object (list of unnamed arguments not supported)'))
    rpc_response = jsonrpc.JSONRPCResponseManager.handle(request_json, dispatcher)
    rpc_response_json = json.dumps(rpc_response.data, default=util.json_dthandler).encode()
    #log the request data
    try:
        assert 'method' in request_data
        tx_logger.info("TRANSACTION --- %s ||| REQUEST: %s ||| RESPONSE: %s" % (request_data['method'], request_json, rpc_response_json))
    except Exception as e:
        logging.info("Could not log transaction: Invalid format: %s" % e)
    response = flask.Response(rpc_response_json, 200, mimetype='application/json')
    _set_cors_headers(response)
    return response
#make a new RotatingFileHandler for the access log.
api_logger = logging.getLogger("api_log")
h = logging_handlers.RotatingFileHandler(os.path.join(config.DATA_DIR, "api.access.log"), 'a', API_MAX_LOG_SIZE, API_MAX_LOG_COUNT)
api_logger.setLevel(logging.INFO)
api_logger.addHandler(h)
api_logger.propagate = False #keep access-log lines out of the root logger's handlers
#hack to allow wsgiserver logging to use python logging module...
def trimlog(log, msg):
    #strip the trailing newline the WSGI server appends, then log via the logging module
    log.info(msg.rstrip())
#give the logger a write() method so it can be passed where a file-like log object is expected
api_logger.write = functools.partial(trimlog, api_logger)
#start up the API listener/handler (blocks, serving requests until terminated)
server = wsgi.WSGIServer((config.RPC_HOST, int(config.RPC_PORT)), app, log=api_logger)
server.serve_forever()
|
{
"content_hash": "cc7e928078d2f23d6c0bc4ad0b36b14a",
"timestamp": "",
"source": "github",
"line_count": 1649,
"max_line_length": 199,
"avg_line_length": 49.40933899332929,
"alnum_prop": 0.5728312631940694,
"repo_name": "patricklodder/dogeblockd",
"id": "561f59a5e3b5bc0200a6691d1b33bfe8b2c5da8a",
"size": "81476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "292394"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Version components; don't add anything after each value.
major = 0
minor = 0
patch = 1
# define custom only if you want to add something after M.N.P<-custom>
# (note: no delimiter character will be added automatically)
custom = None
__version__ = str(major) + "." + str(minor) + "." + str(patch)
# Honor the documented custom suffix (it was previously defined but never applied).
if custom is not None:
    __version__ += custom
|
{
"content_hash": "d85df78c8b4d89bd87cefbf7becde1d2",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 70,
"avg_line_length": 19.8,
"alnum_prop": 0.6893939393939394,
"repo_name": "Stibbons/python-project-bootstrap",
"id": "0c10863bf83113b5111d9676cd19256c5e0835cc",
"size": "396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ppb/cli/version.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "13084"
}
],
"symlink_target": ""
}
|
import sys
from setuptools import setup
import moflow
# This package relies on syntax/stdlib behavior introduced in Python 2.6.
if sys.version_info[0:2] < (2, 6):
    sys.exit('Requires Python 2.6 or later')
# Package metadata, passed straight through to setuptools.
setup(
    name='moflow',
    description='A Python package for MODFLOW and related programs',
    version=moflow.__version__,
    author=moflow.__author__,
    author_email=moflow.__email__,
    license='BSD',
    packages=['moflow'],
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    zip_safe=True,
)
|
{
"content_hash": "197540a17aea1fa7dbf396e218113c15",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 71,
"avg_line_length": 22.695652173913043,
"alnum_prop": 0.6130268199233716,
"repo_name": "mwtoews/moflow",
"id": "aebef0331c98bb2fa5afed747dcc3c8f3fc0851f",
"size": "545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "109122"
}
],
"symlink_target": ""
}
|
import sys
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class MicrosoftDatadogClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Configuration for MicrosoftDatadogClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The ID of the target subscription. Required.
    :type subscription_id: str
    :keyword api_version: Api Version. Default value is "2022-06-01". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    """

    def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
        super(MicrosoftDatadogClientConfiguration, self).__init__(**kwargs)
        api_version = kwargs.pop("api_version", "2022-06-01")  # type: Literal["2022-06-01"]
        # Both required parameters are validated before anything is stored.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = api_version
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", "mgmt-datadog/{}".format(VERSION))
        self._configure(**kwargs)

    def _configure(self, **kwargs: Any) -> None:
        """Build the pipeline policies, honoring any caller-supplied overrides in kwargs."""
        policy_defaults = (
            ("user_agent_policy", policies.UserAgentPolicy),
            ("headers_policy", policies.HeadersPolicy),
            ("proxy_policy", policies.ProxyPolicy),
            ("logging_policy", policies.NetworkTraceLoggingPolicy),
            ("http_logging_policy", ARMHttpLoggingPolicy),
            ("retry_policy", policies.RetryPolicy),
            ("custom_hook_policy", policies.CustomHookPolicy),
            ("redirect_policy", policies.RedirectPolicy),
        )
        for attr_name, policy_factory in policy_defaults:
            setattr(self, attr_name, kwargs.get(attr_name) or policy_factory(**kwargs))
        self.authentication_policy = kwargs.get("authentication_policy")
        # Only synthesize an auth policy when a credential exists and the caller
        # did not provide one explicitly.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = ARMChallengeAuthenticationPolicy(
                self.credential, *self.credential_scopes, **kwargs
            )
|
{
"content_hash": "99493ce93ed2dcf05e9f049d2c1b9c98",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 107,
"avg_line_length": 49.53731343283582,
"alnum_prop": 0.7095510695992769,
"repo_name": "Azure/azure-sdk-for-python",
"id": "ff713bd00fcd140175a95b9c1d259c1b8bf6fccc",
"size": "3787",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/datadog/azure-mgmt-datadog/azure/mgmt/datadog/_configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
import subprocess
import os
from contextlib import contextmanager
import click
import cpuinfo
import requests
# Upstream repository the PR branches are fetched from.
BASE_REPO_URL = 'https://github.com/lumol-org/lumol.git'
# Collapsible markdown block for one commit's raw `cargo bench` output.
# NOTE(review): textually identical to COMPARISON_TEMPLATE below -- presumably
# kept separate so the two can diverge later; confirm before merging them.
INDIVIDUAL_BENCH_TEMPLATE = """
<details><summary>{sha} {title}</summary>
<p>\n\n
\n\n
```bash\n
{body}\n\n
```\n\n
</p></details>
"""
# Collapsible markdown block for a master-vs-commit cargo-benchcmp comparison.
COMPARISON_TEMPLATE = """
<details><summary>{sha} {title}</summary>
<p>\n\n
\n\n
```bash\n
{body}\n\n
```\n\n
</p></details>
"""
def _get_environment_variable(name):
    """Return the value of environment variable ``name``; raise a helpful error if unset."""
    value = os.environ.get(name, None)
    if value is None:
        raise Exception(
            'The environment variable {} must be set to access Github API'.format(name))
    return value
def check_for_cargo_benchcmp():
    """Fail fast with an actionable message if cargo-benchcmp is not installed."""
    try:
        subprocess.check_output('cargo benchcmp --help', shell=True)
    except subprocess.CalledProcessError as e:
        #chain the original error so the failing exit status is not lost
        raise Exception("cargo-benchcmp is not installed, please install with "
                        "`cargo install cargo-benchcmp`") from e
def check_for_environment_variables():
    """
    Used to check early that the required env vars are set.
    """
    #each call raises if the variable is missing; the returned values are discarded
    _get_environment_variable('LUMOL_GH_USERNAME')
    _get_environment_variable('LUMOL_GH_TOKEN')
def request_api(endpoint, data=None):
    """GET (data is None) or POST (data given) a lumol Github API endpoint; returns parsed JSON."""
    url = 'https://api.github.com/repos/lumol-org/lumol' + endpoint
    auth = (_get_environment_variable('LUMOL_GH_USERNAME'),
            _get_environment_variable('LUMOL_GH_TOKEN'))
    if data is None:
        response = requests.get(url, auth=auth)
    else:
        response = requests.post(url, auth=auth, json=data)
    return response.json()
def get_commit_descriptions(n_commits=None):
    """
    Get hash and title of the `n_commits` latest commits on the PR.

    Also adds the commit at the HEAD of master in the end. If there
    are more than `n_commits` on the PR, only the ones on the PR
    are kept.

    :param n_commits: maximum number of commits to keep (None keeps all)
    :return: list of (short_sha, title) tuples
    """
    cmd = 'git log --format="%h" _bot_remote/master^.._bot_pr'
    sha = subprocess.check_output(cmd, shell=True).decode('utf-8')
    cmd = 'git log --format="%s" _bot_remote/master^.._bot_pr'
    titles = subprocess.check_output(cmd, shell=True).decode('utf-8')
    # NOTE(review): git's output ends with a newline, so split('\n') yields a
    # trailing empty string and the list ends with an ('', '') pair; downstream
    # code treats the last entry as the master baseline -- confirm this is
    # intentional (a .strip() before splitting would remove it).
    descriptions = list(zip(sha.split('\n'), titles.split('\n')))
    if n_commits is not None:
        n_commits = n_commits - 1 # Always keep the commit from master
        assert(n_commits > 0)
        descriptions = descriptions[:min(len(descriptions), n_commits)]
    return descriptions
def clean_repo():
    """Drop the temporary branch/remote this tool creates, returning to master."""
    for cmd in ('git checkout master',
                'git branch -D _bot_pr',
                'git remote remove _bot_remote'):
        subprocess.call(cmd, shell=True)
@contextmanager
def setup_repo(pr_id):
    """Context manager: fetch PR ``pr_id`` into a temporary `_bot_pr` branch,
    check it out, and restore/clean the repo state on exit (even on error)."""
    clean_repo()
    subprocess.call('git remote add _bot_remote {}'.format(BASE_REPO_URL), shell=True)
    subprocess.call('git fetch _bot_remote master', shell=True)
    subprocess.call('git fetch _bot_remote pull/{}/head:_bot_pr'.format(pr_id), shell=True)
    subprocess.call('git checkout _bot_pr', shell=True)
    try:
        yield
    finally:
        clean_repo()
class Benchmarker:
    """Runs `cargo bench` for each commit of a PR and posts a summary comment.

    Benchmark output is written to ``<output_dir>/<sha>.txt``; comparisons are
    produced with cargo-benchcmp against the last entry of
    ``commit_descriptions`` (treated as the master baseline).
    """
    def __init__(self, n_commits, output_dir):
        self.commit_descriptions = get_commit_descriptions(n_commits)
        self.output_dir = os.path.abspath(os.path.expanduser(output_dir))
    def run_warmup(self):
        """Run the benchmark suite a few times so timings settle before measuring."""
        print('=================== Warming up ==============================')
        for _ in range(3):
            subprocess.check_output('cargo bench', shell=True)
    def run_bench(self, sha):
        """Run `cargo bench` for the currently checked-out commit, saving to <sha>.txt."""
        cmd = 'cargo bench > {}/{}.txt'.format(self.output_dir, sha)
        subprocess.call(cmd, shell=True)
    def run_all_benches(self):
        """Check out and benchmark every commit in ``commit_descriptions``."""
        for sha, title in self.commit_descriptions:
            print('=================== Benchmarking commit {} =============================='.format(sha))
            subprocess.call('git checkout {}'.format(sha), shell=True)
            self.run_bench(sha)
        print('=================== Done ==============================')
    def compare_benches(self):
        """Return {sha: benchcmp output} comparing each PR commit against master."""
        comparisons = {}
        master_sha, _ = self.commit_descriptions[-1]
        for sha, title in self.commit_descriptions[:-1]:
            cmd = 'cargo benchcmp {dir}/{sha_1}.txt {dir}/{sha_2}.txt --threshold 2 --variance' \
                .format(dir=self.output_dir, sha_1=master_sha, sha_2=sha)
            out = subprocess.check_output(cmd, shell=True).decode('utf-8')
            comparisons[sha] = out
        return comparisons
    def comment_pr(self, pr_id):
        """Assemble the markdown summary and post it as a comment on PR ``pr_id``."""
        # Comparison benchmarks
        master_sha, master_title = self.commit_descriptions[-1]
        comment = '## Comparing to master ({})\nusing `--threshold 2, latest commit first`'.format(master_sha)
        comparisons = self.compare_benches()
        for sha, title in self.commit_descriptions[:-1]:
            compare = comparisons[sha]
            comment += COMPARISON_TEMPLATE.format(sha=sha, title=title, body=compare)
        # Individual benchmarks
        comment += '\n## Individual benchmarks\n'
        for k, (sha, title) in enumerate(self.commit_descriptions):
            with open('{}/{}.txt'.format(self.output_dir, sha)) as f:
                bench = f.read()
            comment += INDIVIDUAL_BENCH_TEMPLATE.format(sha=sha, title=title, body=bench)
        info = cpuinfo.get_cpu_info()
        if info is not None:
            comment += '\n<br>**CPU**: {}'.format(info['brand'])
        # Emit the request
        data = {
            'body': comment
        }
        request_api('/issues/{}/comments'.format(pr_id), data)
        print('Comment posted on PR.')
@click.command()
@click.argument('pr_ids', type=click.INT, nargs=-1)
@click.option('--output-dir', '-o', default='./target/benchmarks/', help='output directory')
@click.option('--n-commits', '-n', type=click.INT, help='maximum number of commits to benchmark')
def main(pr_ids, output_dir, n_commits):
    """
    Run the benchmarks for multiple commits on a PR and compare to master.

    The benchmark results are saved in --output-dir, and a comment
    with a summary will be automatically added to the PR.

    This script requires the environment variables LUMOL_GH_USERNAME
    and LUMOL_GH_TOKEN to contain respectively the Github username
    and a personal access token.
    """
    #fail fast before doing any git or benchmark work
    check_for_cargo_benchcmp()
    check_for_environment_variables()
    os.makedirs(output_dir, exist_ok=True)
    for pr_id in pr_ids:
        #each PR gets its own temporary branch/remote, cleaned up on exit
        with setup_repo(pr_id):
            benchmarker = Benchmarker(n_commits, output_dir)
            benchmarker.run_warmup()
            benchmarker.run_all_benches()
            benchmarker.comment_pr(pr_id)
if __name__ == '__main__':
    main()
|
{
"content_hash": "f90c80723a09e1cb0b3aaf72fb48bbb8",
"timestamp": "",
"source": "github",
"line_count": 207,
"max_line_length": 110,
"avg_line_length": 32.28502415458937,
"alnum_prop": 0.6160407002843035,
"repo_name": "g-bauer/lumol",
"id": "91060177ed66f5e08e7f9f41d8f63898fea0e630",
"size": "6721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/benchmark-pr.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "8398"
},
{
"name": "Rust",
"bytes": "663571"
},
{
"name": "Shell",
"bytes": "1480"
}
],
"symlink_target": ""
}
|
"""Tests for `odroid_fan_controller` package."""
import pytest
from click.testing import CliRunner
from odroid_fan_controller import odroid_fan_controller
from odroid_fan_controller import cli
@pytest.fixture
def response():
    """Sample pytest fixture.
    See more at: http://doc.pytest.org/en/latest/fixture.html
    """
    # import requests
    # return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
    #placeholder: currently yields None; enable the lines above for a real response
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
def test_command_line_interface():
    """Exercise the CLI entry point, both bare and with ``--help``."""
    runner = CliRunner()

    plain = runner.invoke(cli.main)
    assert plain.exit_code == 0
    assert 'odroid_fan_controller.cli.main' in plain.output

    helped = runner.invoke(cli.main, ['--help'])
    assert helped.exit_code == 0
    assert '--help Show this message and exit.' in helped.output
|
{
"content_hash": "e25331d6b928dfd41a65b9b51b8ea29f",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 78,
"avg_line_length": 29.314285714285713,
"alnum_prop": 0.7076023391812866,
"repo_name": "tr0yspradling/odroid-fan-controller",
"id": "3b1f87044a33f49666c2a3fce3fce3e812765322",
"size": "1073",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_odroid_fan_controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2301"
},
{
"name": "Python",
"bytes": "6641"
}
],
"symlink_target": ""
}
|
"""
=================
Threading Classes
=================
:Author:
Moritz Emanuel Beber
:Date:
2011-02-26
:Copyright:
Copyright(c) 2011 Jacobs University of Bremen. All rights reserved.
:File:
threaded.py
Notes
-----
Most classes (unless they inherit from old-style classes) are new-style classes.
Attributes and methods not intended to be accessed directly by the user are
preceded by a single underscore '_' but they can be used if the user knows
what he is doing. Those preceded with two underscores '__' should under no
circumstances be touched.
"""
import os
import threading
import logging
import paramiko
import socket
import math
from Queue import Queue
from .errors import NetworkError
class ThreadPoolWorker(threading.Thread):
    """
    Daemon thread that consumes (callable, args, kwargs) tasks from a queue.
    """

    def __init__(self, queue, exception_queue=None):
        """
        Bind the task queue (and an optional failure queue) and mark the
        thread as daemon so it never blocks interpreter shutdown.
        """
        threading.Thread.__init__(self)
        self._queue = queue
        self._exception_queue = exception_queue
        self.daemon = True

    def run(self):
        """
        Consume tasks forever; failed tasks are forwarded to the exception
        queue (when one was given) together with the raised error.
        """
        while True:
            job, pos_args, named_args = self._queue.get()
            try:
                job(*pos_args, **named_args)
            except StandardError as exc:
                if self._exception_queue:
                    self._exception_queue.put((exc, job, pos_args, named_args))
            finally:
                self._queue.task_done()
class ThreadPool(object):
    """
    Fixed-size pool of ThreadPoolWorker daemon threads fed from one queue.

    Bug fix: the original declared this with ``def`` instead of ``class``,
    so ``ThreadPool(n)`` defined a nested function's locals and returned
    ``None`` — no workers were ever started.
    """

    def __init__(self, num_threads, retry=False):
        """
        Spawn ``num_threads`` workers.

        :param num_threads: int: number of worker threads to start
        :param retry: bool: when True, failed tasks are collected on
            ``self.exception_queue`` for later inspection/retry
        """
        object.__init__(self)
        self.queue = Queue()
        if retry:
            self.exception_queue = Queue()
        else:
            self.exception_queue = None
        # range works on both Python 2 and 3 (the original used xrange)
        for i in range(num_threads):
            w = ThreadPoolWorker(self.queue, self.exception_queue)
            w.start()

    def put(self, perform, *args, **kw_args):
        """Schedule ``perform(*args, **kw_args)`` for execution by a worker."""
        self.queue.put((perform, args, kw_args))

    def join(self):
        """Block until every queued task has been processed."""
        self.queue.join()
class RemoteSetup(object):
    """
    Manage one remote host over SSH (paramiko): probe CPU count and usage,
    launch batch jobs inside a detached ``screen`` session, and kill remote
    processes by command name.

    Bug fixes vs the original:
    - ``one_time_cmd`` raised ``NetworkError("...: %s", stderr)`` which never
      interpolated stderr into the message; it is now formatted with ``%``.
    - ``kill`` logged ``stdout`` inside the ``except NetworkError`` branch,
      where it can be unbound (NameError on the first failing iteration);
      stdout is now logged on the success path only.
    """

    def __init__(self, host, options, *args, **kwargs):
        """
        :param host: str: remote host name or address
        :param options: object exposing ssh_port, username, password,
            auto_add and buf_size attributes (command-line options)
        """
        object.__init__(self)
        self._host = str(host)
        self.name = "%s@%s" % (self.__class__.__name__, self._host)
        self.logger = logging.getLogger(self.name)
        self._child_name = "%s.SSHClient" % self.name
        self._child_logger = logging.getLogger(self._child_name)
        # keep paramiko's own logging out of the parent logger
        self._child_logger.propagate = 0
        self._options = options
        self._client = None
        self._n_cpus = None
        self._cpu_usage = None
        # serializes concurrent access to ~/.ssh/known_hosts
        self._io_lock = threading.Lock()

    def __del__(self):
        """Best-effort close of the SSH client on garbage collection."""
        self.close()

    def close(self):
        """Close the SSH connection if one is open."""
        if self._client:
            self._client.close()

    def make_ssh_connection(self):
        """
        Create a paramiko SSHClient, load/save known host keys, and connect.

        :raises NetworkError: on any paramiko or socket failure
        """
        # create the communication instance
        self.logger.debug("Creating SSHClient instance")
        self._client = paramiko.SSHClient()
        # set logging for it
        self.logger.debug("Setting log channel")
        self._client.set_log_channel(self._child_name)
        self.logger.debug("Setting missing host key policies")
        if self._options.auto_add:
            self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        else:
            self._client.set_missing_host_key_policy(paramiko.WarningPolicy())
        self.logger.debug("Loading known host keys")
        self._io_lock.acquire()
        try:
            self._client.load_host_keys(
                    os.path.expanduser("~/.ssh/known_hosts"))
        except IOError as err:
            self.logger.exception(str(err))
            # how to proceed when loading of host keys fails?
            # right now making the connection probably still fails so all is well
        finally:
            self._io_lock.release()
        self.logger.debug("Making connection")
        try:
            self._client.connect(hostname=self._host, port=self._options.ssh_port,
                    username=self._options.username, password=self._options.password)
        except paramiko.BadHostKeyException:
            raise NetworkError("Bad Host Key")
        except paramiko.AuthenticationException:
            raise NetworkError("Authentication Error")
        except paramiko.SSHException:
            raise NetworkError("Connection Error")
        except socket.error:
            raise NetworkError("Socket Error")
        else:
            self.logger.info("Connection established and authenticated")
            self._io_lock.acquire()
            self._client.save_host_keys(os.path.expanduser("~/.ssh/known_hosts"))
            self._io_lock.release()

    def one_time_cmd(self, cmd):
        """
        Execute one remote command and return its stdout.

        :raises NetworkError: when execution fails or the command produced
            only stderr output
        """
        try:
            (stdin_fh, stdout_fh, stderr_fh) = self._client.exec_command(cmd,\
                    self._options.buf_size)
        except paramiko.SSHException:
            raise NetworkError("Failed to execute remote command")
        stderr = stderr_fh.read()
        stdout = stdout_fh.read()
        if stderr and not stdout:
            # bug fix: interpolate stderr into the message (the original
            # passed it as a second constructor argument, never formatted)
            raise NetworkError("Remote command failed with: %s" % stderr)
        else:
            return stdout

    def _detect_ncpus(self):
        """
        Detect the remote CPU count via /proc/cpuinfo (Linux) or sysctl
        (MacOS); falls back to 1 when neither works.
        """
        # get number of cpus on linux
        cmd = "grep -c 'model name' '/proc/cpuinfo'"
        stdout = self.one_time_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)
            stdout = stdout.split("\n")
            for line in stdout:
                try:
                    self._n_cpus = int(line)
                except ValueError:
                    continue
                else:
                    return
        # no CPUs detected, i.e., cmd caused an error
        # will use pty on MacOS as well for consistency
        cmd = "sysctl -n hw.ncpu"
        stdout = self.one_time_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)
            stdout = stdout.split("\n")
            for line in stdout:
                try:
                    self._n_cpus = int(line)
                except ValueError:
                    continue
                else:
                    return
        # return the default value
        self.logger.warning("Could not detect number of CPUs,"\
                " assuming default '1'")
        self._n_cpus = 1

    def _detect_cpu_usage(self, num_probes=10.0):
        """
        Sample CPU usage with ``vmstat`` over ``num_probes`` seconds and store
        the ceiled mean idle-column value; falls back to 0 on failure.
        """
        # for linux, unix, and macosx that's why both -e and -a
        cmd = "vmstat 1 %d" % num_probes
        stdout = self.one_time_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)
            stdout = stdout.split("\n")
            total = 0.
            for line in stdout:
                if not line:
                    continue
                tmp = line.split()
                # only want to parse lines that start with numbers
                try:
                    float(tmp[0])
                except ValueError:
                    continue
                # cheap trick not to parse ordinary text, like %CPU header
                # ps --no-headers not available on mac, for example
                try:
                    total += float(tmp[12])
                except ValueError:
                    continue
            self._cpu_usage = math.ceil(total / num_probes)
            return
        # default usage
        self.logger.warning("Could not detect CPU usage, assuming 0 %%")
        self._cpu_usage = 0.

    def remote_shell_cmd(self, cmd, timeout=20.):
        """
        Run ``cmd`` in an interactive remote shell and collect output until
        the shell prompt reappears or the channel times out.
        """
        try:
            channel = self._client.invoke_shell()
        except paramiko.SSHException:
            raise NetworkError("Failed to invoke remote shell")
        if channel.gettimeout():
            self.logger.debug("Channel timeout: %f", channel.gettimeout())
        else:
            channel.settimeout(timeout)
        try:
            channel.sendall(cmd)
        except socket.timeout:
            channel.close()
            raise NetworkError("Connection timed out")
        stdout = ""
        # heuristic prompt used to detect command completion
        expect = "%s@%s:~>\r\n" % (self._options.username, self._host)
        while True:
            try:
                stdout += channel.recv(self._options.buf_size)
                if stdout.endswith(expect):
                    break
            except socket.timeout:
                break
        channel.close()
        return stdout

    def _setup_job(self, lower, upper, shell_file="batch_jobs.sh"):
        """
        Start ``shell_file lower upper`` inside a detached screen session
        named 'batch_simulation' on the remote host.
        """
        cmd = "screen -dmS batch_simulation %s %d %d\n"\
                % (shell_file, lower, upper)
        # we only have to check for immediate errors of running this command
        # not sure how to do that atm
        stdout = self.remote_shell_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)

    def usage(self):
        """
        Connect, then return the number of remote CPUs still free after
        subtracting current usage; 0 when the connection fails.
        """
        self.logger.debug("Establishing SSH connection...")
        try:
            self.make_ssh_connection()
        except NetworkError as err:
            self.logger.debug(str(err))
            return 0
        self.logger.debug("Detecting number of CPUs...")
        self._detect_ncpus()
        self.logger.debug("There are %d CPUs online", self._n_cpus)
        self.logger.debug("Detecting CPU usage...")
        self._detect_cpu_usage()
        self.logger.debug("Usage is: %f", self._cpu_usage)
        # compare work load with number of cpus present
        self._cpu_usage = round(self._n_cpus * self._cpu_usage / 100.0, 0)
        self._n_cpus = self._n_cpus - int(self._cpu_usage)
        self.logger.debug("Number of CPUs to use: %d", self._n_cpus)
        self.logger.debug("Closing client")
        self._client.close()
        return self._n_cpus

    def run(self, lower, upper, shell_file="batch_jobs.sh"):
        """
        Connect and launch the batch job for the [lower, upper] range;
        returns None when the connection fails.
        """
        self.logger.debug("Establishing SSH connection...")
        try:
            self.make_ssh_connection()
        except NetworkError as err:
            self.logger.debug(str(err))
            return None
        # start simulations
        self._setup_job(lower, upper, shell_file)
        self.logger.info("Remote job started")
        self._client.close()

    def _detect_processes(self, *args):
        """
        Return the PIDs of the current user's remote processes whose command
        names match any of ``args``.
        """
        pids = list()
        for comm in args:
            cmd = "ps -u %s -o pid,comm | grep %s | grep -v grep" %\
                    (self._options.username, comm)
            stdout = self.one_time_cmd(cmd)
            if stdout:
                self.logger.debug(stdout)
                stdout = stdout.split("\n")
                for line in stdout:
                    # cheap trick not to parse ordinary text, like %CPU header
                    try:
                        pids.append(int(line.split()[0]))
                    except ValueError:
                        continue
                    except IndexError:
                        break
        return pids

    def kill(self, *args):
        """
        Connect and kill every remote process matching the given command
        names; returns how many kill commands succeeded.
        """
        self.logger.debug("Establishing SSH connection...")
        try:
            self.make_ssh_connection()
        except NetworkError as err:
            self.logger.debug(str(err))
            return 0
        self.logger.debug("Killing process(es)...")
        pids = self._detect_processes(*args)
        self.logger.debug(pids)
        killed = 0
        for pid in pids:
            cmd = "kill %d" % pid
            try:
                stdout = self.one_time_cmd(cmd)
            except NetworkError as err:
                # bug fix: the original also logged ``stdout`` here, which is
                # unbound when the very first kill command fails
                self.logger.debug(str(err))
            else:
                if stdout:
                    self.logger.debug(stdout)
                killed += 1
        self.logger.debug("Closing client")
        self._client.close()
        return killed
|
{
"content_hash": "12d0c4dc038ebea54a771b486da935f5",
"timestamp": "",
"source": "github",
"line_count": 378,
"max_line_length": 82,
"avg_line_length": 31.746031746031747,
"alnum_prop": 0.5305833333333333,
"repo_name": "Midnighter/Everyday-Utilities",
"id": "4b4b2f2a5cc5a21e823ed6ddc192cc2b1fb5bf1c",
"size": "12048",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meb/utils/threaded.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "140440"
}
],
"symlink_target": ""
}
|
from AlgorithmImports import *
### <summary>
### Demonstration algorithm for the Warm Up feature with basic indicators.
### </summary>
### <meta name="tag" content="indicators" />
### <meta name="tag" content="warm up" />
### <meta name="tag" content="history and warm up" />
### <meta name="tag" content="using data" />
class WarmupAlgorithm(QCAlgorithm):
    """Warm up demonstration: a fast/slow EMA momentum switch on SPY."""

    def Initialize(self):
        """Set dates, cash, the SPY subscription, the EMA pair, and warm up."""
        self.SetStartDate(2013, 10, 8)
        self.SetEndDate(2013, 10, 11)
        self.SetCash(100000)
        # Find more symbols here: http://quantconnect.com/data
        self.AddEquity("SPY", Resolution.Second)

        fast_period, slow_period = 60, 3600
        self.fast = self.EMA("SPY", fast_period)
        self.slow = self.EMA("SPY", slow_period)
        # Pump historical bars through the indicators before trading starts.
        self.SetWarmup(slow_period)
        self.first = True

    def OnData(self, data):
        """Primary data entry point; each new data point is pumped in here."""
        # Log the indicator sample counts once, on the first post-warmup bar.
        if self.first and not self.IsWarmingUp:
            self.first = False
            self.Log("Fast: {0}".format(self.fast.Samples))
            self.Log("Slow: {0}".format(self.slow.Samples))

        # Long when the fast EMA is above the slow EMA, short otherwise.
        if self.fast.Current.Value > self.slow.Current.Value:
            self.SetHoldings("SPY", 1)
        else:
            self.SetHoldings("SPY", -1)
|
{
"content_hash": "7c5f85479cd71c774715ccc3d5592b71",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 151,
"avg_line_length": 37.390243902439025,
"alnum_prop": 0.6262230919765166,
"repo_name": "StefanoRaggi/Lean",
"id": "d3ef7d9cc7f545bd6f3b3d68dcf9301fa4bc9052",
"size": "2221",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Algorithm.Python/WarmupAlgorithm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1841"
},
{
"name": "C#",
"bytes": "21581235"
},
{
"name": "CSS",
"bytes": "10299"
},
{
"name": "Dockerfile",
"bytes": "1352"
},
{
"name": "F#",
"bytes": "1646"
},
{
"name": "HTML",
"bytes": "15710"
},
{
"name": "Java",
"bytes": "852"
},
{
"name": "Jupyter Notebook",
"bytes": "22419"
},
{
"name": "Python",
"bytes": "1012106"
},
{
"name": "Shell",
"bytes": "1390"
},
{
"name": "Visual Basic .NET",
"bytes": "2448"
}
],
"symlink_target": ""
}
|
"""
Observer pattern "events" implemented as a decorator.
Based on: http://stackoverflow.com/a/1926336
Example usage::
# producer
class MyJob(object):
@event
def progress(pct):
'''Called when progress is made. pct is the percent complete.'''
def run(self):
n = 10
for i in range(n+1):
self.progress(100.0 * i / n)
#consumer
job = myjobs.MyJob()
job.progress += lambda pct: sys.stdout.write("%.1f%% done\n" % pct)
job.run()
"""
__all__ = ['event']
class event(object):
    """
    Event decorator. An event function supports the += and -= operators for
    adding and removing listeners. Directly calling the event fires it.

    Bug fix: accessing the event on the class itself (``MyJob.progress``)
    raised AttributeError because ``obj`` is None there; the descriptor now
    returns itself in that case, the standard descriptor convention.
    """

    def __init__(self, func):
        """Wrap *func*: keep its docstring and derive a per-instance key."""
        self.__doc__ = func.__doc__
        # The leading space cannot collide with any regular attribute name.
        self._key = ' ' + func.__name__

    def __get__(self, obj, cls):
        """Return the per-instance BoundEvent, creating it on first access."""
        if obj is None:
            # accessed on the class: expose the descriptor (e.g. for help())
            return self
        try:
            return obj.__dict__[self._key]
        except KeyError:
            be = obj.__dict__[self._key] = BoundEvent()
            return be
class BoundEvent(object):
    """Listener list bound to one object; calling it fires every listener."""

    def __init__(self):
        self._fns = []

    def __iadd__(self, fn):
        """Subscribe *fn* via the ``+=`` operator."""
        self._fns.append(fn)
        return self

    def __isub__(self, fn):
        """Unsubscribe *fn* via the ``-=`` operator."""
        self._fns.remove(fn)
        return self

    def __call__(self, *args, **kwargs):
        """Fire the event, invoking each listener with the given arguments."""
        # iterate a snapshot so listeners may (un)subscribe during dispatch
        for listener in list(self._fns):
            listener(*args, **kwargs)
|
{
"content_hash": "e0a78177510f5d900c83ff02df66138b",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 76,
"avg_line_length": 22.933333333333334,
"alnum_prop": 0.5254360465116279,
"repo_name": "riggsd/davies",
"id": "06db486f6b772ad5eb0e923635de60ccb2a7df2b",
"size": "1376",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "davies/event.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "319"
},
{
"name": "Python",
"bytes": "67814"
}
],
"symlink_target": ""
}
|
"""
Trading environment class
date: 12/10/2017
author: Tau
"""
from ..datafeed import *
from ..spaces import *
from .utils import *
from ..utils import *
from ..core import Env
import os
import smtplib
from socket import gaierror
from datetime import datetime, timedelta, timezone
from decimal import localcontext, ROUND_UP, Decimal
from time import sleep
import pandas as pd
import empyrical as ec
import optunity as ot
from bokeh.layouts import column
from bokeh.palettes import inferno
from bokeh.plotting import figure, show
from bokeh.models import HoverTool, Legend, Span, Label
from ..exchange_api.poloniex import ExchangeError
# Environments
class TradingEnvironment(Env):
"""
Trading environment base class
"""
## Setup methods
    def __init__(self, period, obs_steps, tapi, fiat="USDT", name="TradingEnvironment"):
        """
        Initialize the trading environment.

        :param period: int: candle period in minutes (must be >= 1)
        :param obs_steps: int: observation window length in periods (>= 3)
        :param tapi: data feed / exchange API instance (Poloniex-style)
        :param fiat: str: quote currency symbol, e.g. "USDT"
        :param name: str: environment name, used for logging
        """
        assert isinstance(name, str), "Name must be a string"
        self.name = name
        # Data feed api
        self.tapi = tapi
        # Environment configuration
        self.epsilon = dec_zero
        self._obs_steps = None
        self._period = None
        self.pairs = []
        self._crypto = []
        self._fiat = None
        self.tax = {}
        # Dataframes
        self.obs_df = pd.DataFrame()
        self.portfolio_df = pd.DataFrame()
        self.action_df = pd.DataFrame()
        # Logging and debugging
        self.status = {'OOD': False,
                    'Error': False,
                    'ValueError': False,
                    'ActionError': False,
                    'NotEnoughFiat': False}
        if not os.path.exists('./logs'):
            os.makedirs('./logs')
        # self.logger = Logger(self.name, './logs/')
        Logger.info("Trading Environment initialization",
                    "Trading Environment Initialized!")
        # Setup (validated through the period / obs_steps property setters)
        self.period = period
        self.obs_steps = obs_steps
        # Init attributes for key sharing
        self.results = None
        self.action_space = None
        self.observation_space = None
        self.init_balance = None
        self._symbols = []
        self.add_pairs(self.tapi.pairs)
        self.fiat = fiat
        self.set_action_space()
        self.reset_benchmark()
        self.setup()
## Env properties
    @property
    def obs_steps(self):
        """Observation window length, in periods."""
        return self._obs_steps
    @obs_steps.setter
    def obs_steps(self, value):
        """Validate and set the observation window length (int, >= 3)."""
        assert isinstance(value, int), "Obs steps must be a integer."
        assert value >= 3, "Obs steps must be >= 3. Value: %s" % str(value)
        self._obs_steps = value
    @property
    def period(self):
        """Candle period, in minutes."""
        return self._period
    @period.setter
    def period(self, value):
        """Validate and set the candle period (int, >= 1 minute)."""
        assert isinstance(value, int) and value >= 1,\
            "Period must be a integer >= 1."
        self._period = value
    @property
    def symbols(self):
        """
        Tuple of traded symbols: the crypto side of each pair, fiat last.
        Computed once from self.pairs and cached in self._symbols.
        """
        if self._symbols:
            return self._symbols
        else:
            symbols = []
            for pair in self.pairs:
                # pair format is "FIAT_CRYPTO"; keep the crypto side
                symbols.append(pair.split('_')[1])
            symbols.append(self._fiat)
            self._symbols = tuple(symbols)
            return self._symbols
@property
def fiat(self):
try:
i = -1
fiat = self.portfolio_df.at[self.portfolio_df.index[i], self._fiat]
while not convert_to.decimal(fiat.is_finite()):
i -= 1
fiat = self.portfolio_df.at[self.portfolio_df.index[-i], self._fiat]
return fiat
except IndexError:
Logger.error(TradingEnvironment.crypto, "No valid value on portfolio dataframe.")
raise KeyError
except KeyError as e:
Logger.error(TradingEnvironment.fiat, "You must specify a fiat symbol first.")
raise e
except Exception as e:
Logger.error(TradingEnvironment.fiat, self.parse_error(e))
raise e
    @fiat.setter
    def fiat(self, value):
        """
        Polymorphic setter:
        - str: select the fiat symbol and derive the crypto symbol list;
        - Decimal/float/int: record a fiat balance at the current timestamp;
        - dict: record a fiat balance at dict['timestamp'] (or now).
        """
        try:
            if isinstance(value, str):
                symbols = []
                for pair in self.pairs:
                    symbols.append(pair.split('_')[1])
                # the quote side of the first pair is the fiat candidate
                symbols.append(self.pairs[0].split('_')[0])
                assert value in symbols, "Fiat not in symbols."
                self._fiat = value
                symbols.remove(self._fiat)
                self._crypto = symbols
            elif isinstance(value, Decimal) or isinstance(value, float) or isinstance(value, int):
                self.portfolio_df.at[self.timestamp, self._fiat] = convert_to.decimal(value)
            elif isinstance(value, dict):
                try:
                    timestamp = value['timestamp']
                except KeyError:
                    timestamp = self.timestamp
                self.portfolio_df.at[timestamp, self._fiat] = convert_to.decimal(value[self._fiat])
        except IndexError:
            # self.pairs is empty, so self.pairs[0] failed above
            raise AssertionError('You must enter pairs before set fiat.')
        except Exception as e:
            Logger.error(TradingEnvironment.fiat, self.parse_error(e))
            raise e
    @property
    def crypto(self):
        """Dict of symbol -> last valid crypto balance (see get_crypto)."""
        try:
            crypto = {}
            for symbol in self._crypto:
                crypto[symbol] = self.get_crypto(symbol)
            return crypto
        except KeyError as e:
            Logger.error(TradingEnvironment.crypto, "No valid value on portfolio dataframe.")
            raise e
        except Exception as e:
            Logger.error(TradingEnvironment.crypto, self.parse_error(e))
            raise e
    def get_crypto(self, symbol):
        """
        Return the last finite balance for ``symbol``, scanning the portfolio
        dataframe backwards from the newest row.

        :param symbol: str: crypto symbol name
        :raises KeyError: when no valid value exists
        """
        try:
            i = -1
            value = self.portfolio_df.at[self.portfolio_df.index[i], symbol]
            # skip trailing NaN/inf entries, newest to oldest
            while not convert_to.decimal(value).is_finite():
                i -= 1
                value = self.portfolio_df.at[self.portfolio_df.index[i], symbol]
            return value
        except IndexError:
            Logger.error(TradingEnvironment.crypto, "No valid value on portfolio dataframe.")
            raise KeyError
        except KeyError as e:
            Logger.error(TradingEnvironment.crypto, "No valid value on portfolio dataframe.")
            raise e
        except Exception as e:
            Logger.error(TradingEnvironment.crypto, self.parse_error(e))
            raise e
    @crypto.setter
    def crypto(self, values):
        """
        Record crypto balances on the portfolio dataframe.

        :param values: dict: symbol -> amount, optionally with 'timestamp';
            fiat and timestamp keys are skipped here.
        """
        try:
            # assert isinstance(values, dict), "Crypto value must be a dictionary containing the currencies balance."
            try:
                timestamp = values['timestamp']
            except KeyError:
                timestamp = self.timestamp
            for symbol, value in values.items():
                if symbol not in [self._fiat, 'timestamp']:
                    self.portfolio_df.at[timestamp, symbol] = convert_to.decimal(value)
        except TypeError:
            # values was not dict-like
            raise AssertionError("Crypto value must be a dictionary containing the currencies balance.")
        except Exception as e:
            Logger.error(TradingEnvironment.crypto, self.parse_error(e))
            raise e
    @property
    def balance(self):
        """Full balance dict: crypto balances plus the fiat balance."""
        # return self.portfolio_df.ffill().loc[self.portfolio_df.index[-1], self.symbols].to_dict()
        balance = self.crypto
        balance.update({self._fiat: self.fiat})
        return balance
@balance.setter
def balance(self, values):
try:
assert isinstance(values, dict), "Balance must be a dictionary containing the currencies amount."
try:
timestamp = values['timestamp']
except KeyError:
timestamp = self.timestamp
for symbol, value in values.items():
if symbol is not 'timestamp':
self.portfolio_df.at[timestamp, symbol] = convert_to.decimal(value)
except Exception as e:
Logger.error(TradingEnvironment.balance, self.parse_error(e))
raise e
    @property
    def portval(self):
        """Current total portfolio value, in fiat units."""
        return self.calc_total_portval()
    @portval.setter
    def portval(self, value):
        """
        Record a portfolio value; accepts a dict with 'portval' (and
        optionally 'timestamp') or a bare numeric value.
        """
        try:
            self.portfolio_df.at[value['timestamp'], 'portval'] = convert_to.decimal(value['portval'])
        except KeyError:
            # dict without a timestamp: stamp with the current time
            self.portfolio_df.at[self.timestamp, 'portval'] = convert_to.decimal(value['portval'])
        except TypeError:
            # bare numeric value
            self.portfolio_df.at[self.timestamp, 'portval'] = convert_to.decimal(value)
        except Exception as e:
            Logger.error(TradingEnvironment.portval, self.parse_error(e))
            raise e
    @property
    def benchmark(self):
        """Benchmark allocation vector (constant-rebalanced portfolio)."""
        return self._benchmark
    @benchmark.setter
    def benchmark(self, vector):
        """Validate/normalize and store the benchmark allocation vector."""
        self._benchmark = self.assert_action(vector)
    def reset_benchmark(self):
        """Reset the benchmark to a uniform crypto allocation with zero fiat."""
        n_pairs = len(self.pairs)
        self.benchmark = np.append(dec_vec_div(convert_to.decimal(np.ones(n_pairs, dtype=np.dtype(Decimal))),
                                               dec_con.create_decimal(n_pairs)), [dec_zero])
    def add_pairs(self, *args):
        """
        Add pairs for tradeable symbol universe

        :param args: str, list: pair names like "USDT_BTC", or lists of them;
            each pair is validated against the exchange's currency list
        :return:
        """
        universe = self.tapi.returnCurrencies()
        for arg in args:
            if isinstance(arg, str):
                if set(arg.split('_')).issubset(universe):
                    self.pairs.append(arg)
                else:
                    Logger.error(TradingEnvironment.add_pairs, "Symbol not found on exchange currencies.")
            elif isinstance(arg, list):
                for item in arg:
                    if set(item.split('_')).issubset(universe):
                        if isinstance(item, str):
                            self.pairs.append(item)
                        else:
                            # NOTE(review): likely unreachable -- a non-str item
                            # would already have failed on .split() above
                            Logger.error(TradingEnvironment.add_pairs, "Symbol name must be a string")
                    else:
                        Logger.error(TradingEnvironment.add_pairs, "Symbol name must be a string")
## Data feed methods
    @property
    def timestamp(self):
        """Current UTC time shifted back one period, matching the exchange delay."""
        # return floor_datetime(datetime.now(timezone.utc) - timedelta(minutes=self.period), self.period)
        # Poloniex returns utc timestamp delayed one full bar
        return datetime.now(timezone.utc) - timedelta(minutes=self.period)
# Exchange data getters
def get_balance(self):
"""
Get last balance from exchange
:return: dict: Dict containing Decimal values for portfolio allocation
"""
try:
balance = self.tapi.returnBalances()
filtered_balance = {}
for symbol in self.symbols:
filtered_balance[symbol] = convert_to.decimal(balance[symbol])
return filtered_balance
except Exception as e:
try:
Logger.error(LiveTradingEnvironment.get_balance, self.parse_error(e, balance))
except Exception:
Logger.error(LiveTradingEnvironment.get_balance, self.parse_error(e))
raise e
    def get_fee(self, symbol, fee_type='takerFee'):
        """
        Return transaction fee value for desired symbol

        :param symbol: str: Pair name (currently unused; exchange-wide fee)
        :param fee_type: str: Take or Maker fee ('takerFee' or 'makerFee')
        :return: Decimal:
        """
        # TODO MAKE IT UNIVERSAL
        try:
            fees = self.tapi.returnFeeInfo()
            assert fee_type in ['takerFee', 'makerFee'], "fee_type must be whether 'takerFee' or 'makerFee'."
            return dec_con.create_decimal(fees[fee_type])
        except Exception as e:
            Logger.error(TradingEnvironment.get_fee, self.parse_error(e))
            raise e
# High frequency getter
# def get_pair_trades(self, pair, start=None, end=None):
# # TODO WRITE TEST
# # TODO FINISH THIS
# try:
# # Pool data from exchage
# if isinstance(end, float):
# data = self.tapi.marketTradeHist(pair, end=end)
# else:
# data = self.tapi.marketTradeHist(pair)
# df = pd.DataFrame.from_records(data)
#
# # Get more data from exchange until have enough to make obs_steps rows
# if isinstance(start, float):
# while datetime.fromtimestamp(start) < \
# datetime.strptime(df.date.iat[-1], "%Y-%m-%d %H:%M:%S"):
#
# market_data = self.tapi.marketTradeHist(pair, end=datetime.timestamp(
# datetime.strptime(df.date.iat[-1], "%Y-%m-%d %H:%M:%S")))
#
# df2 = pd.DataFrame.from_records(market_data).set_index('globalTradeID')
# appended = False
# i = 0
# while not appended:
# try:
# df = df.append(df2.iloc[i:], verify_integrity=True)
# appended = True
# except ValueError:
# i += 1
#
# else:
# while datetime.strptime(df.date.iat[0], "%Y-%m-%d %H:%M:%S") - \
# timedelta(minutes=self.period * self.obs_steps) < \
# datetime.strptime(df.date.iat[-1], "%Y-%m-%d %H:%M:%S"):
#
# market_data = self.tapi.marketTradeHist(pair, end=datetime.timestamp(
# datetime.strptime(df.date.iat[-1], "%Y-%m-%d %H:%M:%S")))
#
# df2 = pd.DataFrame.from_records(market_data).set_index('globalTradeID')
# appended = False
# i = 0
# while not appended:
# try:
# df = df.append(df2.iloc[i:], verify_integrity=True)
# appended = True
# except ValueError:
# i += 1
#
# return df
#
# except Exception as e:
# Logger.error(TradingEnvironment.get_pair_trades, self.parse_error(e))
# raise e
#
# def sample_trades(self, pair, start=None, end=None):
# # TODO WRITE TEST
# df = self.get_pair_trades(pair, start=start, end=end)
#
# period = "%dmin" % self.period
#
# # Sample the trades into OHLC data
# df['rate'] = df['rate'].ffill().apply(convert_to.decimal, raw=True)
# df['amount'] = df['amount'].apply(convert_to.decimal, raw=True)
# df.index = df.date.apply(pd.to_datetime, raw=True)
#
# # TODO REMOVE NANS
# index = df.resample(period).first().index
# out = pd.DataFrame(index=index)
#
# out['open'] = convert_and_clean(df['rate'].resample(period).first())
# out['high'] = convert_and_clean(df['rate'].resample(period).max())
# out['low'] = convert_and_clean(df['rate'].resample(period).min())
# out['close'] = convert_and_clean(df['rate'].resample(period).last())
# out['volume'] = convert_and_clean(df['amount'].resample(period).sum())
#
# return out
# Low frequency getter
    def get_ohlc(self, symbol, index):
        """
        Return OHLC data for desired pair

        :param symbol: str: Pair symbol
        :param index: datetime.datetime: Time span for data retrieval
            (a pandas DatetimeIndex; first/last entries bound the query)
        :return: pandas DataFrame: OHLC symbol data, values as strings
        """
        # Get range
        start = index[0]
        end = index[-1]
        # Call for data
        ohlc_df = pd.DataFrame.from_records(self.tapi.returnChartData(symbol,
                                                                      period=self.period * 60,
                                                                      start=datetime.timestamp(start),
                                                                      end=datetime.timestamp(end)),
                                            nrows=index.shape[0])
        # TODO 1 FIND A BETTER WAY
        # TODO: FIX TIMESTAMP
        # Set index
        ohlc_df.set_index(ohlc_df.date.transform(lambda x: datetime.fromtimestamp(x).astimezone(timezone.utc)),
                          inplace=True, drop=True)
        # Get right values to fill nans
        # TODO: FIND A BETTER PERFORMANCE METHOD
        # last_close = ohlc_df.at[ohlc_df.close.last_valid_index(), 'close']
        # Get last close value: scan backwards for the newest finite close
        i = -1
        last_close = ohlc_df.at[ohlc_df.index[i], 'close']
        while not dec_con.create_decimal(last_close).is_finite():
            i -= 1
            last_close = dec_con.create_decimal(ohlc_df.at[ohlc_df.index[i], 'close'])
        # Replace missing values with last close
        fill_dict = {col: last_close for col in ['open', 'high', 'low', 'close']}
        fill_dict.update({'volume': '0E-16'})
        # Reindex with desired time range and fill nans
        ohlc_df = ohlc_df[['open','high','low','close',
                           'volume']].reindex(index).asfreq("%dT" % self.period).fillna(fill_dict)
        return ohlc_df.astype(str)#.fillna('0.0')
# Observation maker
    def get_history(self, start=None, end=None, portfolio_vector=False):
        """
        Build the observation dataframe: OHLC data for every pair over the
        requested span, optionally joined with sampled portfolio balances.

        :param start: datetime: span start; defaults to end - obs_steps periods
        :param end: datetime: span end; defaults to the current timestamp
        :param portfolio_vector: bool: include balance columns when True
        :return: pandas DataFrame with (pair, field) column MultiIndex,
            values converted to Decimal

        Retries forever on MaxRetriesException (data feed outage).
        """
        while True:
            try:
                obs_list = []
                keys = []
                # Make desired index
                is_bounded = True
                if not end:
                    end = self.timestamp
                    is_bounded = False
                if not start:
                    start = end - timedelta(minutes=self.period * self.obs_steps)
                    index = pd.date_range(start=start,
                                          end=end,
                                          freq="%dT" % self.period).ceil("%dT" % self.period)[-self.obs_steps:]
                    # NOTE(review): redundant when 'end' was also defaulted;
                    # ensures the size assert below is skipped for windows
                    is_bounded = False
                else:
                    index = pd.date_range(start=start,
                                          end=end,
                                          freq="%dT" % self.period).ceil("%dT" % self.period)
                if portfolio_vector:
                    # Get portfolio observation
                    port_vec = self.get_sampled_portfolio(index)
                    if port_vec.shape[0] == 0:
                        port_vec = self.get_sampled_portfolio().iloc[-1:]
                        port_vec.index = [index[0]]
                    # Update last observation so it can see possible inter step changes
                    last_balance = self.get_balance()
                    port_vec.at[port_vec.index[-1], list(last_balance.keys())] = list(last_balance.values())
                    # Get pairs history
                    for pair in self.pairs:
                        keys.append(pair)
                        history = self.get_ohlc(pair, index)
                        history = pd.concat([history, port_vec[pair.split('_')[1]]], axis=1)
                        obs_list.append(history)
                    # Get fiat history
                    keys.append(self._fiat)
                    obs_list.append(port_vec[self._fiat])
                    # Concatenate dataframes
                    obs = pd.concat(obs_list, keys=keys, axis=1)
                    # Fill missing portfolio observations
                    cols_to_bfill = [col for col in zip(self.pairs, self.symbols)] + [(self._fiat, self._fiat)]
                    obs = obs.fillna(obs[cols_to_bfill].ffill().bfill())
                    if not is_bounded:
                        assert obs.shape[0] >= self.obs_steps, "Dataframe is to small. Shape: %s" % str(obs.shape)
                    return obs.apply(convert_to.decimal, raw=True)
                else:
                    # Get history
                    for pair in self.pairs:
                        keys.append(pair)
                        history = self.get_ohlc(pair, index)
                        obs_list.append(history)
                    # Concatenate
                    obs = pd.concat(obs_list, keys=keys, axis=1)
                    # Check size
                    if not is_bounded:
                        assert obs.shape[0] >= self.obs_steps, "Dataframe is to small. Shape: %s" % str(obs.shape)
                    return obs.apply(convert_to.decimal, raw=True)
            except MaxRetriesException:
                Logger.error(TradingEnvironment.get_history, "Retries exhausted. Waiting for connection...")
            except Exception as e:
                Logger.error(TradingEnvironment.get_history, self.parse_error(e))
                raise e
    def get_observation(self, portfolio_vector=False):
        """
        Return observation df with prices and asset amounts

        :param portfolio_vector: bool: whether to include or not asset amounts
        :return: pandas DataFrame: also cached on self.obs_df
        """
        try:
            self.obs_df = self.get_history(portfolio_vector=portfolio_vector)
            return self.obs_df
        # except ExchangeError:
        #     sleep(1)
        #     self.obs_df = self.get_history(portfolio_vector=portfolio_vector)
        #     return self.obs_df
        except Exception as e:
            Logger.error(TradingEnvironment.get_observation, self.parse_error(e))
            raise e
    def get_sampled_portfolio(self, index=None):
        """
        Return sampled portfolio df

        :param index: optional DatetimeIndex bounding the sample window;
            defaults to the full portfolio dataframe
        :return: portfolio_df resampled to the environment period (last value)
        """
        if index is None:
            start = self.portfolio_df.index[0]
            end = self.portfolio_df.index[-1]
        else:
            start = index[0]
            end = index[-1]
        # TODO 1 FIND A BETTER WAY
        if start != end:
            return self.portfolio_df.loc[start:end].resample("%dmin" % self.period).last()
        else:
            # degenerate single-point window: sample everything up to 'end'
            return self.portfolio_df.loc[:end].resample("%dmin" % self.period).last()
    def get_sampled_actions(self, index=None):
        """
        Return sampled action df

        :param index: optional DatetimeIndex bounding the sample window;
            defaults to the full action dataframe
        :return: action_df resampled to the environment period (last value)
        """
        if index is None:
            start = self.action_df.index[0]
            end = self.action_df.index[-1]
        else:
            start = index[0]
            end = index[-1]
        # TODO 1 FIND A BETTER WAY
        if start != end:
            return self.action_df.loc[start:end].resample("%dmin" % self.period).last()
        else:
            # degenerate single-point window: sample everything up to 'end'
            return self.action_df.loc[:end].resample("%dmin" % self.period).last()
## Trading methods
    def get_open_price(self, symbol, timestamp=None):
        """
        Get symbol open price

        :param symbol: str: crypto symbol name (pair column is fiat_symbol)
        :param timestamp: row timestamp; defaults to the latest observation
        :return: Decimal: Symbol open price
        """
        if not timestamp:
            timestamp = self.obs_df.index[-1]
        return self.obs_df.at[timestamp, ("%s_%s" % (self._fiat, symbol), 'open')]
    def calc_total_portval(self, timestamp=None):
        """
        Return total portfolio value given optional timestamp

        :param timestamp: datetime.datetime:
        :return: Decimal: Portfolio value in fiat units
        """
        portval = dec_zero
        for symbol in self._crypto:
            # fused multiply-add: crypto_amount * open_price + running total
            portval = self.get_crypto(symbol).fma(self.get_open_price(symbol, timestamp), portval)
        portval = dec_con.add(self.fiat, portval)
        return portval
    def calc_posit(self, symbol, portval):
        """
        Calculate current position vector entry: the fraction of portfolio
        value held in ``symbol``.

        :param symbol: str: Symbol name
        :param portval: Decimal: Portfolio value
        :return: Decimal fraction in [0, 1] (safe_div guards portval == 0)
        """
        if symbol == self._fiat:
            return safe_div(self.fiat, portval)
        else:
            return safe_div(dec_con.multiply(self.get_crypto(symbol), self.get_open_price(symbol)), portval)
    def calc_portfolio_vector(self):
        """
        Return portfolio position vector, one Decimal fraction per symbol
        (cryptos first, fiat last — same order as self.symbols).

        :return: numpy array:
        """
        portfolio = np.empty(len(self.symbols), dtype=Decimal)
        portval = self.calc_total_portval()
        for i, symbol in enumerate(self.symbols):
            portfolio[i] = self.calc_posit(symbol, portval)
        return portfolio
def assert_action(self, action):
"""
Assert that action vector is valid and have norm one
:param action: numpy array: Action array
:return: numpy array: Valid and normalized action vector
"""
# TODO WRITE TEST
try:
action = convert_to.decimal(action)
assert self.action_space.contains(action)
# normalize
if action.sum() != dec_one:
action = safe_div(action, action.sum())
action[-1] += dec_one - action.sum()
assert action.sum() - dec_one < dec_eps
return action
except AssertionError:
action = safe_div(action, action.sum())
action[-1] += dec_one - action.sum()
try:
assert action.sum() - dec_one < dec_eps
return action
except AssertionError:
action = safe_div(action, action.sum())
action[-1] += dec_one - action.sum()
assert action.sum() - dec_one < dec_eps
return action
except Exception as e:
Logger.error(TradingEnvironment.assert_action, self.parse_error(e))
raise e
def log_action(self, timestamp, symbol, value):
"""
Log action to action df
:param timestamp:
:param symbol:
:param value:
:return:
"""
if symbol == 'online':
self.action_df.at[timestamp, symbol] = value
else:
self.action_df.at[timestamp, symbol] = convert_to.decimal(value)
def log_action_vector(self, timestamp, vector, online):
"""
Log complete action vector to action df
:param timestamp:
:param vector:
:param online:
:return:
"""
for i, symbol in enumerate(self.symbols):
self.log_action(timestamp, symbol, vector[i])
self.log_action(timestamp, 'online', online)
def get_last_portval(self):
"""
Retrieve last valid portfolio value from portfolio dataframe
:return: Decimal
"""
try:
i = -1
portval = self.portfolio_df.at[self.portfolio_df.index[i], 'portval']
while not dec_con.create_decimal(portval).is_finite():
i -= 1
portval = self.portfolio_df.at[self.portfolio_df.index[i], 'portval']
return portval
except Exception as e:
Logger.error(TradingEnvironment.get_last_portval, self.parse_error(e))
raise e
    def get_reward(self, previous_portval):
        """
        Payoff (negative regret) reward function.

        Reference:
            E Hazan.
            Logarithmic Regret Algorithms for Online Convex ... - cs.Princeton
            www.cs.princeton.edu/~ehazan/papers/log-journal.pdf

        :param previous_portval: portfolio value before the step (Decimal-like)
        :return: Decimal: portfolio log-return minus benchmark log-return,
                 quantized to dec_qua
        """
        # TODO TEST
        # Price change: open-price ratio of each pair over the last two bars,
        # with a trailing 1 appended for the fiat leg.
        pr = self.obs_df.xs('open', level=1, axis=1).iloc[-2:].values
        pr = np.append(safe_div(pr[-1], pr[-2]), [dec_one])
        pr_max = pr.max()
        # Divide after dot product
        # pr = safe_div(pr, pr_max)
        # No taxes this way
        # port_log_return = rew_con.log10(np.dot(convert_to.decimal(self.action_df.iloc[-1].values[:-1]), pr))
        # This way you get taxes from the next reward right after the step init
        # try:
        #     port_change = safe_div(self.portfolio_df.get_value(self.portfolio_df.index[-1], 'portval'),
        #                        self.portfolio_df.get_value(self.portfolio_df.index[-2], 'portval'))
        # except IndexError:
        #     port_change = dec_one
        # This way you get taxes from the currently action, after wait for the bar to close
        try:
            # Ratio of current portfolio value to the pre-step value.
            port_change = safe_div(self.calc_total_portval(), previous_portval)
        except IndexError:
            port_change = dec_one
        # Portfolio log returns (both sides scaled by pr_max so the
        # subtraction below cancels the common factor).
        port_log_return = rew_con.ln(safe_div(port_change, pr_max))
        # Benchmark log returns
        bench_log_return = rew_con.ln(safe_div(np.dot(self.benchmark, pr), pr_max))
        # Return -regret (negative regret) = Payoff
        return rew_con.subtract(port_log_return, bench_log_return).quantize(dec_qua)
    def simulate_trade(self, action, timestamp):
        """
        Simulate a portfolio rebalance on the exchange environment.

        Sells over-weighted assets first (freeing fiat), then buys the
        under-weighted ones, charging self.tax fees on each leg; finally logs
        the executed action and the resulting balance/portfolio value.

        :param action: np.array: desired portfolio vector
        :param timestamp: datetime.datetime: trade time
        :return: True on success (exceptions are logged, emailed and re-raised)
        """
        # TODO: IMPLEMENT SLIPPAGE MODEL
        try:
            # Assert inputs
            action = self.assert_action(action)
            # Position change per crypto symbol (fiat leg dropped).
            posit_change = dec_vec_sub(action, self.calc_portfolio_vector())[:-1]
            # Get initial portval
            portval = self.calc_total_portval()
            # Sell assets first
            for i, change in enumerate(posit_change):
                if change < dec_zero:
                    symbol = self.symbols[i]
                    crypto_pool = safe_div(dec_con.multiply(portval, action[i]), self.get_open_price(symbol))
                    with localcontext() as ctx:
                        # Round the fee up — conservative against the trader.
                        ctx.rounding = ROUND_UP
                        fee = ctx.multiply(dec_con.multiply(portval, change.copy_abs()), self.tax[symbol])
                    self.fiat = {self._fiat: dec_con.add(self.fiat, portval.fma(change.copy_abs(), -fee)), 'timestamp': timestamp}
                    self.crypto = {symbol: crypto_pool, 'timestamp': timestamp}
            # Update prev portval with deduced taxes
            portval = self.calc_total_portval()
            # Then buy some goods
            for i, change in enumerate(posit_change):
                if change > dec_zero:
                    symbol = self.symbols[i]
                    self.fiat = {self._fiat: dec_con.subtract(self.fiat, dec_con.multiply(portval, change.copy_abs())),
                                 'timestamp': timestamp}
                    # if fiat_pool is negative, deduce it from portval and clip
                    if self.fiat < dec_zero:
                        portval += self.fiat
                        self.fiat = {self._fiat: dec_zero, 'timestamp': timestamp}
                    with localcontext() as ctx:
                        # Fee rounded up here as well.
                        ctx.rounding = ROUND_UP
                        fee = ctx.multiply(dec_con.multiply(portval, change.copy_abs()), self.tax[symbol])
                    crypto_pool = safe_div(portval.fma(action[i], -fee), self.get_open_price(symbol))
                    self.crypto = {symbol: crypto_pool, 'timestamp': timestamp}
            # Log executed action and final balance
            self.log_action_vector(self.timestamp, self.calc_portfolio_vector(), True)
            # Update portfolio_df
            final_balance = self.balance
            final_balance['timestamp'] = timestamp
            self.balance = final_balance
            # Calculate new portval
            self.portval = {'portval': self.calc_total_portval(),
                            'timestamp': timestamp}
            return True
        except Exception as e:
            Logger.error(TradingEnvironment.simulate_trade, self.parse_error(e))
            if hasattr(self, 'email'):
                self.send_email("TradingEnvironment Error: %s at %s" % (e,
                                datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(e))
            raise e
## Env methods
def set_observation_space(self):
"""
Set environment observation space
:return:
"""
# Observation space:
obs_space = []
# OPEN, HIGH, LOW, CLOSE
for _ in range(4):
obs_space.append(Box(0.0, 1e12, 1))
# VOLUME
obs_space.append(Box(0.0, 1e12, 1))
# POSITION
obs_space.append(Box(0.0, 1.0, 1))
self.observation_space = Tuple(obs_space)
def set_action_space(self):
"""
Set valid action space
:return:
"""
# Action space
self.action_space = Box(dec_zero, dec_one, (len(self.symbols),))
# Logger.info(TrainingEnvironment.set_action_space, "Setting environment with %d symbols." % (len(self.symbols)))
def reset_status(self):
self.status = {'OOD': False, 'Error': False, 'ValueError': False, 'ActionError': False,
'NotEnoughFiat': False}
def setup(self):
# Reset index
self.data_length = self.tapi.data_length
# Set spaces
self.set_observation_space()
self.set_action_space()
# Get fee values
for symbol in self.symbols:
self.tax[symbol] = convert_to.decimal(self.get_fee(symbol))
# Start balance
self.init_balance = self.get_balance()
# Set flag
self.initialized = True
def reset(self):
"""
Setup env with initial values
:return: pandas DataFrame: observation
"""
raise NotImplementedError()
## Analytics methods
    def get_results(self, window=7, benchmark="crp"):
        """
        Calculate rolling performance metrics against a benchmark portfolio.

        :param window: int: rolling window length (in samples)
        :param benchmark: str: crp for constant rebalance or bah for buy and hold
        :return: pandas DataFrame: sampled portfolio joined with actions, plus
                 benchmark, returns, alpha, beta, drawdown and sharpe columns
        """
        # Sample portfolio df
        self.results = self.get_sampled_portfolio().join(self.get_sampled_actions(), rsuffix='_posit')[1:].ffill()
        # Get history
        obs = self.get_history(self.results.index[0], self.results.index[-1])
        # Init df
        self.results['benchmark'] = dec_zero
        self.results['returns'] = convert_to.decimal(np.nan)
        self.results['benchmark_returns'] = convert_to.decimal(np.nan)
        self.results['alpha'] = convert_to.decimal(np.nan)
        self.results['beta'] = convert_to.decimal(np.nan)
        self.results['drawdown'] = convert_to.decimal(np.nan)
        self.results['sharpe'] = convert_to.decimal(np.nan)
        ## Calculate benchmark portfolio
        # Calc init portval
        init_portval = dec_zero
        init_time = self.results.index[0]
        for symbol in self._crypto:
            init_portval += convert_to.decimal(self.init_balance[symbol]) * \
                            obs.at[init_time, (self._fiat + '_' + symbol, 'open')]
        init_portval += convert_to.decimal(self.init_balance[self._fiat])
        # # Buy and Hold initial equally distributed assets
        with localcontext() as ctx:
            ctx.rounding = ROUND_UP
            for i, symbol in enumerate(self.pairs):
                self.results[symbol+'_benchmark'] = (dec_one - self.tax[symbol.split('_')[1]]) * \
                                                    obs[symbol, 'open'] * init_portval / (obs.at[init_time,
                                                    (symbol, 'open')] * Decimal(self.action_space.shape[0] - 1))
                if benchmark == 'bah':
                    self.results['benchmark'] = self.results['benchmark'] + self.results[symbol + '_benchmark']
        # Best Constant Rebalance Portfolio without taxes
        hindsight = obs.xs('open', level=1, axis=1).rolling(2,
                           min_periods=2).apply(lambda x: (safe_div(x[-1],
                           x[-2]))).fillna(dec_one).applymap(dec_con.create_decimal)
        hindsight[self._fiat] = dec_one
        # hindsight = hindsight.apply(lambda x: safe_div(x, x.max()), axis=1)
        # Take first operation fee just to start at the same point as strategy
        if benchmark == 'crp':
            # NOTE(review): `symbol` below leaks from the pairs loop above, so
            # the fee of the *last* pair is applied here — confirm intended.
            self.results['benchmark'] = np.dot(hindsight, self.benchmark).cumprod() * init_portval * \
                                        (dec_one - self.tax[symbol.split('_')[1]])
        # Calculate metrics
        self.results['returns'] = pd.to_numeric(self.results.portval.rolling(2,
                                                min_periods=2).apply(lambda x: (safe_div(x[-1],
                                                x[-2]) - 1)).fillna(dec_zero))
        self.results['benchmark_returns'] = pd.to_numeric(self.results.benchmark.rolling(2,
                                                          min_periods=2).apply(lambda x: (safe_div(x[-1],
                                                          x[-2]) - 1)).fillna(dec_zero))
        self.results['alpha'] = ec.utils.roll(self.results.returns,
                                              self.results.benchmark_returns,
                                              function=ec.alpha_aligned,
                                              window=window,
                                              risk_free=0.001
                                              )
        self.results['beta'] = ec.utils.roll(self.results.returns,
                                             self.results.benchmark_returns,
                                             function=ec.beta_aligned,
                                             window=window)
        self.results['drawdown'] = ec.roll_max_drawdown(self.results.returns, window=int(window))
        self.results['sharpe'] = ec.roll_sharpe_ratio(self.results.returns, window=int(window + 5), risk_free=0.001)
        return self.results
    def plot_results(self, window=14, benchmark='crp', subset=None):
        """
        Render an interactive bokeh dashboard of backtest performance.

        Builds position, value, pair performance, returns, histogram, alpha,
        beta, sharpe and drawdown figures from get_results() and shows them
        with a notebook handle.

        :param window: int: rolling window (in samples) for the metrics
        :param benchmark: str: 'crp' constant rebalance or 'bah' buy and hold
        :param subset: optional (start, stop) iloc bounds to plot a slice
        :return: dict: named glyph/annotation handles plus 'handle' (notebook)
        """
        def config_fig(fig):
            # Shared dark theme applied to every figure.
            fig.background_fill_color = "black"
            fig.background_fill_alpha = 0.1
            fig.border_fill_color = "#232323"
            fig.outline_line_color = "#232323"
            fig.title.text_color = "whitesmoke"
            fig.xaxis.axis_label_text_color = "whitesmoke"
            fig.yaxis.axis_label_text_color = "whitesmoke"
            fig.yaxis.major_label_text_color = "whitesmoke"
            fig.xaxis.major_label_orientation = np.pi / 4
            fig.grid.grid_line_alpha = 0.1
            fig.grid.grid_line_dash = [6, 4]
        if subset:
            df = self.get_results(window=window, benchmark=benchmark).astype(np.float64).iloc[subset[0]:subset[1]]
        else:
            df = self.get_results(window=window, benchmark=benchmark).astype(np.float64)
        # Results figures
        results = {}
        # Position
        pos_hover = HoverTool(
            tooltips=[
                ('date', '<span style="color: #000000;">@x{%F, %H:%M}</span>'),
                ('position', '<span style="color: #000000;">@y{%f}</span>'),
                ],
            formatters={
                'x': 'datetime',  # use 'datetime' formatter for 'date' field
                'y': 'printf',  # use 'printf' formatter for 'adj close' field
                },
            # display a tooltip whenever the cursor is vertically in line with a glyph
            mode='vline'
            )
        p_pos = figure(title="Position over time",
                       x_axis_type="datetime",
                       x_axis_label='timestep',
                       y_axis_label='position',
                       plot_width=900, plot_height=400 + len(self.pairs) * 5,
                       tools=['crosshair','reset','xwheel_zoom','pan,box_zoom', pos_hover, 'save'],
                       toolbar_location="above"
                       )
        config_fig(p_pos)
        palettes = inferno(len(self.symbols))
        legend = []
        # One line per symbol's position column.
        for i, symbol in enumerate(self.symbols):
            results[symbol + '_posit'] = p_pos.line(df.index, df[symbol + '_posit'], color=palettes[i], line_width=1.2)#, muted_color=palettes[i], muted_alpha=0.2)
            p_pos.legend.click_policy = "hide"
            legend.append((str(symbol), [results[symbol + '_posit']]))
        p_pos.add_layout(Legend(items=legend, location=(0, -31)), 'right')
        p_pos.legend.click_policy = "hide"
        # Portifolio and benchmark values
        val_hover = HoverTool(
            tooltips=[
                ('date', '<span style="color: #000000;">@x{%F, %H:%M}</span>'),
                ('val', '<span style="color: #000000;">$@y{%0.2f}</span>'),
                ],
            formatters={
                'x': 'datetime',  # use 'datetime' formatter for 'date' field
                'y': 'printf',  # use 'printf' formatter for 'adj close' field
                },
            # display a tooltip whenever the cursor is vertically in line with a glyph
            mode='vline'
            )
        p_val = figure(title="Portfolio / Benchmark Value",
                       x_axis_type="datetime",
                       x_axis_label='timestep',
                       y_axis_label='value',
                       plot_width=900, plot_height=400,
                       tools=['crosshair', 'reset', 'xwheel_zoom', 'pan,box_zoom', val_hover, 'save'],
                       toolbar_location="above"
                       )
        config_fig(p_val)
        results['benchmark'] = p_val.line(df.index, df.benchmark, color='red', line_width=1.2)
        results['m_bench'] = p_val.line(df.index, df.benchmark.rolling(int(window * 10)).mean(), color='black', line_width=1.2, alpha=0.8)
        results['portval'] = p_val.line(df.index, df.portval, color='green', line_width=1.2)
        results['m_portval'] = p_val.line(df.index, df.portval.rolling(int(window * 10)).mean(), color='yellow', line_width=1.2, alpha=0.8)
        p_val.add_layout(Legend(items=[("portval", [results['portval']]),
                                       ("benchmark", [results['benchmark']]),
                                       ("mean portval", [results['m_portval']]),
                                       ("mean bench", [results['m_bench']])
                                       ], location=(0, -31)), 'right')
        p_val.legend.click_policy = "hide"
        # Individual assets portval
        p_pval = figure(title="Pair Performance",
                        x_axis_type="datetime",
                        x_axis_label='timestep',
                        y_axis_label='performance',
                        plot_width=900, plot_height=400 + len(self.pairs) * 5,
                        tools=['crosshair', 'reset', 'xwheel_zoom', 'pan,box_zoom', val_hover, 'save'],
                        toolbar_location="above"
                        )
        config_fig(p_pval)
        legend = []
        # Per-pair buy-and-hold benchmark curves.
        for i, symbol in enumerate(self.pairs):
            results[symbol+'_benchmark'] = p_pval.line(df.index, df[symbol+'_benchmark'], color=palettes[i], line_width=1.2)
            legend.append((symbol,[results[symbol+'_benchmark']]))
        p_pval.add_layout(Legend(items=legend, location=(0, -31)), 'right')
        p_pval.legend.click_policy = "hide"
        # Portifolio and benchmark returns
        p_ret = figure(title="Portfolio / Benchmark Returns",
                       x_axis_type="datetime",
                       x_axis_label='timestep',
                       y_axis_label='Returns',
                       plot_width=900, plot_height=400,
                       tools=['crosshair','reset','xwheel_zoom','pan', 'box_zoom', 'save'],
                       toolbar_location="above"
                       )
        config_fig(p_ret)
        roll_mu = df.returns.rolling(int(df.index.shape[0] / 5)).mean()
        roll_std = df.returns.rolling(int(df.index.shape[0] / 5)).var()
        results['bench_ret'] = p_ret.line(df.index, df.benchmark_returns, color='red', line_width=1.2)
        results['port_ret'] = p_ret.line(df.index, df.returns, color='green', line_width=1.2, alpha=0.6)
        results['ret_mean'] = p_ret.line(df.index, roll_mu,
                                         color='yellow', line_width=1.2, alpha=0.6)
        results['ret_std_1'] = p_ret.line(df.index, roll_mu + roll_std,
                                          color='blue', line_width=1.2, alpha=0.6)
        results['ret_std_2'] = p_ret.line(df.index, roll_mu - roll_std,
                                          color='blue', line_width=1.2, alpha=0.6)
        p_ret.add_layout(Legend(items=[("bench returns", [results['bench_ret']]),
                                       ("port returns", [results['port_ret']]),
                                       ("returns_mean", [results['ret_mean']]),
                                       ("returns_std", [results['ret_std_1'], results['ret_std_2']])
                                       ], location=(0, -31),), 'right')
        p_ret.legend.click_policy = "hide"
        # Returns histogram
        p_hist = figure(title="Portfolio Value Pct Change Distribution",
                        x_axis_label='Pct Change',
                        y_axis_label='frequency',
                        plot_width=900, plot_height=400,
                        tools=['crosshair','reset','xwheel_zoom','pan', 'box_zoom', 'save'],
                        toolbar_location="above"
                        )
        config_fig(p_hist)
        hist, edges = np.histogram(df.returns, density=True, bins=100)
        p_hist.quad(top=hist, bottom=0, left=edges[:-1], right=edges[1:],
                    fill_color="#036564", line_color="#033649")
        sigma = df.returns.std()
        mu = df.returns.mean()
        quantiles = (df.returns.quantile(0.05), df.returns.quantile(0.95))
        results['mhist'] = Span(location=mu, dimension='height', line_color='red',
                                line_dash='dashed', line_width=2)
        p_hist.add_layout(results['mhist'])
        p_hist.add_layout(Label(x=mu, y=max(hist), x_offset=4,
                                y_offset=-5, text='%.06f' % mu,
                                text_color='red'))
        p_hist.add_layout(Label(x=quantiles[0], y=0, text='%.06f' % quantiles[0], text_color='yellow', angle=45,))
        p_hist.add_layout(Label(x=quantiles[1], y=0, text='%.06f' % quantiles[1], text_color='yellow', angle=45))
        # PDF
        # x = np.linspace(df.returns.min(), df.returns.max(), 1000)
        # pdf = 1 / (sigma * np.sqrt(2 * np.pi)) * np.exp(-(x - mu) ** 2 / (2 * sigma ** 2))
        #
        # p_hist.line(x, pdf, line_color="#D95B43", line_width=1.8, alpha=0.7)
        results['cihist'] = p_hist.line(np.linspace(quantiles[0], quantiles[1], 1000), 0, line_color='yellow',
                                        line_width=3, alpha=0.7, line_dash='dashed')
        p_hist.add_layout(Legend(items=[
            ("95% credible interval", [results['cihist']])
            ], location=(0, -31),), 'right')
        # Portifolio rolling alpha
        p_alpha = figure(title="Portfolio rolling alpha",
                         x_axis_type="datetime",
                         x_axis_label='timestep',
                         y_axis_label='alpha',
                         plot_width=900, plot_height=270,
                         tools=['crosshair','reset','xwheel_zoom','pan', 'box_zoom', 'save'],
                         toolbar_location="above"
                         )
        config_fig(p_alpha)
        mu = df.alpha.mean()
        results['alpha'] = p_alpha.line(df.index, df.alpha, color='yellow', line_width=1.2)
        p_alpha.add_layout(Span(location=0, dimension='width', line_color='black',
                                line_dash='dashed', line_width=1.5))
        results['malpha'] = Span(location=mu, dimension='width', line_color='whitesmoke',
                                 line_dash='dashed', line_width=1.5)
        p_alpha.add_layout(results['malpha'])
        p_alpha.add_layout(Label(x=df.index[window], y=mu, x_offset=10,
                                 y_offset=1, text='mu: %.06f' % mu,
                                 text_color='whitesmoke'))
        p_alpha.add_layout(Legend(items=[("alpha", [results['alpha']])
                                         ], location=(0, -31),), 'right')
        # Portifolio rolling beta
        p_beta = figure(title="Portfolio rolling beta",
                        x_axis_type="datetime",
                        x_axis_label='timestep',
                        y_axis_label='beta',
                        plot_width=900, plot_height=270,
                        tools=['crosshair','reset','xwheel_zoom','pan', 'box_zoom', 'save'],
                        toolbar_location="above"
                        )
        config_fig(p_beta)
        mu = df.beta.mean()
        results['beta'] = p_beta.line(df.index, df.beta, color='yellow', line_width=1.2)
        p_beta.add_layout(Span(location=0, dimension='width', line_color='black',
                               line_dash='dashed', line_width=1.5))
        results['mbeta'] = Span(location=mu, dimension='width', line_color='whitesmoke',
                                line_dash='dashed', line_width=1.5)
        p_beta.add_layout(results['mbeta'])
        p_beta.add_layout(Label(x=df.index[window], y=mu, x_offset=10,
                                y_offset=1, text='mu: %.06f' % mu,
                                text_color='whitesmoke'))
        p_beta.add_layout(Legend(items=[("beta", [results['beta']])
                                        ], location=(0, -31),), 'right')
        # Portifolio Sharpe ratio
        p_sharpe = figure(title="Portfolio rolling Sharpe ratio",
                          x_axis_type="datetime",
                          x_axis_label='timestep',
                          y_axis_label='Sharpe ratio',
                          plot_width=900, plot_height=270,
                          tools=['crosshair','reset','xwheel_zoom','pan', 'box_zoom', 'save'],
                          toolbar_location="above"
                          )
        config_fig(p_sharpe)
        mu = df.sharpe.mean()
        results['sharpe'] = p_sharpe.line(df.index, df.sharpe, color='yellow', line_width=1.2)
        p_sharpe.add_layout(Span(location=0, dimension='width', line_color='black',
                                 line_dash='dashed', line_width=1.5))
        results['msharpe'] = Span(location=mu, dimension='width', line_color='whitesmoke',
                                  line_dash='dashed', line_width=1.5)
        p_sharpe.add_layout(results['msharpe'])
        p_sharpe.add_layout(Label(x=df.index[window], y=mu, x_offset=10,
                                  y_offset=1, text='mu: %.06f' % mu,
                                  text_color='whitesmoke'))
        p_sharpe.add_layout(Legend(items=[("sharpe", [results['sharpe']])
                                          ], location=(0, -31),), 'right')
        # Rolling Drawdown
        p_dd = figure(title="Portfolio rolling drawdown",
                      x_axis_type="datetime",
                      x_axis_label='timestep',
                      y_axis_label='drawdown',
                      plot_width=900, plot_height=270,
                      tools=['crosshair','reset','xwheel_zoom','pan', 'box_zoom', 'save'],
                      toolbar_location="above"
                      )
        config_fig(p_dd)
        md = df.drawdown.min()
        results['drawdown'] = p_dd.line(df.index, df.drawdown, color='red', line_width=1.2)
        results['mdrawdown'] = Span(location=md, dimension='width',
                                    line_color='whitesmoke', line_dash='dashed', line_width=2)
        p_dd.add_layout(results['mdrawdown'])
        p_dd.add_layout(Label(x=df.index[window], y=md, x_offset=4,
                              y_offset=5, text='max dd: %.06f' % md,
                              text_color='whitesmoke'))
        p_dd.add_layout(Legend(items=[("drawdown", [results['drawdown']])
                                      ], location=(0, -31),), 'right')
        # Console summary of headline metrics.
        print("\n################### > Portfolio Performance Analysis < ###################\n")
        print("Portfolio excess Sharpe: %f" % ec.excess_sharpe(df.returns, df.benchmark_returns))
        print("Portfolio / Benchmark Sharpe ratio: %f / %f" % (ec.sharpe_ratio(df.returns),
                                                               ec.sharpe_ratio(df.benchmark_returns)))
        print("Portfolio / Benchmark Omega ratio: %f / %f" % (ec.omega_ratio(df.returns),
                                                              ec.omega_ratio(df.benchmark_returns)))
        print("Portfolio / Benchmark max drawdown: %f / %f" % (ec.max_drawdown(df.returns),
                                                               ec.max_drawdown(df.benchmark_returns)))
        results['handle'] = show(column(p_val, p_pval, p_pos, p_ret, p_hist, p_sharpe, p_dd, p_alpha, p_beta),
                                 notebook_handle=True)
        return results
## Report methods
def parse_error(self, e, *args):
error_msg = '\n' + self.name + ' error -> ' + type(e).__name__ + ' in line ' + str(
e.__traceback__.tb_lineno) + ': ' + str(e)
for args in args:
error_msg += "\n" + str(args)
return error_msg
def set_email(self, email):
"""
Set Gmail address and password for log keeping
:param email: str: Gmail address
:param psw: str: account password
:return:
"""
try:
assert isinstance(email, dict)
self.email = email
Logger.info(TradingEnvironment.set_email, "Email report address set to: %s" % (str([email[key] for key in email if key == 'to'])))
except Exception as e:
Logger.error(TradingEnvironment.set_email, self.parse_error(e))
    def send_email(self, subject, body):
        """
        Send a report email through Gmail SMTP.

        Credentials and recipients come from the dict stored by set_email
        ('email', 'psw' and 'to' keys). Name-resolution failures are retried
        once after a short sleep; SMTP failures are logged and swallowed.

        :param subject: str: message subject
        :param body: str: message body
        :return: None
        """
        try:
            assert isinstance(self.email, dict) and \
                isinstance(subject, str) and isinstance(body, str)
            # NOTE(review): gmail_user/gmail_pwd/TO are bound only when the
            # corresponding keys exist; a dict missing 'email' would raise
            # NameError at FROM below — confirm set_email always provides it.
            for key in self.email:
                if key == 'email':
                    gmail_user = self.email[key]
                elif key == 'psw':
                    gmail_pwd = self.email[key]
                elif key == 'to':
                    TO = self.email[key] if type(self.email[key]) is list else [self.email[key]]
            FROM = gmail_user
            SUBJECT = subject
            TEXT = body
            # Prepare actual message
            message = """From: %s\nTo: %s\nSubject: %s\n\n%s
            """ % (FROM, ", ".join(TO), SUBJECT, TEXT)
            server = smtplib.SMTP("smtp.gmail.com", 587)
            server.ehlo()
            server.starttls()
            server.login(gmail_user, gmail_pwd)
            server.sendmail(FROM, TO, message)
            server.close()
        # If we have no internet, wait five seconds and retry
        except gaierror:
            try:
                sleep(5)
                self.send_email(subject, body)
            except gaierror as e:
                # If there is no internet yet, log error and move on
                Logger.error(TradingEnvironment.send_email, self.parse_error(e))
        except smtplib.SMTPServerDisconnected as e:
            # Server dropped the connection: log error and move on
            Logger.error(TradingEnvironment.send_email, self.parse_error(e))
        except smtplib.SMTPSenderRefused as e:
            # Sender rejected by the server: log error and move on
            Logger.error(TradingEnvironment.send_email, self.parse_error(e))
        except Exception as e:
            try:
                Logger.error(TradingEnvironment.send_email, self.parse_error(e))
                if hasattr(self, 'email'):
                    self.send_email("Error sending email: %s at %s" % (e,
                                    datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                    self.parse_error(e))
            except Exception as e:
                Logger.error(TradingEnvironment.send_email, self.parse_error(e))
class BacktestEnvironment(TradingEnvironment):
    """
    Backtest environment for financial strategies history testing
    """
    def __init__(self, period, obs_steps, tapi, fiat, name):
        """
        :param period: int: bar period in minutes
        :param obs_steps: int: bars per observation window
        :param tapi: BacktestDataFeed: historical data feed
        :param fiat: str: fiat symbol used as the portfolio numeraire
        :param name: str: environment name (used in logs)
        """
        assert isinstance(tapi, BacktestDataFeed), "Backtest tapi must be a instance of BacktestDataFeed."
        super().__init__(period, obs_steps, tapi, fiat, name)
        # Cursor into the historical data; starts right after the first window.
        self.index = obs_steps
        self.data_length = None
        self.training = False
        self.initialized = False
    @property
    def timestamp(self):
        # Simulation clock: timestamp of the bar at the cursor, as UTC.
        return datetime.fromtimestamp(self.tapi.ohlc_data[self.tapi.pairs[0]].index[self.index]).astimezone(timezone.utc)
    def get_hindsight(self):
        """
        Stay away from look ahead bias!
        :return: pandas dataframe: Full history dataframe
        """
        # Save env obs_steps
        obs_steps = self.obs_steps
        # Change it so you can recover all the data
        self.obs_steps = self.data_length
        self.index = self.obs_steps - 1
        # Pull the entire data set
        hindsight = self.get_observation()
        # Change env obs_steps back
        self.obs_steps = obs_steps
        self.index = self.obs_steps
        return hindsight
    def optimize_benchmark(self, nb_steps, verbose=False):
        """
        Search for the best constant-rebalance benchmark in hindsight.

        :param nb_steps: int: number of optimizer evaluations
        :param verbose: bool: print progress every 10 steps
        :return: the (possibly updated) self.benchmark weight vector
        """
        # Init var
        i = 0
        ## Acquire open price hindsight
        hindsight = self.get_hindsight().xs('open', level=1,
                                            axis=1).rolling(2, min_periods=2).apply(
            lambda x: (safe_div(x[-1], x[-2]))).dropna().astype('f')
        hindsight[self._fiat] = 1.0
        # Scale it
        hindsight = hindsight.apply(lambda x: safe_div(x, x.max()), axis=1)
        # Calculate benchmark return
        # Benchmark: Equally distributed constant rebalanced portfolio
        ed_crp = array_normalize(np.append(np.ones(len(self.symbols) - 1), [0.0]))
        ed_crp_returns = np.dot(hindsight, ed_crp)
        initial_benchmark_returns = np.dot(hindsight, np.float64(self.benchmark))
        initial_reward = np.log(initial_benchmark_returns).sum() - np.log(ed_crp_returns).sum()
        ## Define params
        # Constraints declaration
        # bench_constraints = [lambda **kwargs: sum([kwargs[key] for key in kwargs]) <= 1]
        ## Define benchmark optimization routine
        # @ot.constraints.constrained(bench_constrains)
        # @ot.constraints.violations_defaulted(-10)
        def find_bench(**kwargs):
            # Objective for the optimizer: log-return advantage of the
            # candidate weights over the equally-distributed CRP.
            try:
                # Init variables
                nonlocal i, nb_steps, hindsight, ed_crp_returns
                # Best constant rebalance portfolio
                b_crp = array_normalize(np.array([kwargs[key] for key in kwargs]))
                # Best constant rebalance portfolio returns
                b_crp_returns = np.dot(hindsight, b_crp)
                # Calculate sharpe regret
                reward = np.log(b_crp_returns).sum() - np.log(ed_crp_returns).sum()
                # Increment counter
                i += 1
                # Update progress
                if verbose and i % 10 == 0:
                    print("Benchmark optimization step {0}/{1}, step reward: {2}".format(i,
                                                                                        int(nb_steps),
                                                                                        float(reward)),
                          end="\r")
                return reward
            except KeyboardInterrupt:
                raise ot.api.fun.MaximumEvaluationsException(0)
        # Search space declaration
        n_assets = len(self.symbols)
        bench_search_space = {str(i): j for i, j in zip(np.arange(n_assets), [[0, 1] for _ in range(n_assets)])}
        print("Optimizing benchmark...")
        # Call optimizer to benchmark
        BCR, info, _ = ot.maximize_structured(
            find_bench,
            num_evals=int(nb_steps),
            search_space=bench_search_space
            )
        # Keep the optimized weights only if they beat the current benchmark.
        if float(info.optimum) > float(initial_reward):
            self.benchmark = convert_to.decimal(array_normalize(np.array([BCR[key] for key in BCR])))
            print("\nOptimum benchmark reward: %f" % info.optimum)
            print("Best Constant Rebalance portfolio found in %d optimization rounds:\n" % i, self.benchmark.astype(float))
        else:
            print("Initial benchmark was already optimum. Reward: %s" % str(initial_reward))
            print("Benchmark portfolio: %s" % str(np.float32(self.benchmark)))
        return self.benchmark
    def get_history(self, start=None, end=None, portfolio_vector=False):
        """
        Build an OHLC (and optionally portfolio) observation dataframe.

        :param start: optional window start; defaults to obs_steps periods back
        :param end: optional window end; defaults to the current timestamp
        :param portfolio_vector: bool: append sampled balances per symbol
        :return: pandas DataFrame of Decimal values keyed by pair (and fiat)
        """
        while True:
            try:
                obs_list = []
                keys = []
                # Make desired index
                is_bounded = True
                if not end:
                    end = self.timestamp
                    is_bounded = False
                if not start:
                    start = end - timedelta(minutes=self.period * self.obs_steps)
                    index = pd.date_range(start=start,
                                          end=end,
                                          freq="%dT" % self.period).ceil("%dT" % self.period)[-self.obs_steps:]
                    is_bounded = False
                else:
                    index = pd.date_range(start=start,
                                          end=end,
                                          freq="%dT" % self.period).ceil("%dT" % self.period)
                if portfolio_vector:
                    # Get portfolio observation
                    port_vec = self.get_sampled_portfolio(index)
                    if port_vec.shape[0] == 0:
                        port_vec = self.get_sampled_portfolio().iloc[-1:]
                        port_vec.index = [index[0]]
                    # Get pairs history
                    for pair in self.pairs:
                        keys.append(pair)
                        history = self.get_ohlc(pair, index)
                        history = pd.concat([history, port_vec[pair.split('_')[1]]], axis=1)
                        obs_list.append(history)
                    # Get fiat history
                    keys.append(self._fiat)
                    obs_list.append(port_vec[self._fiat])
                    # Concatenate dataframes
                    obs = pd.concat(obs_list, keys=keys, axis=1)
                    # Fill missing portfolio observations
                    cols_to_bfill = [col for col in zip(self.pairs, self.symbols)] + [(self._fiat, self._fiat)]
                    obs = obs.fillna(obs[cols_to_bfill].ffill().bfill())
                    if not is_bounded:
                        assert obs.shape[0] >= self.obs_steps, "Dataframe is too small. Shape: %s" % str(obs.shape)
                    return obs.apply(convert_to.decimal, raw=True)
                else:
                    # Get history
                    for pair in self.pairs:
                        keys.append(pair)
                        history = self.get_ohlc(pair, index)
                        obs_list.append(history)
                    # Concatenate
                    obs = pd.concat(obs_list, keys=keys, axis=1)
                    # Check size
                    if not is_bounded:
                        assert obs.shape[0] >= self.obs_steps, "Dataframe is to small. Shape: %s" % str(obs.shape)
                    return obs.apply(convert_to.decimal, raw=True)
            except MaxRetriesException:
                Logger.error(TradingEnvironment.get_history, "Retries exhausted. Waiting for connection...")
            except Exception as e:
                Logger.error(TradingEnvironment.get_history, self.parse_error(e))
                raise e
    def get_ohlc(self, symbol, index):
        """
        Fetch OHLCV bars for a pair over the given index range.

        :param symbol: str: pair name
        :param index: DatetimeIndex bounding the request
        :return: pandas DataFrame of str values reindexed to `index`
        """
        # Get range
        start = index[0]
        end = index[-1]
        # Call for data
        ohlc_df = pd.DataFrame.from_records(self.tapi.returnChartData(symbol,
                                                                      period=self.period * 60,
                                                                      start=datetime.timestamp(start),
                                                                      end=datetime.timestamp(end)),
                                            nrows=index.shape[0])
        # TODO 1 FIND A BETTER WAY
        # TODO: FIX TIMESTAMP
        # Set index
        ohlc_df.set_index(ohlc_df.date.transform(lambda x: datetime.fromtimestamp(x).astimezone(timezone.utc)),
                          inplace=True, drop=True)
        # Disabled fill on backtest for performance.
        # We assume that backtest data feed will not return nan values
        # Get right values to fill nans
        # fill_dict = {col: ohlc_df.loc[ohlc_df.close.last_valid_index(), 'close'] for col in ['open', 'high', 'low', 'close']}
        # fill_dict.update({'volume': '0E-8'})
        # Reindex with desired time range and fill nans
        ohlc_df = ohlc_df[['open','high','low','close',
                           'volume']].reindex(index).asfreq("%dT" % self.period)#.fillna(fill_dict)
        return ohlc_df.astype(str)
    def reset(self):
        """
        Setup env with initial values
        :return: pandas DataFrame: Initial observation
        """
        try:
            # If need setup, do it
            if not self.initialized:
                self.setup()
            # Get start point
            if self.training:
                # NOTE(review): np.random.random_integers is deprecated and
                # removed in recent NumPy releases; np.random.randint(low,
                # high + 1) is the drop-in replacement — confirm the pinned
                # NumPy version before upgrading.
                self.index = np.random.random_integers(self.obs_steps, self.data_length - 3)
            else:
                self.index = self.obs_steps
            # Reset log dfs
            self.obs_df = pd.DataFrame()
            self.portfolio_df = pd.DataFrame(columns=list(self.symbols) + ['portval'])
            # Reset balance
            self.balance = self.init_balance
            # Get new index
            self.index += 1
            # Get first observation
            obs = self.get_observation(True)
            # Reset portfolio value
            self.portval = {'portval': self.calc_total_portval(self.obs_df.index[-1]),
                            'timestamp': self.portfolio_df.index[-1]}
            # Clean actions
            self.action_df = pd.DataFrame([list(self.calc_portfolio_vector()) + [False]],
                                          columns=list(self.symbols) + ['online'],
                                          index=[self.portfolio_df.index[-1]])
            # Return first observation
            return obs.astype(np.float64)
        except IndexError:
            print("Insufficient tapi data. You must choose a bigger time span or a lower period.")
            raise IndexError
    def step(self, action):
        """
        Execute one simulation step: trade, advance, observe, reward.

        :param action: numpy array: desired portfolio vector
        :return: (observation, reward, done, status) tuple
        """
        try:
            # Get step timestamp
            timestamp = self.timestamp
            # Save portval for reward calculation
            previous_portval = self.calc_total_portval()
            # Simulate portifolio rebalance
            self.simulate_trade(action, timestamp)
            # Check for end condition
            if self.index >= self.data_length - 2:
                done = True
                # NOTE(review): status['OOD'] is initialized to False and
                # incremented here (False + 1 == 1); an explicit boolean or
                # counter would be clearer — confirm intended semantics.
                self.status["OOD"] += 1
            else:
                done = False
            # Get new index
            self.index += 1
            # Get new observation
            new_obs = self.get_observation(True)
            # Get reward for action took
            reward = self.get_reward(previous_portval)
            # Return new observation, reward, done flag and status for debugging
            return new_obs.astype(np.float64), np.float64(reward), done, self.status
        except KeyboardInterrupt:
            self.status["OOD"] += 1
            # return self.get_observation(True).astype(np.float64), np.float64(0), False, self.status
            raise KeyboardInterrupt
        except Exception as e:
            Logger.error(BacktestEnvironment.step, self.parse_error(e))
            if hasattr(self, 'email'):
                self.send_email("TradingEnvironment Error: %s at %s" % (e,
                                datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(e))
            print("step action:", action)
            raise e
class TrainingEnvironment(BacktestEnvironment):
    """
    Backtest variant for agent training: the whole data set is preloaded
    once and every reset() samples a random starting point from it.
    """
    def __init__(self, period, obs_steps, tapi, fiat, name):
        super(TrainingEnvironment, self).__init__(period, obs_steps, tapi, fiat, name)

    @property
    def timestamp(self):
        # Current bar time taken from the preloaded data set index (UTC)
        return datetime.fromtimestamp(self.data.index[self.index]).astimezone(timezone.utc)

    def get_history(self, start=None, end=None, portfolio_vector=False):
        """
        Build the observation window ending at the current timestamp.

        :param start: overwritten internally; kept for interface compatibility
        :param end: overwritten internally; kept for interface compatibility
        :param portfolio_vector: unused here; kept for interface compatibility
        :return: pandas DataFrame: OHLC plus portfolio columns, decimal valued

        Note: the original wrapped this body in a ``while True`` that could
        never iterate twice (every path returns or re-raises); the loop was
        removed as dead scaffolding.
        """
        try:
            obs_list = []
            keys = []
            # Make desired index for the observation window
            end = self.timestamp
            start = end - timedelta(minutes=self.period * self.obs_steps)
            index = pd.date_range(start=start,
                                  end=end,
                                  freq="%dT" % self.period).ceil("%dT" % self.period)[-self.obs_steps:]
            # Get portfolio observation
            port_vec = self.get_sampled_portfolio(index)
            if port_vec.shape[0] == 0:
                # No portfolio sample inside the window: reuse the last known one
                port_vec = self.get_sampled_portfolio().iloc[-1:]
                port_vec.index = [index[0]]
            # Get pairs history
            for pair in self.pairs:
                keys.append(pair)
                history = self.get_ohlc(pair, index)
                history = pd.concat([history, port_vec[pair.split('_')[1]]], axis=1)
                obs_list.append(history)
            # Get fiat history
            keys.append(self._fiat)
            obs_list.append(port_vec[self._fiat])
            # Concatenate dataframes
            obs = pd.concat(obs_list, keys=keys, axis=1)
            # Fill missing portfolio observations
            cols_to_bfill = [col for col in zip(self.pairs, self.symbols)] + [(self._fiat, self._fiat)]
            obs = obs.fillna(obs[cols_to_bfill].ffill().bfill())
            return obs.apply(convert_to.decimal, raw=True)
        except Exception as e:
            Logger.error(TrainingEnvironment.get_history, self.parse_error(e))
            raise e

    def get_observation(self, portfolio_vector=False):
        """
        Return observation df with prices and asset amounts
        :param portfolio_vector: bool: whether to include or not asset amounts
        :return: pandas DataFrame:
        """
        try:
            self.obs_df = self.get_history(portfolio_vector=portfolio_vector)
            return self.obs_df
        except Exception as e:
            Logger.error(TrainingEnvironment.get_observation, self.parse_error(e))
            raise e

    def setup(self):
        """Preload the full data set and initialize spaces, fees and balance."""
        self.data_length = self.tapi.data_length
        # Temporarily widen the observation window to pull the whole data set
        obs_steps = self.obs_steps
        self.obs_steps = self.data_length
        self.index = self.obs_steps - 1
        self.data = super().get_observation().astype('f')
        self.obs_steps = obs_steps
        self.index = self.obs_steps
        # Set spaces
        self.set_observation_space()
        self.set_action_space()
        # Get fee values (floats here: training runs on float32 data)
        for symbol in self.symbols:
            self.tax[symbol] = float(self.get_fee(symbol))
        # Start balance
        self.init_balance = self.get_balance()
        # Set flag
        self.initialized = True

    def reset(self):
        """
        Reset the environment to a random point of the data set.

        :return: numpy ndarray: first observation of the new episode
        """
        # If need setup, do it
        if not self.initialized:
            self.setup()
        # Choose a new start point at random. np.random.random_integers is
        # deprecated (removed in recent NumPy); randint's exclusive upper
        # bound (data_length - 2) covers the same inclusive range
        # [obs_steps, data_length - 3].
        self.index = np.random.randint(self.obs_steps, self.data_length - 2)
        # Clean data frames
        self.obs_df = pd.DataFrame()
        self.portfolio_df = pd.DataFrame()
        # Reset balance
        self.balance = self.init_balance = self.get_balance()
        # Get new index
        self.index += 1
        # Observe environment
        obs = self.get_observation(True)
        # Reset portfolio value
        self.portval = {'portval': self.calc_total_portval(self.obs_df.index[-1]),
                        'timestamp': self.portfolio_df.index[-1]}
        # Init state; list() guards against non-list symbols containers,
        # matching the action_df construction of the sibling environments
        self.action_df = pd.DataFrame([list(self.calc_portfolio_vector()) + [False]],
                                      columns=list(self.symbols) + ['online'],
                                      index=[self.portfolio_df.index[0]])
        # Return first observation
        return obs.astype('f')

    def simulate_trade(self, action, timestamp):
        # Trade simulation is delegated to concrete training environments
        raise NotImplementedError("TrainingEnvironment.simulate_trade must be implemented by a subclass.")

    def step(self, action):
        """
        Execute the action, advance one bar and observe.

        :param action: numpy ndarray: desired portfolio weights, norm one
        :return: tuple: (observation ndarray, reward, done flag, status dict)
        """
        try:
            # Get step timestamp
            timestamp = self.timestamp
            # Save portval for reward calculation
            previous_portval = self.calc_total_portval()
            # Simulate portfolio rebalance
            self.simulate_trade(action, timestamp)
            # Check for end condition
            if self.index >= self.data_length - 2:
                done = True
                self.status["OOD"] += 1
            else:
                done = False
            # Get new index
            self.index += 1
            # Get new observation
            new_obs = self.get_observation(True)
            # Get reward for action taken
            reward = self.get_reward(previous_portval)
            # Return new observation, reward, done flag and status for debugging
            return new_obs.astype(np.float64), np.float64(reward), done, self.status
        except KeyboardInterrupt:
            self.status["OOD"] += 1
            raise KeyboardInterrupt
        except Exception as e:
            Logger.error(TrainingEnvironment.step, self.parse_error(e))
            if hasattr(self, 'email'):
                self.send_email("TradingEnvironment Error: %s at %s" % (e,
                                datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(e))
            print("step action:", action)
            raise e
class PaperTradingEnvironment(TradingEnvironment):
    """
    Paper trading environment for financial strategies forward testing
    """
    def __init__(self, period, obs_steps, tapi, fiat, name):
        super().__init__(period, obs_steps, tapi, fiat, name)

    def setup(self):
        """Prepare observation/action spaces and the fee table."""
        self.set_observation_space()
        self.set_action_space()
        # Cache the fee for every traded symbol
        for symbol in self.symbols:
            self.tax[symbol] = convert_to.decimal(self.get_fee(symbol))
        self.initialized = True

    def reset(self):
        """Clear episode state and return the first observation."""
        self.obs_df = pd.DataFrame()
        self.portfolio_df = pd.DataFrame()
        self.balance = self.init_balance = self.get_balance()
        first_obs = self.get_observation(True)
        self.action_df = pd.DataFrame([list(self.calc_portfolio_vector()) + [False]],
                                      columns=list(self.symbols) + ['online'],
                                      index=[self.timestamp])
        self.portval = {'portval': self.calc_total_portval(),
                        'timestamp': self.portfolio_df.index[-1]}
        return first_obs.astype(np.float64)

    def step(self, action):
        """Log and simulate the action, wait for the next bar, then observe."""
        bar_time = self.timestamp
        # Record the requested (not yet executed) action
        self.log_action_vector(bar_time, action, False)
        # Portfolio value before rebalance, reward baseline
        portval_before = self.calc_total_portval()
        done = self.simulate_trade(action, bar_time)
        # Sleep until the next bar opens; a negative delta raises ValueError,
        # which just means the next bar is already open
        try:
            sleep(datetime.timestamp(floor_datetime(bar_time, self.period) + timedelta(minutes=self.period)) -
                  datetime.timestamp(self.timestamp))
        except ValueError:
            pass
        observation = self.get_observation(True).astype(np.float64)
        reward = self.get_reward(portval_before)
        return observation, np.float64(reward), done, self.status
class LiveTradingEnvironment(TradingEnvironment):
    """
    Live trading environment for financial strategies execution

    ** USE AT YOUR OWN RISK**
    """
    def __init__(self, period, obs_steps, tapi, fiat, name):
        assert isinstance(tapi, ExchangeConnection), "tapi must be an ExchangeConnection instance."
        super().__init__(period, obs_steps, tapi, fiat, name)

    # Data feed methods
    def get_balance_array(self):
        """
        Return balances as an array ordered like self.symbols
        :return: numpy ndarray: Decimal balances, fiat last
        """
        balance_array = np.empty(len(self.symbols), dtype=Decimal)
        balance = self.get_balance()
        for i, symbol in enumerate(self.symbols):
            balance_array[i] = balance[symbol]
        return balance_array

    def calc_total_portval(self, ticker=None, timestamp=None):
        """
        Calculate total portfolio value given last pair prices
        :param ticker: dict: exchange ticker; fetched if not given
        :param timestamp: For compatibility only
        :return: Decimal: Total portfolio value in fiat units
        """
        portval = dec_zero
        balance = self.get_balance()
        if not ticker:
            ticker = self.tapi.returnTicker()
        for pair in self.pairs:
            # fused multiply-add: amount * last price + running total
            portval = balance[pair.split('_')[1]].fma(convert_to.decimal(ticker[pair]['last']),
                                                      portval)
        portval = dec_con.add(portval, balance[self._fiat])
        return dec_con.create_decimal(portval)

    def calc_portfolio_vector(self, ticker=None):
        """
        Calculate current portfolio weight vector (sums to one, fiat last)
        :param ticker: dict: exchange ticker; fetched if not given
        :return: numpy ndarray: Decimal weights
        """
        portfolio = np.empty(len(self.symbols), dtype=np.dtype(Decimal))
        portval = self.calc_total_portval(ticker)
        if not ticker:
            ticker = self.tapi.returnTicker()
        balance = self.get_balance()
        for i, pair in enumerate(self.pairs):
            portfolio[i] = safe_div(dec_con.multiply(balance[pair.split('_')[1]],
                                                     convert_to.decimal(ticker[pair]['last'])), portval)
        portfolio[-1] = safe_div(balance[self._fiat], portval)
        return convert_to.decimal(portfolio)

    def calc_desired_balance_array(self, action, ticker=None):
        """
        Return asset amounts given action array
        :param action: numpy ndarray: action array with norm summing one
        :param ticker: dict: exchange ticker; fetched if not given
        :return: numpy ndarray: asset amount array given action
        """
        desired_balance = np.empty(len(self.symbols), dtype=np.dtype(Decimal))
        portval = fiat = self.calc_total_portval(ticker)
        if not ticker:
            ticker = self.tapi.returnTicker()
        for i, pair in enumerate(self.pairs):
            # Target amount of each asset, valued at the last trade price
            desired_balance[i] = safe_div(dec_con.multiply(portval, action[i]),
                                          dec_con.create_decimal(ticker[pair]['last']))
            # Whatever is not allocated to assets stays in fiat
            fiat = dec_con.subtract(fiat, dec_con.multiply(portval, action[i]))
        desired_balance[-1] = dec_con.create_decimal(fiat)
        return desired_balance

    def immediate_sell(self, symbol, amount):
        """
        Immediate or cancel sell order
        :param symbol: str: Pair name
        :param amount: str: Asset amount to sell
        :return: bool: if executed: True, else False
        """
        try:
            pair = self._fiat + '_' + symbol
            amount = str(amount)
            while True:
                try:
                    price = self.tapi.returnTicker()[pair]['highestBid']
                    Logger.debug(LiveTradingEnvironment.immediate_sell,
                                 "Selling %s %s at %s" % (pair, amount, price))

                    response = self.tapi.sell(pair, price, amount, orderType="immediateOrCancel")

                    Logger.debug(LiveTradingEnvironment.immediate_sell,
                                 "Response: %s" % str(response))

                    if 'amountUnfilled' in response:
                        if response['amountUnfilled'] == '0.00000000':
                            return True
                        else:
                            # Retry with the unfilled remainder
                            amount = response['amountUnfilled']

                    # Soft errors reported inside the response body
                    if 'Total must be at least' in response:
                        return True
                    elif 'Amount must be at least' in response:
                        return True
                    elif 'Not enough %s.' % symbol == response:
                        amount = self.get_balance()[symbol]
                        if dec_con.create_decimal(amount) < dec_con.create_decimal('1E-8'):
                            # Residual dust below exchange precision: done
                            return True
                    elif 'Order execution timed out.' == response:
                        amount = self.get_balance()[symbol]

                except ExchangeError as error:
                    Logger.error(LiveTradingEnvironment.immediate_sell, self.parse_error(error))
                    if 'Total must be at least' in error.__str__():
                        return True
                    elif 'Amount must be at least' in error.__str__():
                        return True
                    elif 'Not enough %s.' % symbol == error.__str__():
                        amount = self.get_balance()[symbol]
                        if dec_con.create_decimal(amount) < dec_con.create_decimal('1E-8'):
                            return True
                    elif 'Order execution timed out.' == error.__str__():
                        amount = self.get_balance()[symbol]
                    else:
                        raise error

        except MaxRetriesException as error:
            Logger.error(LiveTradingEnvironment.immediate_sell, self.parse_error(error))
            if hasattr(self, 'email'):
                self.send_email("Failed to sell %s at %s" % (symbol,
                                                             datetime.strftime(self.timestamp,
                                                                               "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(error))
            raise error

        except Exception as error:
            try:
                Logger.error(LiveTradingEnvironment.immediate_sell,
                             self.parse_error(error, price, amount, response))
            except Exception:
                Logger.error(LiveTradingEnvironment.immediate_sell,
                             self.parse_error(error))
            if hasattr(self, 'email'):
                self.send_email("LiveTradingEnvironment Error: %s at %s" % (error,
                                datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(error))
            # Fixed: previously `raise e`, a NameError (the bound name is `error`)
            raise error

    def immediate_buy(self, symbol, amount):
        """
        Immediate or cancel buy order
        :param symbol: str: Pair name
        :param amount: str: Asset amount to buy
        :return: bool: if executed: True, else False
        """
        try:
            pair = self._fiat + '_' + symbol
            amount = str(amount)
            while True:
                try:
                    price = self.tapi.returnTicker()[pair]['lowestAsk']
                    Logger.debug(LiveTradingEnvironment.immediate_buy,
                                 "Buying %s %s at %s" % (pair, amount, price))

                    response = self.tapi.buy(pair, price, amount, orderType="immediateOrCancel")

                    Logger.debug(LiveTradingEnvironment.immediate_buy,
                                 "Response: %s" % str(response))

                    if 'amountUnfilled' in response:
                        if response['amountUnfilled'] == '0.00000000':
                            return True
                        else:
                            # Retry with the unfilled remainder
                            amount = response['amountUnfilled']

                    # Soft errors reported inside the response body
                    if 'Total must be at least' in response:
                        return True
                    elif 'Amount must be at least' in response:
                        return True
                    elif 'Not enough %s.' % self._fiat == response:
                        # Recompute buyable amount from the fiat actually left
                        self.status['NotEnoughFiat'] += 1
                        price = convert_to.decimal(self.tapi.returnTicker()[pair]['lowestAsk'])
                        fiat_units = self.get_balance()[self._fiat]
                        amount = str(safe_div(fiat_units, price).quantize(dec_eps))
                    elif 'Order execution timed out.' == response:
                        amount = self.get_balance()[symbol]

                except ExchangeError as error:
                    Logger.error(LiveTradingEnvironment.immediate_buy,
                                 self.parse_error(error))
                    if 'Total must be at least' in error.__str__():
                        return True
                    elif 'Amount must be at least' in error.__str__():
                        return True
                    elif 'Not enough %s.' % self._fiat == error.__str__():
                        # Retry once with the remaining fiat; give up afterwards
                        if not self.status['NotEnoughFiat']:
                            self.status['NotEnoughFiat'] += 1
                            price = convert_to.decimal(self.tapi.returnTicker()[pair]['lowestAsk'])
                            fiat_units = self.get_balance()[self._fiat]
                            amount = str(safe_div(fiat_units, price))
                        else:
                            self.status['NotEnoughFiat'] += 1
                            return True
                    elif 'Order execution timed out.' == error.__str__():
                        amount = self.get_balance()[symbol]
                    else:
                        raise error

        except MaxRetriesException as error:
            Logger.error(LiveTradingEnvironment.immediate_buy, self.parse_error(error))
            if hasattr(self, 'email'):
                self.send_email("Failed to buy %s at %s" % (symbol,
                                                            datetime.strftime(self.timestamp,
                                                                              "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(error))
            raise error

        except Exception as error:
            try:
                Logger.error(LiveTradingEnvironment.immediate_buy, self.parse_error(error, price, amount, response))
            except Exception:
                Logger.error(LiveTradingEnvironment.immediate_buy, self.parse_error(error))
            if hasattr(self, 'email'):
                self.send_email("LiveTradingEnvironment Error: %s at %s" % (error,
                                datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(error))
            raise error

    # Online Trading methods
    def rebalance_sell(self, balance_change, order_type="immediate"):
        """
        Execute rebalance sell orders sequentially
        :param balance_change: numpy array: Balance change
        :param order_type: str: Order type to use
        :return: bool: True if executed successfully
        """
        done = True
        for i, change in enumerate(balance_change):
            if change < dec_zero:
                # Reset flag
                resp = False
                # Get symbol
                symbol = self.symbols[i]
                # While order is not completed, try to sell.
                # Fixed: the original nested a second identical `while not resp:`
                # here, so an exception (which only broke the inner loop) made
                # the outer loop spin forever; a single loop with a break on
                # failure matches rebalance_buy.
                while not resp:
                    try:
                        resp = self.immediate_sell(symbol, abs(change.quantize(dec_qua)))
                    except Exception as e:
                        # Fixed: was logged under rebalance_buy
                        Logger.error(LiveTradingEnvironment.rebalance_sell,
                                     self.parse_error(e))
                        break
                # Update flag
                if not resp:
                    done = False
        return done

    def rebalance_buy(self, balance_change, order_type="immediate"):
        """
        Execute rebalance buy orders sequentially
        :param balance_change: numpy array: Balance change
        :param order_type: str: Order type to use
        :return: bool: True if executed successfully
        """
        done = True
        for i, change in enumerate(balance_change):
            if change > dec_zero:
                # Reset flag
                resp = False
                # Get symbol
                symbol = self.symbols[i]
                # While order is not completed, try to buy
                while not resp:
                    try:
                        resp = self.immediate_buy(symbol, abs(change.quantize(dec_qua)))
                    except Exception as e:
                        Logger.error(LiveTradingEnvironment.rebalance_buy,
                                     self.parse_error(e))
                        break
                # Update flag
                if not resp:
                    done = False
        return done

    def online_rebalance(self, action, timestamp):
        """
        Performs online portfolio rebalance within ExchangeConnection
        :param action: numpy array: action vector with desired portfolio weights. Norm must be one.
        :param timestamp: timestamp to log the final balance under
        :return: bool: True if fully executed; False otherwise.
        """
        try:
            done = False
            self.status['NotEnoughFiat'] = False
            # First, assert action is valid
            action = self.assert_action(action)
            # Calculate position change given last portfolio and action vector
            ticker = self.tapi.returnTicker()
            balance_change = dec_vec_sub(self.calc_desired_balance_array(action, ticker),
                                         self.get_balance_array())[:-1]
            # Sell assets first so fiat is available for the buys
            resp_1 = self.rebalance_sell(balance_change)
            # Then, buy what you want
            resp_2 = self.rebalance_buy(balance_change)
            # If everything went well, return True
            if resp_1 and resp_2:
                done = True
            # Get new ticker
            ticker = self.tapi.returnTicker()
            # Log executed action and final balance
            self.log_action_vector(self.timestamp, self.calc_portfolio_vector(ticker), done)
            # Update portfolio_df
            final_balance = self.get_balance()
            final_balance['timestamp'] = timestamp
            self.balance = final_balance
            # Calculate new portval
            self.portval = {'portval': self.calc_total_portval(ticker),
                            'timestamp': self.portfolio_df.index[-1]}
            return done
        except Exception as e:
            # Log error for debug
            try:
                Logger.error(LiveTradingEnvironment.online_rebalance,
                             self.parse_error(e, action, ticker, balance_change))
            except Exception:
                Logger.error(LiveTradingEnvironment.online_rebalance,
                             self.parse_error(e))
            # Wake up nerds for the rescue
            if hasattr(self, 'email'):
                self.send_email("Online Rebalance Error: %s at %s" % (e,
                                datetime.strftime(self.timestamp, "%Y-%m-%d %H:%M:%S")),
                                self.parse_error(e))
            raise e

    # Env methods
    def setup(self):
        """Prepare observation/action spaces and the fee table."""
        self.set_observation_space()
        self.set_action_space()
        # Get fee values
        for symbol in self.symbols:
            self.tax[symbol] = convert_to.decimal(self.get_fee(symbol))
        # Set flag
        self.initialized = True

    def reset(self):
        """
        Clear episode state, sync balance with the exchange and observe.
        :return: numpy ndarray: first observation
        """
        self.obs_df = pd.DataFrame()
        self.portfolio_df = pd.DataFrame()
        self.balance = self.init_balance = self.get_balance()
        obs = self.get_observation(True)
        self.action_df = pd.DataFrame([list(self.calc_portfolio_vector()) + [False]],
                                      columns=list(self.symbols) + ['online'],
                                      index=[self.timestamp])
        self.portval = {'portval': self.calc_total_portval(),
                        'timestamp': self.portfolio_df.index[-1]}
        return obs.astype(np.float64)

    def step(self, action):
        """
        Execute the action on the exchange, wait for the next bar, observe.
        :param action: numpy ndarray: desired portfolio weights, norm one
        :return: tuple: (observation ndarray, reward, done flag, status dict)
        """
        # Get step timestamp
        timestamp = self.timestamp
        # Log desired action
        self.log_action_vector(timestamp, action, False)
        # Save portval for reward calculation
        previous_portval = self.calc_total_portval()
        # Execute the portfolio rebalance on the exchange
        done = self.online_rebalance(action, timestamp)
        # Wait for next bar open; a negative sleep delta raises ValueError,
        # meaning the next bar is already open
        try:
            sleep(datetime.timestamp(floor_datetime(timestamp, self.period) + timedelta(minutes=self.period)) -
                  datetime.timestamp(self.timestamp))
        except ValueError:
            pass
        # Observe environment
        new_obs = self.get_observation(True).astype(np.float64)
        # Get reward for previous action
        reward = self.get_reward(previous_portval)
        # Return new observation, reward, done flag and status for debugging
        return new_obs, np.float64(reward), done, self.status
|
{
"content_hash": "8d2d0b37f6a57461e098b4253fd4b636",
"timestamp": "",
"source": "github",
"line_count": 2391,
"max_line_length": 163,
"avg_line_length": 39.624843161856965,
"alnum_prop": 0.5191623655573498,
"repo_name": "naripok/cryptotrader",
"id": "cc1b7dd3a27949622f40671183c99abc4f8bd4e8",
"size": "94743",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cryptotrader/envs/trading.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "42269"
},
{
"name": "Python",
"bytes": "679007"
}
],
"symlink_target": ""
}
|
''' A donut chart populated with browser market share percentages. This example
demonstrates the low-level |bokeh.models| API.

.. bokeh-example-metadata::
    :apis: bokeh.models.AnnularWedge, bokeh.models.Legend
    :refs: :ref:`ug_topics_pie`
    :keywords: pandas, donut, wedge

'''
from math import pi

from bokeh.io import show
from bokeh.models import (AnnularWedge, ColumnDataSource,
                          Legend, LegendItem, Plot, Range1d)
from bokeh.sampledata.browsers import browsers_nov_2013 as df

xdr = Range1d(start=-2, end=2)
ydr = Range1d(start=-2, end=2)

plot = Plot(x_range=xdr, y_range=ydr)
plot.title.text = "Web browser market share (November 2013)"
plot.toolbar_location = None

colors = {
    "Chrome": "seagreen",
    "Firefox": "tomato",
    "Safari": "orchid",
    "Opera": "firebrick",
    "IE": "skyblue",
    "Other": "lightgray",
}

# Aggregate per-version rows into one share per browser and fold the
# long tail (< 1%) into a single "Other" slice.
aggregated = df.groupby("Browser").sum(numeric_only=True)
selected = aggregated[aggregated.Share >= 1].copy()
selected.loc["Other"] = aggregated[aggregated.Share < 1].sum()
browsers = selected.index.tolist()

# Cumulative angles: wedge i spans (start[i], end[i])
angles = selected.Share.map(lambda x: 2*pi*(x/100)).cumsum().tolist()

browsers_source = ColumnDataSource(dict(
    start=[0] + angles[:-1],
    end=angles,
    colors=[colors[browser] for browser in browsers],
))

glyph = AnnularWedge(x=0, y=0, inner_radius=0.9, outer_radius=1.8,
                     start_angle="start", end_angle="end",
                     line_color="white", line_width=3, fill_color="colors")
r = plot.add_glyph(browsers_source, glyph)

legend = Legend(location="center")
# Iterate `browsers` (the data-source row order), not the `colors` dict:
# the dict's insertion order differs from the alphabetical groupby order,
# so pairing dict keys with wedge indices mislabeled the wedges.
for i, name in enumerate(browsers):
    legend.items.append(LegendItem(label=name, renderers=[r], index=i))
plot.add_layout(legend, "center")

show(plot)
|
{
"content_hash": "f9c78bbc5f646b7aa89862d8ef6ca82e",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 79,
"avg_line_length": 30.982142857142858,
"alnum_prop": 0.6720461095100865,
"repo_name": "bokeh/bokeh",
"id": "9e0d58889e4687589ed9ad2d32fb6920da7deebd",
"size": "1735",
"binary": false,
"copies": "1",
"ref": "refs/heads/branch-3.1",
"path": "examples/topics/pie/donut.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1884"
},
{
"name": "Dockerfile",
"bytes": "1924"
},
{
"name": "GLSL",
"bytes": "44696"
},
{
"name": "HTML",
"bytes": "53475"
},
{
"name": "JavaScript",
"bytes": "20301"
},
{
"name": "Less",
"bytes": "46376"
},
{
"name": "Python",
"bytes": "4475226"
},
{
"name": "Shell",
"bytes": "7673"
},
{
"name": "TypeScript",
"bytes": "3652153"
}
],
"symlink_target": ""
}
|
import pytest
from async_generator import async_generator, yield_
from geopy.exc import ConfigurationError, GeocoderQueryError
from geopy.geocoders import IGNFrance
from test.geocoders.util import BaseTestGeocoder, env
from test.proxy_server import ProxyServerThread
class TestUnitIGNFrance:
    """Offline unit tests for IGNFrance construction and validation."""

    def test_user_agent_custom(self):
        # A custom User-Agent must be propagated into the request headers
        options = dict(
            api_key='DUMMYKEY1234',
            username='MUSTERMANN',
            password='tops3cr3t',
            user_agent='my_user_agent/1.0',
        )
        geocoder = IGNFrance(**options)
        assert geocoder.headers['User-Agent'] == options['user_agent']

    def test_invalid_auth_1(self):
        # api_key alone is an incomplete credential set
        with pytest.raises(ConfigurationError):
            IGNFrance(api_key="a")

    def test_invalid_auth_2(self):
        # username combined with referer is rejected
        with pytest.raises(ConfigurationError):
            IGNFrance(api_key="a", username="b", referer="c")

    def test_invalid_auth_3(self):
        # username without a password is rejected
        with pytest.raises(ConfigurationError):
            IGNFrance(api_key="a", username="b")
class BaseTestIGNFrance(BaseTestGeocoder):
    """Shared network tests for IGNFrance; subclasses supply the auth mode."""

    async def test_invalid_query_type(self):
        # Unknown query_type values must be rejected client-side
        with pytest.raises(GeocoderQueryError):
            self.geocoder.geocode("44109000EX0114", query_type="invalid")

    async def test_invalid_query_parcel(self):
        # CadastralParcel queries are length-validated client-side
        with pytest.raises(GeocoderQueryError):
            self.geocoder.geocode(
                "incorrect length string",
                query_type="CadastralParcel",
            )

    async def test_geocode(self):
        await self.geocode_run(
            {"query": "44109000EX0114",
             "query_type": "CadastralParcel"},
            {"latitude": 47.222482, "longitude": -1.556303},
        )

    async def test_geocode_no_result(self):
        await self.geocode_run(
            {"query": 'asdfasdfasdf'},
            {},
            expect_failure=True,
        )

    async def test_reverse_no_result(self):
        await self.reverse_run(
            # North Atlantic Ocean
            {"query": (35.173809, -37.485351)},
            {},
            expect_failure=True
        )

    async def test_geocode_with_address(self):
        await self.geocode_run(
            {"query": "Camp des Landes, 41200 VILLEFRANCHE-SUR-CHER",
             "query_type": "StreetAddress"},
            {"latitude": 47.293048,
             "longitude": 1.718985,
             "address": "le camp des landes, 41200 Villefranche-sur-Cher"},
        )

    async def test_geocode_freeform(self):
        # is_freeform returns the raw, unstructured address string
        await self.geocode_run(
            {"query": "8 rue Général Buat, Nantes",
             "query_type": "StreetAddress",
             "is_freeform": True},
            {"address": "8 r general buat , 44000 Nantes"},
        )

    async def test_geocode_position_of_interest(self):
        res = await self.geocode_run(
            {"query": "Chambéry",
             "query_type": "PositionOfInterest",
             "exactly_one": False},
            {},
        )
        addresses = [location.address for location in res]
        assert "02000 Chambry" in addresses
        assert "16420 Saint-Christophe" in addresses

    async def test_geocode_filter_by_attribute(self):
        # Restricting by Departement 38 must yield hits only from that departement
        res = await self.geocode_run(
            {"query": "Les Molettes",
             "query_type": "PositionOfInterest",
             "maximum_responses": 10,
             "filtering": '<Place type="Departement">38</Place>',
             "exactly_one": False},
            {},
        )
        departements = [location.raw['departement'] for location in res]
        unique = list(set(departements))
        assert len(unique) == 1
        assert unique[0] == "38"

    async def test_geocode_filter_by_envelope(self):
        lat_min, lng_min, lat_max, lng_max = 45.00, 5, 46, 6.40

        spatial_filtering_envelope = """
        <gml:envelope>
            <gml:pos>{lat_min} {lng_min}</gml:pos>
            <gml:pos>{lat_max} {lng_max}</gml:pos>
        </gml:envelope>
        """.format(
            lat_min=lat_min,
            lng_min=lng_min,
            lat_max=lat_max,
            lng_max=lng_max
        )

        # An envelope filter must strictly narrow the result set
        res_spatial_filter = await self.geocode_run(
            {"query": 'Les Molettes',
             "query_type": 'PositionOfInterest',
             "maximum_responses": 10,
             "filtering": spatial_filtering_envelope,
             "exactly_one": False},
            {},
        )
        departements_spatial = list(
            {i.raw['departement'] for i in res_spatial_filter}
        )

        res_no_spatial_filter = await self.geocode_run(
            {"query": 'Les Molettes',
             "query_type": 'PositionOfInterest',
             "maximum_responses": 10,
             "exactly_one": False},
            {},
        )
        departements_no_spatial = list(
            set([
                i.raw['departement']
                for i in res_no_spatial_filter
            ])
        )

        assert len(departements_no_spatial) > len(departements_spatial)

    async def test_reverse(self):
        res = await self.reverse_run(
            {"query": '47.229554,-1.541519'},
            {},
        )
        assert res.address == '7 av camille guerin, 44000 Nantes'

    async def test_reverse_invalid_preference(self):
        # Unknown reverse_geocode_preference values must be rejected client-side
        with pytest.raises(GeocoderQueryError):
            self.geocoder.reverse(
                query='47.229554,-1.541519',
                reverse_geocode_preference=['a']  # invalid
            )

    async def test_reverse_preference(self):
        res = await self.reverse_run(
            {"query": '47.229554,-1.541519',
             "exactly_one": False,
             "reverse_geocode_preference": ['StreetAddress', 'PositionOfInterest']},
            {},
        )
        addresses = [location.address for location in res]
        assert "3 av camille guerin, 44000 Nantes" in addresses
        assert "5 av camille guerin, 44000 Nantes" in addresses

    async def test_reverse_by_radius(self):
        spatial_filtering_radius = """
        <gml:CircleByCenterPoint>
            <gml:pos>{coord}</gml:pos>
            <gml:radius>{radius}</gml:radius>
        </gml:CircleByCenterPoint>
        """.format(coord='48.8033333 2.3241667', radius='50')

        res_call_radius = await self.reverse_run(
            {"query": '48.8033333,2.3241667',
             "exactly_one": False,
             "maximum_responses": 10,
             "filtering": spatial_filtering_radius},
            {},
        )

        res_call = await self.reverse_run(
            {"query": '48.8033333,2.3241667',
             "exactly_one": False,
             "maximum_responses": 10},
            {},
        )

        # Radius-filtered results must be a subset of the unfiltered ones
        coordinates_couples_radius = set([
            (str(location.latitude) + ' ' + str(location.longitude))
            for location in res_call_radius
        ])
        coordinates_couples = set([
            (str(location.latitude) + ' ' + str(location.longitude))
            for location in res_call
        ])

        assert coordinates_couples_radius.issubset(coordinates_couples)
class TestIGNFranceApiKeyAuth(BaseTestIGNFrance):
    """Run the shared IGNFrance tests using api_key + referer auth."""

    @classmethod
    def make_geocoder(cls, **kwargs):
        # Forward **kwargs so parametrized construction (e.g. proxies=...)
        # is honored; previously the kwargs were accepted but silently
        # discarded, unlike TestIGNFranceUsernameAuth.make_geocoder.
        return IGNFrance(
            api_key=env['IGNFRANCE_KEY'],
            referer=env['IGNFRANCE_REFERER'],
            timeout=10,
            **kwargs
        )
class TestIGNFranceUsernameAuth(BaseTestIGNFrance):
    """Run the shared IGNFrance tests using username/password auth."""

    @classmethod
    def make_geocoder(cls, **kwargs):
        credentials = dict(
            api_key=env['IGNFRANCE_USERNAME_KEY'],
            username=env['IGNFRANCE_USERNAME'],
            password=env['IGNFRANCE_PASSWORD'],
            timeout=10,
        )
        return IGNFrance(**credentials, **kwargs)
class TestIGNFranceUsernameAuthProxy(BaseTestGeocoder):
    """Verify that a configured HTTP proxy is actually used for requests."""

    # Seconds the local proxy server waits before timing out
    proxy_timeout = 5

    @classmethod
    def make_geocoder(cls, **kwargs):
        return IGNFrance(
            api_key=env['IGNFRANCE_USERNAME_KEY'],
            username=env['IGNFRANCE_USERNAME'],
            password=env['IGNFRANCE_PASSWORD'],
            timeout=10,
            **kwargs
        )

    @pytest.fixture(scope='class', autouse=True)
    @async_generator
    async def start_proxy(_, request, class_geocoder):
        # First parameter is the fixture's `self`, named `_` because it is
        # unused; the class is reached through `request.cls` instead.
        cls = request.cls
        cls.proxy_server = ProxyServerThread(timeout=cls.proxy_timeout)
        cls.proxy_server.start()
        cls.proxy_url = cls.proxy_server.get_proxy_url()
        # Swap in a geocoder routed through the local proxy for the
        # duration of this class's tests, then tear the proxy down.
        async with cls.inject_geocoder(cls.make_geocoder(proxies=cls.proxy_url)):
            await yield_()
        cls.proxy_server.stop()
        cls.proxy_server.join()

    async def test_proxy_is_respected(self):
        # The proxy must observe exactly one request per geocode call
        assert 0 == len(self.proxy_server.requests)
        await self.geocode_run(
            {"query": "Camp des Landes, 41200 VILLEFRANCHE-SUR-CHER",
             "query_type": "StreetAddress"},
            {"latitude": 47.293048,
             "longitude": 1.718985,
             "address": "le camp des landes, 41200 Villefranche-sur-Cher"},
        )
        assert 1 == len(self.proxy_server.requests)
|
{
"content_hash": "258cdb4f71886996dc513e2fbd5f5aa7",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 84,
"avg_line_length": 32.17689530685921,
"alnum_prop": 0.5619881072590598,
"repo_name": "jmb/geopy",
"id": "ca4cd9d2a716abf66dcdfae47cce75b12a249f56",
"size": "8916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/geocoders/ignfrance.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1334"
},
{
"name": "Python",
"bytes": "477174"
}
],
"symlink_target": ""
}
|
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax pin.referer and pin.url to optional 512-character fields."""

    dependencies = [
        ('core', '0008_board_private'),
    ]

    # Both columns receive the identical relaxed definition; build the two
    # AlterField operations from one template to keep them in sync.
    operations = [
        migrations.AlterField(
            model_name='pin',
            name=column,
            field=models.CharField(blank=True, max_length=512, null=True),
        )
        for column in ('referer', 'url')
    ]
|
{
"content_hash": "f168d1d9d1dee532ae5a1703a6b47d13",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 74,
"avg_line_length": 24.857142857142858,
"alnum_prop": 0.553639846743295,
"repo_name": "pinry/pinry",
"id": "c31bbe43853f17d253c2fdc9f5cdb0e754bd6c88",
"size": "572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/migrations/0009_auto_20200825_0800.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "270"
},
{
"name": "HTML",
"bytes": "660"
},
{
"name": "JavaScript",
"bytes": "24244"
},
{
"name": "Makefile",
"bytes": "1340"
},
{
"name": "Python",
"bytes": "79651"
},
{
"name": "SCSS",
"bytes": "2559"
},
{
"name": "Shell",
"bytes": "6050"
},
{
"name": "Vue",
"bytes": "79127"
}
],
"symlink_target": ""
}
|
import re
import os
from fxpt.qt.pyside import QtCore, QtGui
from com import *
from fxpt.fx_refsystem.ref_handle import ATTR_REF_FILENAME
from fxpt.fx_utils.utils_maya import getParent
class NodeInfo(object):
    """Lightweight record describing one matched scene node."""

    def __init__(self):
        # All fields are filled in by the searchers after a match is found
        self.selectionString = None  # list used to select the node(s)
        self.fullPathName = None     # full path of the node
        self.shortName = None        # display name
class SearchDesc(object):
    """Search options collected from the caller; all default to None."""

    def __init__(self):
        self.searchString = self.caseSensitive = self.regex = None
        self.selectFound = self.includeShapes = self.searchSelected = None
class SearcherBase(object):
def __init__(self, name):
self.name = name
self.active = False
self.searchDesc = None
self.model = QtGui.QStandardItemModel()
self.initModel()
def search(self, sd):
if not self.getActive():
return
self.initModel()
self.searchDesc = sd
searchData = self.gatherSearchData()
matchedData = self.doSearch(searchData)
modelData = self.prepareModelData(matchedData)
self.fillModel(modelData)
def getName(self):
return self.name
def getModel(self):
return self.model
def setActive(self, state):
self.active = state
def getActive(self):
return self.active
def hasResult(self):
return bool(self.model.rowCount())
def reset(self):
self.model.clear()
def filterShapes(self, nodes):
if self.searchDesc.includeShapes:
return nodes
else:
return [x for x in nodes if not isShape(x)]
def getRegexpObject(self):
searchPattern = self.searchDesc.searchString
if not self.searchDesc.regex:
searchPattern = re.escape(searchPattern)
if searchPattern.startswith('\?'):
searchPattern = '^' + searchPattern
if searchPattern.endswith('\?'):
searchPattern += '$'
searchPattern = re.sub(r'\\\?', r'.', searchPattern)
searchPattern = re.sub(r'\\\*', r'.*', searchPattern)
if self.searchDesc.caseSensitive:
regexp = re.compile(searchPattern)
else:
regexp = re.compile(searchPattern, flags=re.IGNORECASE)
return regexp
def doSearch(self, searchData):
regexp = self.getRegexpObject()
return [item for item in searchData if regexp.search(item[1])]
def initModel(self):
columnNames = self.getColumnNames()
self.model.clear()
self.model.setRowCount(0)
self.model.setColumnCount(len(columnNames))
for i, col in enumerate(columnNames):
self.model.setHeaderData(i, QtCore.Qt.Horizontal, col, QtCore.Qt.DisplayRole)
def fillModel(self, modelData):
if not modelData:
return
itemsNum = len(modelData)
colNum = len(modelData[0]) - 1
self.model.insertRows(0, itemsNum)
for i, modelRowDataItem in enumerate(modelData):
for col in range(colNum):
modelIndex = self.model.index(i, col)
self.model.setData(modelIndex, modelRowDataItem[col])
modelIndex = self.model.index(i, 0)
self.model.setData(modelIndex, modelRowDataItem[-1], QtCore.Qt.UserRole)
    def getColumnNames(self):
        """Abstract: return the list of column headers for the result model."""
        raise NotImplementedError("Call to abstract method.")
    def gatherSearchData(self):
        """Abstract: return (node, searchable-text) pairs to match against."""
        raise NotImplementedError("Call to abstract method.")
    def prepareModelData(self, matchedData):
        """Abstract: convert matched pairs into model rows for fillModel()."""
        raise NotImplementedError("Call to abstract method.")
class SearcherSimpleBase(SearcherBase):
    """Common base for searchers that match against a node's short name."""
    def __init__(self, name):
        super(SearcherSimpleBase, self).__init__(name)
    def getColumnNames(self):
        """Column headers for the result model."""
        return [
            'Name',
            'Type',
            'Path'
        ]
    def getTargetNodes(self):
        """Abstract: return the candidate nodes; subclasses decide the scope."""
        raise NotImplementedError("Call to abstract method.")
    def gatherSearchData(self):
        """Build (fullPathName, shortName) pairs for every candidate node,
        honouring the include-shapes setting."""
        candidates = self.filterShapes(self.getTargetNodes())
        return [(node, shortNameOf(node)) for node in candidates]
    def prepareModelData(self, matchedData):
        """Convert matched (fullPathName, shortName) pairs into
        (shortName, type, path, NodeInfo) rows for the model."""
        rows = []
        for fullPathName, shortName in matchedData:
            info = NodeInfo()
            info.selectionString = [fullPathName]
            info.fullPathName = fullPathName
            info.shortName = shortName
            rows.append((shortName,
                         getNodeTypeString(fullPathName),
                         pathOf(fullPathName),
                         info))
        return rows
class SearcherNodes(SearcherSimpleBase):
    """Searches every node in the scene (or the current selection)."""
    def __init__(self, name):
        super(SearcherNodes, self).__init__(name)
    def getTargetNodes(self):
        """All scene nodes, restricted to the selection when requested."""
        if self.searchDesc.searchSelected:
            return m.ls(sl=True, l=True)
        return m.ls(l=True)
class SearcherDagNodes(SearcherSimpleBase):
    """Searches DAG nodes only (or the selected ones)."""
    def __init__(self, name):
        super(SearcherDagNodes, self).__init__(name)
    def getTargetNodes(self):
        """All DAG nodes, restricted to the selection when requested."""
        if self.searchDesc.searchSelected:
            return m.ls(sl=True, l=True, dag=True)
        return m.ls(l=True, dag=True)
class SearcherTransforms(SearcherSimpleBase):
    """Searches transform nodes only (or the selected ones)."""
    def __init__(self, name):
        super(SearcherTransforms, self).__init__(name)
    def getTargetNodes(self):
        """All transforms, restricted to the selection when requested."""
        if self.searchDesc.searchSelected:
            return m.ls(sl=True, l=True, transforms=True)
        return m.ls(l=True, transforms=True)
class SearcherType(SearcherBase):
    """Searches nodes by their node-type string."""
    def __init__(self, name):
        super(SearcherType, self).__init__(name)
    def getColumnNames(self):
        """Column headers for the result model."""
        return [
            'Node Name',
            'Node Type',
            'Path'
        ]
    def gatherSearchData(self):
        """Build (fullPathName, nodeType) pairs for every candidate node."""
        if self.searchDesc.searchSelected:
            nodes = m.ls(sl=True, l=True)
        else:
            nodes = m.ls(l=True)
        return [(node, typeOf(node)) for node in nodes]
    def prepareModelData(self, matchedData):
        """Convert matches into (shortName, type, path, NodeInfo) rows."""
        rows = []
        for fullPathName, _ in matchedData:
            shortName = shortNameOf(fullPathName)
            info = NodeInfo()
            info.selectionString = [fullPathName]
            info.fullPathName = fullPathName
            info.shortName = shortName
            rows.append((shortName,
                         getNodeTypeString(fullPathName),
                         pathOf(fullPathName),
                         info))
        return rows
class SearcherTexturesBase(SearcherBase):
    """Common base for searchers that match file-texture basenames."""
    def __init__(self, name):
        super(SearcherTexturesBase, self).__init__(name)
    def getColumnNames(self):
        """Column headers for the result model."""
        return [
            'Name',
            'Texture',
            'Path'
        ]
    def gatherSearchData(self):
        """Build (fileNode, textureBasename) pairs for every file node."""
        if self.searchDesc.searchSelected:
            fileNodes = m.ls(sl=True, l=True, typ='file')
        else:
            fileNodes = m.ls(l=True, typ='file')
        return [(node, os.path.basename(m.getAttr(node + '.fileTextureName')))
                for node in fileNodes]
class SearcherTextures(SearcherTexturesBase):
    """Lists file nodes whose texture filename matches the search."""
    def __init__(self, name):
        super(SearcherTextures, self).__init__(name)
    def prepareModelData(self, matchedData):
        """Convert matches into (fileNodeName, textureShortName,
        texturePath, NodeInfo) rows."""
        rows = []
        for fullPathName, _ in matchedData:
            texturePath = m.getAttr(fullPathName + '.fileTextureName')
            shortName = shortNameOf(fullPathName)
            info = NodeInfo()
            info.selectionString = [fullPathName]
            info.fullPathName = fullPathName
            info.shortName = shortName
            rows.append((shortName, os.path.basename(texturePath),
                         texturePath, info))
        return rows
class SearcherTexturedBy(SearcherTexturesBase):
    """Lists file textures like SearcherTextures, but the selection strings
    point at the members of the shading groups fed by the file node, so
    activating a result selects the textured geometry."""
    def __init__(self, name):
        # Bug fix: the original called super(SearcherTexturesBase, self),
        # which skipped SearcherTexturesBase.__init__ in the MRO.
        super(SearcherTexturedBy, self).__init__(name)
        self.visitedNodes = set()
    def prepareModelData(self, matchedData):
        """Convert matches into (fileNodeName, textureShortName,
        fileNodeTexturePath, NodeInfo) rows whose selection strings are the
        shading-group members textured by the file node."""
        modelData = []
        for matchedItem in matchedData:
            fullPathName = matchedItem[0]
            shortName = shortNameOf(fullPathName)
            fileNodeTexturePath = m.getAttr(fullPathName + '.fileTextureName')
            textureShortName = os.path.basename(fileNodeTexturePath)
            shadingGroups = self.getShadingGroups(fullPathName)
            selectionStrings = []
            for sg in shadingGroups:
                members = m.sets(sg, q=True)
                if members:  # guard: Maya may return None for an empty set
                    selectionStrings.extend(members)
            ni = NodeInfo()
            ni.selectionString = selectionStrings
            ni.fullPathName = fullPathName
            ni.shortName = shortName
            modelData.append((shortName, textureShortName, fileNodeTexturePath, ni))
        return modelData
    def getShadingGroups(self, fileNode):
        """Return all shadingEngine nodes reachable downstream of *fileNode*.

        Bug fix: the original cleared the visited set inside the recursive
        call and never added visited destinations, so a cycle in the shading
        network longer than one edge recursed forever.  Visited tracking now
        persists for the whole traversal.
        """
        self.visitedNodes.clear()
        self.visitedNodes.add(fileNode)
        return self._collectShadingGroups(fileNode)
    def _collectShadingGroups(self, node):
        """Recursive helper: gather shadingEngine nodes downstream of *node*,
        honouring self.visitedNodes."""
        res = []
        destinations = m.listConnections(node, s=False, d=True)
        if not destinations:
            return res
        for d in destinations:
            if d in self.visitedNodes:
                continue
            self.visitedNodes.add(d)  # mark before descending to break cycles
            if typeOf(d) == 'shadingEngine':
                res.append(d)
                continue
            res.extend(self._collectShadingGroups(d))
        return res
class SearcherFxRefs(SearcherBase):
    """Searches FX reference locators by the basename of their referenced file."""
    def __init__(self, name):
        super(SearcherFxRefs, self).__init__(name)
    # noinspection PyMethodMayBeStatic
    def getRefFilename(self, refLoc):
        """Filename stored on the locator's reference-filename attribute."""
        return m.getAttr('{}.{}'.format(refLoc, ATTR_REF_FILENAME))
    # noinspection PyMethodMayBeStatic
    def isRefLocator(self, refLoc):
        """True when the locator carries the reference-filename attribute."""
        return m.objExists('{}.{}'.format(refLoc, ATTR_REF_FILENAME))
    def getColumnNames(self):
        """Column headers for the result model."""
        return [
            'Exists',
            'Name',
            'Filename',
            'Path'
        ]
    def gatherSearchData(self):
        """Build (locator, refBasenameWithoutExtension) pairs for every
        reference locator in scope."""
        if self.searchDesc.searchSelected:
            locators = m.ls(sl=True, l=True, dag=True, ap=True, typ='locator')
        else:
            locators = m.ls(l=True, typ='locator')
        pairs = []
        for locator in locators:
            if not self.isRefLocator(locator):
                continue
            base = os.path.basename(self.getRefFilename(locator))
            pairs.append((locator, os.path.splitext(base)[0]))
        return pairs
    def prepareModelData(self, matchedData):
        """Convert matches into (exists, shortName, filename, mayaPath,
        NodeInfo) rows; 'exists' reflects the file on disk after environment
        variable expansion."""
        rows = []
        for fullPathName, shortName in matchedData:
            filename = self.getRefFilename(fullPathName)
            exists = 'Yes' if os.path.exists(os.path.expandvars(filename)) else 'No'
            mayaPath = getParent(fullPathName)
            info = NodeInfo()
            info.selectionString = [mayaPath]
            info.fullPathName = mayaPath
            info.shortName = shortName
            rows.append((exists, shortName, filename, mayaPath, info))
        return rows
|
{
"content_hash": "3785164e92cc83f8775b2bc4c4d5e19a",
"timestamp": "",
"source": "github",
"line_count": 417,
"max_line_length": 89,
"avg_line_length": 29.28537170263789,
"alnum_prop": 0.5968719292499182,
"repo_name": "theetcher/fxpt",
"id": "fff0762428ad9466b63e16fa3012a4bcc770553c",
"size": "12212",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fxpt/fx_search/searchers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3470"
},
{
"name": "CSS",
"bytes": "5256"
},
{
"name": "HTML",
"bytes": "7180"
},
{
"name": "JavaScript",
"bytes": "15796"
},
{
"name": "Python",
"bytes": "1509988"
}
],
"symlink_target": ""
}
|
import numpy, code
from .utility_functions import expand_indexes, index_to_zyx, \
find_halo, calc_distance
from .load_config import c
# Model grid dimensions taken from the loaded configuration.
nx = c.nx
ny = c.ny
nz = c.nz
def calc_com(mask):
    """Return the (z, y, x) centre of mass of *mask* (flat grid indexes).

    The horizontal axes are periodic, so the mean of the raw y/x coordinates
    is wrong for clouds straddling a domain edge.  For each horizontal axis
    the mean is therefore computed twice -- once as-is and once with the
    upper half-domain shifted down by a full domain width -- and the
    representation with the smaller variance (the more compact one) wins.
    """
    pts = index_to_zyx(mask)

    z = pts[0][:].astype(float).mean()

    # Wrapped representation: points in the upper half-domain shifted down
    # by one domain width.
    y1 = pts[1][:].astype(float)
    x1 = pts[2][:].astype(float)
    y2 = (y1 < ny/2.)*y1 + (y1 >= ny/2.)*(y1 - ny)
    x2 = (x1 < nx/2.)*x1 + (x1 >= nx/2.)*(x1 - nx)

    y1m = y1.mean()
    y2m = y2.mean()
    x1m = x1.mean()
    x2m = x2.mean()

    # Bug fix: the original repeated this y-axis selection block twice
    # verbatim; the duplicate has been removed (same result, half the work).
    if numpy.var(y2 - y2m) > numpy.var(y1 - y1m):
        y = y1m
    else:
        # Map the wrapped mean back into the [0, ny) domain.
        y = (y2m + .5) % ny - .5

    if numpy.var(x2 - x2m) > numpy.var(x1 - x1m):
        x = x1m
    else:
        x = (x2m + .5) % nx - .5

    return numpy.array(([z], [y], [x]))
#----------------------------
#def calc_distance(point1, point2):
# # Calculate distances corrected for reentrant domain
# point1 = numpy.atleast_2d(point1)
# point2 = numpy.atleast_2d(point2)
#
# delta_x = numpy.abs(point2[2, :] - point1[2, :])
# mask = delta_x >= (nx/2)
# delta_x[mask] = nx - delta_x[mask]
#
# delta_y = numpy.abs(point2[1, :] - point1[1, :])
# mask = delta_y >= (ny/2)
# delta_y[mask] = ny - delta_y[mask]
#
# delta_z = point2[0, :] - point1[0, :]
#
# return numpy.sqrt(delta_x**2 + delta_y**2 + delta_z**2)
#----------------------------
class Cloudlet:
    """A connected blob of points at one model time.

    Holds index masks for the core/condensed/plume regions, adjacency and
    overlap bookkeeping filled in by the tracker, and per-region advection
    velocities.
    """
    def __init__(self, id, time, cloudlet_dict):
        self.id = id
        self.time = time
        self.masks = {region: cloudlet_dict[region]
                      for region in ('core', 'condensed', 'plume')}
        self.adjacent = {'core': [], 'condensed': [], 'plume': []}
        self.overlap = {'condensed->condensed': [],
                        'condensed->plume': [],
                        'plume->condensed': [],
                        'plume->plume': []}
        self.u = {'condensed': cloudlet_dict['u_condensed'],
                  'plume': cloudlet_dict['u_plume']}
        self.v = {'condensed': cloudlet_dict['v_condensed'],
                  'plume': cloudlet_dict['v_plume']}
        self.w = {'condensed': cloudlet_dict['w_condensed'],
                  'plume': cloudlet_dict['w_plume']}
        self.cluster = None
        # Volume is the size of the most significant non-empty region.
        if self.has_core():
            self.volume = len(self.core_mask())
        elif self.has_condensed():
            self.volume = len(self.condensed_mask())
        else:
            self.volume = len(self.plume_mask())
    def has_core(self):
        """True when the core mask is non-empty."""
        return len(self.masks['core']) > 0
    def core_mask(self):
        """Index mask of the core region."""
        return self.masks['core']
    def has_condensed(self):
        """True when the condensed mask is non-empty."""
        return len(self.masks['condensed']) > 0
    def condensed_mask(self):
        """Index mask of the condensed region."""
        return self.masks['condensed']
    def condensed_halo(self):
        """Halo (neighbouring points) of the condensed region."""
        return find_halo(self.condensed_mask())
    def plume_mask(self):
        """Index mask of the plume region."""
        return self.masks['plume']
    def plume_halo(self):
        """Halo (neighbouring points) of the plume region."""
        return find_halo(self.plume_mask())
#----------------------------
class Cluster:
    """A collection of Cloudlets forming a single cloud at one model time.

    Tracks this cloud's relationship to clusters at other times via the
    past/split/merge connection sets, and can partition its cloudlets back
    into spatially connected groups when the cloud breaks apart.
    """
    def __init__(self, cluster_id, initial_cloudlets):
        self.id = cluster_id
        self.cloudlets = set()
        self.past_connections = set()
        self.split_connections = set()
        self.merge_connections = set()
        self.add_cloudlets(initial_cloudlets)
        self.events = []
    def add_cloudlet(self, cloudlet):
        """Attach *cloudlet*; it must not already belong to a cluster.

        Raises ValueError otherwise.  (Bug fix: the original raised a plain
        string, which is a TypeError at runtime, not a catchable exception.)
        """
        if not cloudlet.cluster:
            cloudlet.cluster = self
            self.cloudlets.add(cloudlet)
        else:
            raise ValueError("Cloudlet already belongs to a cluster!")
    def add_cloudlets(self, cloudlets):
        """Attach every cloudlet in *cloudlets*."""
        for cloudlet in cloudlets:
            self.add_cloudlet(cloudlet)
    def remove_cloudlets(self, cloudlets):
        """Detach every cloudlet in *cloudlets* from this cluster.

        Raises ValueError when one of them has no cluster.  (Bug fix: the
        original raised a plain string here as well.)
        """
        for cloudlet in cloudlets:
            if cloudlet.cluster:
                cloudlet.cluster = None
                self.cloudlets.remove(cloudlet)
            else:
                raise ValueError("Cloudlet does not belong to cluster!")
    def has_core(self):
        """True when any member cloudlet has core points."""
        for cloudlet in self.cloudlets:
            if cloudlet.has_core(): return True
        return False
    def core_mask(self):
        """Concatenated core indexes of all member cloudlets."""
        clist = []
        for cloudlet in self.cloudlets:
            clist.append(cloudlet.masks['core'])
        return numpy.hstack(clist)
    def has_condensed(self):
        """True when any member cloudlet has condensed points."""
        for cloudlet in self.cloudlets:
            if cloudlet.has_condensed(): return True
        return False
    def condensed_mask(self):
        """Concatenated condensed indexes of all member cloudlets."""
        clist = []
        for cloudlet in self.cloudlets:
            clist.append(cloudlet.masks['condensed'])
        return numpy.hstack(clist)
    def plume_mask(self):
        """Concatenated plume indexes of all member cloudlets."""
        clist = []
        for cloudlet in self.cloudlets:
            clist.append(cloudlet.masks['plume'])
        return numpy.hstack(clist)
    def condensed_halo(self):
        """Halo (neighbouring points) of the combined condensed region."""
        return find_halo(self.condensed_mask())
    def plume_halo(self):
        """Halo (neighbouring points) of the combined plume region."""
        return find_halo(self.plume_mask())
    def adjacent_cloudlets(self, key):
        """Cloudlets outside this cluster adjacent via *key* ('core',
        'condensed' or 'plume'), ordered by total contact volume, largest
        first."""
        result = {}
        for cloudlet in self.cloudlets:
            for volume, adjacent_cloudlet in cloudlet.adjacent[key]:
                if adjacent_cloudlet not in self.cloudlets:
                    if adjacent_cloudlet in result:
                        result[adjacent_cloudlet] += volume
                    else:
                        result[adjacent_cloudlet] = volume
        final = [(result[cloudlet], cloudlet) for cloudlet in result]
        final.sort(key=lambda key:key[0])
        final.reverse()
        result = [item[1] for item in final]
        return result
    def connected_cloudlet_groups(self):
        """Partition member cloudlets into connected groups.

        Condensed cloudlets are grouped by condensed adjacency.  When more
        than one group results, groups without any point in the first
        (lowest-index) model slab are folded into the nearest attached group
        by centre-of-mass distance of their condensed points.  Plume-only
        cloudlets are then grouped separately by plume adjacency.
        """
        # only split if both components are connected to ground
        plume_cloudlets = []
        condensed_cloudlets = []
        for cloudlet in self.cloudlets:
            if cloudlet.has_condensed():
                condensed_cloudlets.append(cloudlet)
            else:
                plume_cloudlets.append(cloudlet)
        groups = []
        # Breadth-first grouping over condensed adjacency.
        while condensed_cloudlets:
            cloudlet = condensed_cloudlets.pop()
            group = [cloudlet, ]
            conns = cloudlet.adjacent['condensed'][:]
            for vol, cloudlet in conns:
                if cloudlet in condensed_cloudlets:
                    group.append(cloudlet)
                    condensed_cloudlets.remove(cloudlet)
                    conns.extend(cloudlet.adjacent['condensed'][:])
            groups.append(group)
        if len(groups) > 1:
            detached_groups = []
            attached_groups = []
            for group in groups:
                mask = []
                for cloudlet in group:
                    mask.append(cloudlet.plume_mask())
                mask = numpy.hstack(mask)
                # Indexes below nx*ny lie in the first model slab -- this
                # appears to test whether the group touches the surface.
                if (mask < nx * ny).any():
                    attached_groups.append(group)
                else:
                    detached_groups.append(group)
            if attached_groups:
                # NOTE(review): `volumes` is computed but never used below;
                # kept for parity with the original behaviour.
                volumes = []
                volume = 0
                for group in attached_groups:
                    for cloudlet in group:
                        volume += cloudlet.volume
                    volumes.append((volume, group))
                volumes.sort(key=lambda key:key[0])
                if detached_groups:
                    # Fold each detached group into the attached group whose
                    # condensed centre of mass is nearest.
                    attached_group_masks = []
                    for group in attached_groups:
                        mask_list = [cloudlet.condensed_mask() for cloudlet in group]
                        mask = numpy.hstack(mask_list)
                        attached_group_masks.append((calc_com(mask), group))
                    for item in detached_groups:
                        mask_list = [cloudlet.condensed_mask() for cloudlet in item]
                        item_mask = numpy.hstack(mask_list)
                        item_com = calc_com(item_mask)
                        com_list = [(calc_distance(item_com, current_group[0]), current_group[1])
                                    for current_group in attached_group_masks]
                        com_list.sort(key=lambda key:key[0])
                        com_list[0][1].extend(item)
                groups = attached_groups
            else:
                groups = detached_groups
        # Breadth-first grouping over plume adjacency for plume-only cloudlets.
        while plume_cloudlets:
            cloudlet = plume_cloudlets.pop()
            group = [cloudlet, ]
            conns = cloudlet.adjacent['plume'][:]
            for volume, cloudlet in conns:
                if cloudlet in plume_cloudlets:
                    group.append(cloudlet)
                    plume_cloudlets.remove(cloudlet)
                    conns.extend(cloudlet.adjacent['plume'][:])
            groups.append(group)
        return groups
|
{
"content_hash": "d4f8d975726bdebf20a3920cd322f5cb",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 98,
"avg_line_length": 32.261648745519715,
"alnum_prop": 0.5070547716920342,
"repo_name": "lorenghoh/loh_tracker",
"id": "8f7e5e950d4f4173d7a8805e1994127202f871f4",
"size": "9024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudtracker/cloud_objects.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "100423"
}
],
"symlink_target": ""
}
|
from datetime import date, time, datetime
class CellMode(object):
    """How cell values are exposed: raw (as stored) or cooked (converted)."""
    raw = 0x01
    cooked = 0x02
class Cell(object):
    """A spreadsheet cell: a value plus an optional note."""
    def __init__(self, value=None, note=None):
        self.value = value
        self.note = note
    def copy(self):
        """Return a new Cell carrying the same value and note."""
        return Cell(value=self.value, note=self.note)
    def __str__(self):
        return "Cell(value={0}, note={1})".format(self.value, self.note)
class WorksheetBase(object):
    """Backend-agnostic view of one worksheet.

    Subclasses wrap a backend-specific sheet object (``raw_sheet``) and must
    implement ``parse_cell`` and ``get_row``; ``merged_cell_ranges`` and
    ``get_note`` default to "none".  ``name``/``nrows``/``ncols`` start as
    placeholders and are presumably filled in by subclasses -- confirm
    against the concrete backends.
    """
    def __init__(self, raw_sheet, ordinal):
        self.raw_sheet = raw_sheet
        self.ordinal = ordinal
        self.name = None
        self.nrows = -1
        self.ncols = -1
    def to_cell_table(self, merged=True):
        """Returns a list of lists of Cells with the cooked value and note for each cell."""
        new_rows = []
        for row_index, row in enumerate(self.rows(CellMode.cooked)):
            new_row = []
            for col_index, cell_value in enumerate(row):
                new_row.append(Cell(cell_value, self.get_note((col_index, row_index))))
            new_rows.append(new_row)
        if merged:
            # Propagate the anchor (low-corner) cell of each merged range to
            # the other cells in the range.
            for cell_low, cell_high in self.merged_cell_ranges():
                anchor_cell = new_rows[cell_low[1]][cell_low[0]]
                for row_index in range(cell_low[1], cell_high[1]):
                    for col_index in range(cell_low[0], cell_high[0]):
                        # NOTE: xlrd occasionally returns ranges that don't have cells.
                        try:
                            new_rows[row_index][col_index] = anchor_cell.copy()
                        except IndexError:
                            pass
        return new_rows
    def rows(self, cell_mode=CellMode.cooked):
        """Yield each row of the sheet, parsed under *cell_mode*."""
        for row_index in range(self.nrows):
            yield self.parse_row(self.get_row(row_index), row_index, cell_mode)
    def parse_row(self, row, row_index, cell_mode=CellMode.cooked):
        """Parse every cell of a raw *row* under *cell_mode*."""
        return [self.parse_cell(cell, (col_index, row_index), cell_mode) for col_index, cell in enumerate(row)]
    def get_cell(self, coords, mode=CellMode.cooked):
        """Parse and return the single cell at *coords* (col, row)."""
        return self.parse_cell(self.get_row(coords[1])[coords[0]], coords, mode)
    def tuple_to_datetime(self, date_tuple):
        """Convert a (Y, M, D, h, m, s) tuple to a time, date or datetime.

        An all-zero date yields a time; an all-zero time yields a date;
        otherwise a full datetime is returned.
        """
        # NOTE: Should this be an instance method, a class method, or perhaps just a module method?
        Y, M, D, h, m, s = date_tuple
        if Y == M == D == 0:
            return time(h, m, s)
        elif h == m == s == 0:
            return date(Y, M, D)
        else:
            return datetime(Y, M, D, h, m, s)
    def parse_cell(self, cell, coords, mode=CellMode.cooked):
        """Abstract: convert one backend cell at *coords* under *mode*."""
        raise NotImplementedError()
    def get_row(self, row_index):
        """Abstract: return the backend's raw row at *row_index*."""
        raise NotImplementedError()
    def merged_cell_ranges(self):
        """Enumerates cell ranges as ((cell_low, row_low), (cell_high, row_high))"""
        return []
    def get_note(self, coords):
        """Coords are (col, row)"""
        return None
class WorkbookBase(object):
    """Base for workbook readers; lazily builds and caches the sheet list."""
    def __init__(self, filename):
        self.filename = filename
        self._sheets = None  # populated on first call to sheets()
    def sheets(self, index=None):
        """Return all worksheets, or a single one when *index* is given."""
        if self._sheets is None:
            self._sheets = [self.get_worksheet(raw, i)
                            for i, raw in enumerate(self.iterate_sheets())]
        return self._sheets if index is None else self._sheets[index]
    def iterate_sheets(self):
        """Abstract: yield the backend's raw sheet objects."""
        raise NotImplementedError()
    def get_worksheet(self, raw_sheet, index):
        """Wrap one raw sheet; subclasses may override to specialize."""
        return WorksheetBase(raw_sheet, index)
from .csv import CSVWorkbook
from .excel import ExcelWorkbook
from .open_document import OpenDocumentWorkbook
|
{
"content_hash": "75d2876cd26f5e065ae1c062259661a6",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 107,
"avg_line_length": 29.990654205607477,
"alnum_prop": 0.6410096603303209,
"repo_name": "treycucco/py-utils",
"id": "37aad9396cee08b855c09d176757e03213750858",
"size": "3209",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "idb/spreadsheet/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PLpgSQL",
"bytes": "1165"
},
{
"name": "Python",
"bytes": "150596"
},
{
"name": "Shell",
"bytes": "487"
}
],
"symlink_target": ""
}
|
import asyncio
from pypeman import channels
from pypeman.channels import BaseChannel
from pypeman import nodes
from pypeman.test import TearDownProjectTestCase as TestCase
from pypeman.tests.common import generate_msg
class TestNode(nodes.BaseNode):
    """Minimal pypeman node used as a probe inside channel tests."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
class TestingTests(TestCase):
    """Exercises BaseChannel's test mode and node input/output mocking."""
    def clean_loop(self):
        # Useful to execute future callbacks
        # NOTE(review): asyncio.Task.all_tasks() was removed in Python 3.9;
        # asyncio.all_tasks() is the modern spelling -- confirm target version.
        pending = asyncio.Task.all_tasks(loop=self.loop)
        if pending:
            self.loop.run_until_complete(asyncio.gather(*pending))
    def start_channels(self):
        # Start channels
        for chan in channels.all:
            self.loop.run_until_complete(chan.start())
    def setUp(self):
        """Give each test a fresh, isolated event loop and channel registry."""
        # Create class event loop used for tests to avoid failing
        # previous tests to impact next test ? (Not sure)
        self.loop = asyncio.new_event_loop()
        self.loop.set_debug(True)
        # Remove thread event loop to be sure we are not using
        # another event loop somewhere
        asyncio.set_event_loop(None)
        # Avoid calling already tested channels
        channels.all.clear()
    def tearDown(self):
        # Drain any still-pending tasks before discarding the loop.
        super().tearDown()
        self.clean_loop()
    def test_chan_process_in_test_mode(self):
        """ Whether BaseChannel handling with test mode is working """
        chan = BaseChannel(name="test_channel_test_1", loop=self.loop)
        n = TestNode(name="testme")
        chan.add(n)
        msg = generate_msg(message_content="X")
        # Launch channel processing
        self.start_channels()
        chan._reset_test()
        chan.handle_and_wait(msg)
        self.assertEqual(n.processed, 1, "Channel in test mode not working")
        self.assertTrue(hasattr(n, '_handle'), "Channel in test mode not working")
        self.assertEqual(chan.get_node("testme").last_input().payload, "X", "Last input broken")
    def test_chan_node_mocking_in_test_mode(self):
        """ Whether node mocking input and output is working """
        chan = BaseChannel(name="test_channel_test_2", loop=self.loop)
        n = TestNode(name="testme")
        chan.add(n)
        msg_x = generate_msg(message_content="X")
        msg_a = generate_msg(message_content="A")
        msg_b = generate_msg(message_content="B")
        msg_c = generate_msg(message_content="C")
        msg_d = generate_msg(message_content="D")
        # Mock callables: append a marker to the payload passing through.
        def concat_e(msg):
            msg.payload += "E"
            return msg
        def concat_f(msg):
            msg.payload += "F"
            return msg
        # Launch channel processing
        self.start_channels()
        # Mock input
        chan._reset_test()
        chan.get_node("testme").mock(input=msg_a)
        ret = chan.handle_and_wait(msg_x)
        self.assertEqual(n.processed, 1, "Channel in test mode not working")
        self.assertEqual(ret.payload, "A", "Mocking input broken")
        self.assertEqual(chan.get_node("testme").last_input().payload, "X", "Last input broken")
        # Mock output
        chan._reset_test()
        chan.get_node("testme").mock(output=msg_b)
        ret = chan.handle_and_wait(msg_x)
        self.assertEqual(n.processed, 1, "Channel in test mode not working")
        self.assertEqual(ret.payload, "B", "Mocking input broken")
        self.assertEqual(chan.get_node("testme").last_input().payload, "X", "Last input broken")
        # Mock both
        chan._reset_test()
        chan.get_node("testme").mock(input=msg_c, output=msg_d)
        ret = chan.handle_and_wait(msg_x)
        self.assertEqual(n.processed, 1, "Channel in test mode not working")
        self.assertEqual(ret.payload, "D", "Mocking both input and output broken")
        # Mock both functions
        chan._reset_test()
        chan.get_node("testme").mock(input=concat_e, output=concat_f)
        ret = chan.handle_and_wait(msg_x)
        self.assertEqual(n.processed, 1, "Channel in test mode not working")
        self.assertEqual(ret.payload, "XEF", "Mocking with function broken")
|
{
"content_hash": "f292b878bbf7ad95c1de988078fa77f0",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 96,
"avg_line_length": 33.35245901639344,
"alnum_prop": 0.6230031948881789,
"repo_name": "Zluurk/pypeman",
"id": "2105ca3916744f6ff40ba48ac92afc3369004444",
"size": "4069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pypeman/tests/test_testing.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1452"
},
{
"name": "HTML",
"bytes": "264"
},
{
"name": "JavaScript",
"bytes": "26871"
},
{
"name": "Python",
"bytes": "224330"
},
{
"name": "Vue",
"bytes": "10192"
}
],
"symlink_target": ""
}
|
"""Add exclusion constraint to membership
Revision ID: 6815546f681c
Revises: 20234ac06668
Create Date: 2021-11-14 00:38:58.192514
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '6815546f681c'
down_revision = '20234ac06668'
branch_labels = None
depends_on = None
def upgrade():
    """Add a GiST exclusion constraint so a user cannot hold two memberships
    in the same group with overlapping active_during ranges, and rename the
    range index to carry the table prefix."""
    op.execute("""
    alter table membership
        add constraint "membership_group_id_user_id_active_during_excl"
        EXCLUDE USING gist (group_id with =, user_id with =, active_during with &&);
    """)
    op.execute("alter index ix_active_during rename to ix_membership_active_during")
def downgrade():
    """Drop the exclusion constraint and restore the old index name."""
    op.drop_constraint(
        'membership_group_id_user_id_active_during_excl',
        table_name='membership',
    )
    op.execute("alter index ix_membership_active_during rename to ix_active_during")
|
{
"content_hash": "53ac29a107e1818580247384a4a97d69",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 84,
"avg_line_length": 27.096774193548388,
"alnum_prop": 0.7011904761904761,
"repo_name": "agdsn/pycroft",
"id": "2f19569b198695ee490a879a227d89552e7c25e2",
"size": "840",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pycroft/model/alembic/versions/6815546f681c_add_exclusion_constraint_to_membership.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10320"
},
{
"name": "Dockerfile",
"bytes": "3341"
},
{
"name": "HTML",
"bytes": "124781"
},
{
"name": "JavaScript",
"bytes": "74707"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "1172012"
},
{
"name": "Shell",
"bytes": "13660"
},
{
"name": "TypeScript",
"bytes": "5231"
}
],
"symlink_target": ""
}
|
"""Command implementation."""
from copy import deepcopy
class Command(object):
    """A CoAP request: method, path and payload, plus observe options and
    hooks for post-processing results and reporting observe errors."""
    def __init__(
        self,
        method,
        path,
        data=None,
        *,
        parse_json=True,
        observe=False,
        observe_duration=0,
        process_result=None,
        err_callback=None
    ):
        self._method = method
        self._path = path
        self._data = data
        self._parse_json = parse_json
        self._observe = observe
        self._observe_duration = observe_duration
        self._process_result = process_result
        self._err_callback = err_callback
        self._raw_result = None
        self._result = None
    @property
    def method(self):
        """Request method."""
        return self._method
    @property
    def path(self):
        """Request path as a list of segments."""
        return self._path
    @property
    def data(self):
        """Request payload (possibly merged from several commands)."""
        return self._data
    @property
    def parse_json(self):
        """Whether the response should be decoded as JSON."""
        return self._parse_json
    @property
    def process_result(self):
        """Optional callable applied to the raw result."""
        return self._process_result
    @property
    def err_callback(self):
        """This will be fired when an observe request fails."""
        return self._err_callback
    @property
    def observe(self):
        """Whether this command observes its resource."""
        return self._observe
    @property
    def observe_duration(self):
        """How long to keep observing."""
        return self._observe_duration
    @property
    def raw_result(self):
        """Result exactly as received."""
        return self._raw_result
    @property
    def result(self):
        """Result after process_result has been applied (if any)."""
        return self._result
    @result.setter
    def result(self, value):
        """The result of the command."""
        if self._process_result:
            self._result = self._process_result(value)
        self._raw_result = value
    @property
    def path_str(self) -> str:
        """Return coap path."""
        return "/".join(str(segment) for segment in self._path)
    def url(self, host):
        """Generate url for coap client."""
        return "coaps://{}:5684/{}".format(host, self.path_str)
    def _merge(self, src, dest):
        """Recursively merge *src* into *dest* (src values win); return dest."""
        for key, value in src.items():
            if isinstance(value, dict):
                self._merge(value, dest.setdefault(key, {}))
            elif isinstance(value, list):
                target = dest.setdefault(key, [{}])
                # Single-dict lists are merged element-wise; anything else
                # replaces the destination wholesale.
                if len(value) == 1 and isinstance(value[0], dict):
                    self._merge(value[0], target[0])
                else:
                    dest[key] = value
            else:
                dest[key] = value
        return dest
    def combine_data(self, command2):
        """Combines the data for this command with another."""
        if command2 is None:
            return
        self._data = self._merge(command2._data, self._data)
    def __add__(self, other):
        # Adding None yields an independent copy; adding another Command
        # yields a copy with the payloads merged.
        if other is None:
            return deepcopy(self)
        if not isinstance(other, self.__class__):
            raise (
                TypeError(
                    "unsupported operand type(s) for +: "
                    "'{}' and '{}'".format(self.__class__, type(other))
                )
            )
        combined = deepcopy(self)
        combined.combine_data(other)
        return combined
|
{
"content_hash": "6581a86fec350c09854858edffabe91f",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 71,
"avg_line_length": 25.04,
"alnum_prop": 0.5146964856230032,
"repo_name": "ggravlingen/pytradfri",
"id": "25d4400e9b9e495848acb05a88b30ea5aee5e125",
"size": "3130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytradfri/command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "339"
},
{
"name": "Python",
"bytes": "115017"
},
{
"name": "Shell",
"bytes": "1179"
}
],
"symlink_target": ""
}
|
from mixbox import entities
from mixbox import fields
import cybox.bindings.network_route_object as network_route_binding
from cybox.common import Duration, ObjectProperties, StructuredText
from cybox.objects.network_route_entry_object import NetworkRouteEntry
class NetworkRouteEntries(entities.EntityList):
    """CybOX list type wrapping Network_Route_Entry elements."""
    _binding = network_route_binding
    _binding_class = network_route_binding.NetworkRouteEntriesType
    _namespace = "http://cybox.mitre.org/objects#NetworkRouteObject-2"
    network_route_entry = fields.TypedField("Network_Route_Entry", NetworkRouteEntry, multiple=True)
class NetRoute(ObjectProperties):
    """CybOX object properties for a network route (NetRouteObjectType)."""
    _binding = network_route_binding
    _binding_class = network_route_binding.NetRouteObjectType
    _namespace = "http://cybox.mitre.org/objects#NetworkRouteObject-2"
    _XSI_NS = "NetworkRouteObj"
    _XSI_TYPE = "NetRouteObjectType"
    # Route flag attributes.
    is_ipv6 = fields.TypedField("is_ipv6")
    is_autoconfigure_address = fields.TypedField("is_autoconfigure_address")
    is_immortal = fields.TypedField("is_immortal")
    is_loopback = fields.TypedField("is_loopback")
    is_publish = fields.TypedField("is_publish")
    # Child elements.
    description = fields.TypedField("Description", StructuredText)
    preferred_lifetime = fields.TypedField("Preferred_Lifetime", Duration)
    valid_lifetime = fields.TypedField("Valid_Lifetime", Duration)
    route_age = fields.TypedField("Route_Age", Duration)
    network_route_entries = fields.TypedField("Network_Route_Entries", NetworkRouteEntries)
|
{
"content_hash": "da53e403ae5b7b1c63b2c5750ce6365c",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 100,
"avg_line_length": 43.970588235294116,
"alnum_prop": 0.7665551839464882,
"repo_name": "CybOXProject/python-cybox",
"id": "2168db04538233a66bb74499a3e27def0fa6e20a",
"size": "1600",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cybox/objects/network_route_object.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "4610747"
}
],
"symlink_target": ""
}
|
import math
import numpy as np
import tensorflow as tf
try:
    from tensorflow.contrib.rnn import RNNCell
except ImportError:
    # Fallback for TensorFlow versions without tf.contrib.rnn.
    # Bug fix: the original referenced tf.nn.rnn_cell.RNNCel (missing the
    # final 'l'), so this fallback always raised AttributeError.
    RNNCell = tf.nn.rnn_cell.RNNCell
class LSTMCell(RNNCell):
    """Vanilla LSTM implemented with same initializations as BN-LSTM"""
    def __init__(self, num_units):
        self.num_units = num_units
    @property
    def state_size(self):
        # State is a (cell state c, hidden state h) pair.
        return (self.num_units, self.num_units)
    @property
    def output_size(self):
        return self.num_units
    def __call__(self, x, state, scope=None):
        """One LSTM step; returns (new_h, (new_c, new_h))."""
        with tf.variable_scope(scope or type(self).__name__):
            c, h = state

            # Keep W_xh and W_hh separate here as well to reuse initialization methods
            x_size = x.get_shape().as_list()[1]
            W_xh = tf.get_variable('W_xh',
                                   [x_size, 4 * self.num_units],
                                   initializer=orthogonal_initializer())
            W_hh = tf.get_variable('W_hh',
                                   [self.num_units, 4 * self.num_units],
                                   initializer=bn_lstm_identity_initializer(0.95))
            bias = tf.get_variable('bias', [4 * self.num_units])

            # hidden = tf.matmul(x, W_xh) + tf.matmul(h, W_hh) + bias
            # improve speed by concat.
            concat = tf.concat([x, h], 1)
            W_both = tf.concat([W_xh, W_hh], 0)
            hidden = tf.matmul(concat, W_both) + bias

            # Gate order in the fused matmul: i (input), j (candidate),
            # f (forget), o (output).
            i, j, f, o = tf.split(hidden, 4, axis=1)

            new_c = c * tf.sigmoid(f) + tf.sigmoid(i) * tf.tanh(j)
            new_h = tf.tanh(new_c) * tf.sigmoid(o)

            return new_h, (new_c, new_h)
class BNLSTMCell(RNNCell):
    """Batch normalized LSTM as described in http://arxiv.org/abs/1603.09025"""
    def __init__(self, num_units, training):
        # `training` is a boolean tensor switching batch vs population
        # statistics inside batch_norm.
        self.num_units = num_units
        self.training = training
    @property
    def state_size(self):
        # State is a (cell state c, hidden state h) pair.
        return (self.num_units, self.num_units)
    @property
    def output_size(self):
        return self.num_units
    def __call__(self, x, state, scope=None):
        """One BN-LSTM step; returns (new_h, (new_c, new_h))."""
        with tf.variable_scope(scope or 'bn_lstm'):
            c, h = state

            x_size = x.get_shape().as_list()[1]
            W_xh = tf.get_variable('W_xh',
                                   [x_size, 4 * self.num_units],
                                   initializer=orthogonal_initializer())
            W_hh = tf.get_variable('W_hh',
                                   [self.num_units, 4 * self.num_units],
                                   initializer=bn_lstm_identity_initializer(0.95))
            bias = tf.get_variable('bias', [4 * self.num_units])

            # Input and recurrent contributions are normalized separately
            # (per the paper), then summed with the bias.
            xh = tf.matmul(x, W_xh)
            hh = tf.matmul(h, W_hh)

            bn_xh = batch_norm(xh, 'xh', self.training)
            bn_hh = batch_norm(hh, 'hh', self.training)

            hidden = bn_xh + bn_hh + bias

            # Gate order: i (input), j (candidate), f (forget), o (output).
            i, j, f, o = tf.split(hidden, 4, axis=1)

            new_c = c * tf.sigmoid(f) + tf.sigmoid(i) * tf.tanh(j)
            # The new cell state is normalized again before producing h.
            bn_new_c = batch_norm(new_c, 'c', self.training)

            new_h = tf.tanh(bn_new_c) * tf.sigmoid(o)

            return new_h, (new_c, new_h)
def orthogonal(shape):
    """Return a random matrix of *shape* with orthonormal rows/columns.

    Draws a Gaussian matrix, takes its thin SVD, and reshapes whichever
    unitary factor matches the flattened shape back to *shape*.
    """
    rows = shape[0]
    cols = np.prod(shape[1:])
    gaussian = np.random.normal(0.0, 1.0, (rows, cols))
    u, _, v = np.linalg.svd(gaussian, full_matrices=False)
    basis = u if u.shape == (rows, cols) else v
    return basis.reshape(shape)
def bn_lstm_identity_initializer(scale):
    """Initializer for the fused hidden-to-4*units LSTM weight matrix.

    The second gate sub-matrix (j, the candidate, given the i/j/f/o split
    order used by the cells above) is a scaled identity; the other three
    sub-matrices are random orthogonal.
    """
    def _initializer(shape, dtype=tf.float32, partition_info=None):
        """Ugly cause LSTM params calculated in one matrix multiply"""
        size = shape[0]
        # gate (j) is identity
        t = np.zeros(shape)
        t[:, size:size * 2] = np.identity(size) * scale
        t[:, :size] = orthogonal([size, size])
        t[:, size * 2:size * 3] = orthogonal([size, size])
        t[:, size * 3:] = orthogonal([size, size])
        return tf.constant(t, dtype=dtype)
    return _initializer
def orthogonal_initializer():
    """Return a TF initializer producing random orthogonal matrices."""
    def _initializer(shape, dtype=tf.float32, partition_info=None):
        return tf.constant(orthogonal(shape), dtype)
    return _initializer
def batch_norm(x, name_scope, training, epsilon=1e-3, decay=0.999):
    """Assume 2d [batch, values] tensor

    Batch normalization with a train/inference switch:

    * ``training`` is a boolean *tensor*; tf.cond selects the branch when
      the graph runs, not when it is built.
    * Training branch: normalize with the current batch statistics and, via
      control dependencies, update the population statistics as exponential
      moving averages with the given ``decay``.
    * Inference branch: normalize with the stored population statistics.
    """
    with tf.variable_scope(name_scope):
        size = x.get_shape().as_list()[1]
        # Learnable scale starts small (0.1); offset uses the variable
        # default initializer.
        scale = tf.get_variable('scale', [size],
            initializer=tf.constant_initializer(0.1))
        offset = tf.get_variable('offset', [size])

        # Population statistics are excluded from training; they change
        # only through the assign ops below.
        pop_mean = tf.get_variable('pop_mean', [size],
            initializer=tf.zeros_initializer(),
            trainable=False)
        pop_var = tf.get_variable('pop_var', [size],
            initializer=tf.ones_initializer(),
            trainable=False)
        batch_mean, batch_var = tf.nn.moments(x, [0])

        train_mean_op = tf.assign(
            pop_mean,
            pop_mean * decay + batch_mean * (1 - decay))
        train_var_op = tf.assign(
            pop_var,
            pop_var * decay + batch_var * (1 - decay))

        def batch_statistics():
            # The control dependency guarantees the moving averages are
            # updated whenever the training branch is taken.
            with tf.control_dependencies([train_mean_op, train_var_op]):
                return tf.nn.batch_normalization(x, batch_mean, batch_var, offset, scale, epsilon)

        def population_statistics():
            return tf.nn.batch_normalization(x, pop_mean, pop_var, offset, scale, epsilon)

        return tf.cond(training, batch_statistics, population_statistics)
|
{
"content_hash": "c37b68f44cab2d2555583fc02224d279",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 98,
"avg_line_length": 34.59477124183007,
"alnum_prop": 0.5639523899489892,
"repo_name": "ematvey/deep-text-classifier",
"id": "36a5502c30dff42b4cf27c519e849379e55ed566",
"size": "5361",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bn_lstm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30098"
}
],
"symlink_target": ""
}
|
# One-shot loader: wipes the Watershed table and re-imports it from a
# shapefile.  Must run standalone, hence the manual Django environment
# bootstrap below (pre-1.4 `setup_environ` style).
from django.core.management import setup_environ
import os
import sys
sys.path.append(os.path.dirname(__file__))
import settings
setup_environ(settings)
#===================================#
from django.contrib.gis.utils import LayerMapping
from arp.models import Watershed

# Delete existing rows one by one so per-instance delete() logic
# (signals/overrides) still fires; a bulk queryset delete would skip it.
a = Watershed.objects.all()
for i in a:
    i.delete()

# Model field -> shapefile attribute mapping for LayerMapping.
mapping = {
    'fid': "OBJECTID",
    'coho': 'Coho_m',
    'chinook': 'Chnk_m',
    'steelhead': 'StlHd_m',
    'climate_cost': 'pcp80bdfmm',
    'area': 'area_km2',
    'name': 'HU_12_NAME',
    'huc12': 'HUC_12',
    'geometry': 'POLYGON'
}

# Path is relative to the working directory — run from the script's dir.
lm = LayerMapping(Watershed,'../media/staticmap/data/huc6_4326.shp',mapping)
lm.save(verbose=True)
|
{
"content_hash": "e904c80d3bc9f5352d1a730e9e36a0f5",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 76,
"avg_line_length": 22.766666666666666,
"alnum_prop": 0.6251830161054173,
"repo_name": "tectronics/watershed-priorities",
"id": "37474095d43d62f1271f733e9cbe6a1d0625b373",
"size": "683",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wp/load.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "32813"
},
{
"name": "HTML",
"bytes": "243041"
},
{
"name": "JavaScript",
"bytes": "14420"
},
{
"name": "PHP",
"bytes": "3062"
},
{
"name": "Python",
"bytes": "196164"
},
{
"name": "Shell",
"bytes": "12851"
}
],
"symlink_target": ""
}
|
"""
stock.py
"""
from decimal import Decimal
import base64
from lxml import etree
from logbook import Logger
from ups.shipping_package import ShipmentConfirm, ShipmentAccept
from ups.base import PyUPSException
from ups.worldship_api import WorldShip
from trytond.model import fields, ModelView
from trytond.wizard import Wizard, StateView, Button
from trytond.transaction import Transaction
from trytond.pool import Pool, PoolMeta
from trytond.pyson import Eval
from trytond.rpc import RPC
from .sale import UPS_PACKAGE_TYPES
# All classes in this module extend existing Tryton models via the pool.
__metaclass__ = PoolMeta
__all__ = [
    'ShipmentOut', 'StockMove', 'ShippingUps',
    'GenerateShippingLabel', 'Package'
]

# Shared field-state: UPS options become read-only once a shipment is done.
STATES = {
    'readonly': Eval('state') == 'done',
}

logger = Logger('trytond_ups')
class ShipmentOut:
    "Shipment Out"
    __name__ = 'stock.shipment.out'

    # Function fields: True when the carrier's cost method is 'ups' /
    # 'ups_worldship' respectively (both computed by get_is_ups_shipping).
    is_ups_shipping = fields.Function(
        fields.Boolean('Is UPS Shipping ?'),
        'get_is_ups_shipping'
    )
    is_ups_worldship_shipping = fields.Function(
        fields.Boolean('Is UPS Worldship Shipping ?'),
        'get_is_ups_shipping'
    )
    ups_service_type = fields.Many2One(
        'ups.service', 'UPS Service Type', states=STATES, depends=['state']
    )
    ups_package_type = fields.Selection(
        UPS_PACKAGE_TYPES, 'Package Content Type', states=STATES,
        depends=['state']
    )
    ups_saturday_delivery = fields.Boolean(
        "Is Saturday Delivery", states=STATES, depends=['state']
    )

    def _get_weight_uom(self):
        """
        Returns uom for ups

        Prefer the weight UOM configured on the UPS carrier; fall back to
        the default behaviour for non-UPS shipments.
        """
        if self.is_ups_shipping and self.carrier.ups_weight_uom:
            return self.carrier.ups_weight_uom
        return super(ShipmentOut, self)._get_weight_uom()

    @staticmethod
    def default_ups_package_type():
        # Default comes from the sale configuration singleton.
        Config = Pool().get('sale.configuration')
        config = Config(1)
        return config.ups_package_type

    @staticmethod
    def default_ups_service_type():
        Config = Pool().get('sale.configuration')
        config = Config(1)
        return config.ups_service_type and config.ups_service_type.id or None

    @staticmethod
    def default_ups_saturday_delivery():
        return False

    def get_is_ups_shipping(self, name):
        """
        Check if shipping is from UPS

        ``name`` is the function-field name; name[3:-9] strips the leading
        'is_' and trailing '_shipping', yielding 'ups' or 'ups_worldship'
        to compare against the carrier cost method.
        """
        return self.carrier and self.carrier.carrier_cost_method == name[3:-9]

    @classmethod
    def __setup__(cls):
        super(ShipmentOut, cls).__setup__()

        # There can be cases when people might want to use a different
        # shipment carrier at any state except `done`.
        cls.carrier.states = STATES
        cls._error_messages.update({
            'ups_wrong_carrier':
                'Carrier for selected shipment is not UPS',
            'ups_service_type_missing':
                'UPS service type missing.',
            'tracking_number_already_present':
                'Tracking Number is already present for this shipment.',
            'invalid_state': 'Labels can only be generated when the '
                'shipment is in Packed or Done states only',
            'no_packages': 'Shipment %s has no packages',
        })
        # Expose the label/cost helpers over RPC.
        cls.__rpc__.update({
            'make_ups_labels': RPC(readonly=False, instantiate=0),
            'get_ups_shipping_cost': RPC(readonly=False, instantiate=0),
            'get_worldship_xml': RPC(instantiate=0, readonly=True),
        })

    def _get_ups_packages(self):
        """
        Return UPS Packages XML

        One package container element per stock package on the shipment.
        """
        package_containers = []

        for package in self.packages:
            package_containers.append(package.get_ups_package_container())
        return package_containers

    def _get_carrier_context(self):
        "Pass shipment in the context"
        context = super(ShipmentOut, self)._get_carrier_context()

        if not self.carrier.carrier_cost_method == 'ups':
            return context

        context = context.copy()
        context['shipment'] = self.id
        return context

    def _get_shipment_confirm_xml(self):
        """
        Return XML of shipment for shipment_confirm

        Builds the full ShipmentConfirm request: payment info, addresses,
        service type, service options and the per-package containers.
        """
        Company = Pool().get('company.company')

        carrier = self.carrier
        if not self.ups_service_type:
            self.raise_user_error('ups_service_type_missing')

        payment_info_prepaid = \
            ShipmentConfirm.payment_information_prepaid_type(
                AccountNumber=carrier.ups_shipper_no
            )
        payment_info = ShipmentConfirm.payment_information_type(
            payment_info_prepaid)

        packages = self._get_ups_packages()
        # NOTE(review): the falsy branch sends the literal string 'None' as
        # SaturdayDelivery — confirm against the UPS API whether '0' or an
        # omitted element is expected instead.
        shipment_service = ShipmentConfirm.shipment_service_option_type(
            SaturdayDelivery='1' if self.ups_saturday_delivery
                else 'None'
        )
        description = ','.join([
            move.product.name for move in self.outgoing_moves
        ])

        from_address = self._get_ship_from_address()

        shipment_args = [
            from_address.to_ups_shipper(carrier=carrier),
            self.delivery_address.to_ups_to_address(),
            from_address.to_ups_from_address(),
            ShipmentConfirm.service_type(Code=self.ups_service_type.code),
            payment_info, shipment_service,
        ]
        if carrier.ups_negotiated_rates:
            shipment_args.append(
                ShipmentConfirm.rate_information_type(negotiated=True)
            )
        if from_address.country.code == 'US' and \
                self.delivery_address.country.code in ['PR', 'CA']:
            # Special case for US to PR or CA InvoiceLineTotal should be sent
            monetary_value = str(sum(map(
                lambda move: move.get_monetary_value_for_ups(),
                self.outgoing_moves
            )))

            company_id = Transaction().context.get('company')
            if not company_id:
                self.raise_user_error("Company is not in context")

            company = Company(company_id)
            shipment_args.append(ShipmentConfirm.invoice_line_total_type(
                MonetaryValue=monetary_value,
                CurrencyCode=company.currency.code
            ))

        shipment_args.extend(packages)
        # UPS limits the description length; truncate to 35 characters.
        shipment_confirm = ShipmentConfirm.shipment_confirm_request_type(
            *shipment_args, Description=description[:35]
        )
        return shipment_confirm

    def _get_ups_shipment_cost(self, shipment_confirm):
        """
        The shipment_confirm is an xml container in the response which has the
        standard rates and negotiated rates. This method should extract the
        value and return it with the currency
        """
        Currency = Pool().get('currency.currency')

        shipment_charges = shipment_confirm.ShipmentCharges

        currency, = Currency.search([
            ('code', '=', str(
                shipment_charges.TotalCharges.CurrencyCode
            ))
        ])

        if self.carrier.ups_negotiated_rates and \
                hasattr(shipment_confirm, 'NegotiatedRates'):
            # If there are negotiated rates return that instead
            charges = shipment_confirm.NegotiatedRates.NetSummaryCharges
            charges = currency.round(Decimal(
                str(charges.GrandTotal.MonetaryValue)
            ))
        else:
            charges = currency.round(
                Decimal(str(shipment_charges.TotalCharges.MonetaryValue))
            )
        return charges, currency

    def get_ups_shipping_cost(self):
        """Returns the calculated shipping cost as sent by ups

        Performs a ShipmentConfirm round-trip purely to read the rate;
        no label is generated here.

        :returns: The shipping cost with currency
        """
        carrier = self.carrier

        shipment_confirm = self._get_shipment_confirm_xml()
        shipment_confirm_instance = carrier.ups_api_instance(call="confirm")

        # Logging.
        logger.debug(
            'Making Shipment Confirm Request for'
            'Shipment ID: {0} and Carrier ID: {1}'
            .format(self.id, carrier.id)
        )
        logger.debug(
            '--------SHIPMENT CONFIRM REQUEST--------\n%s'
            '\n--------END REQUEST--------'
            % etree.tostring(shipment_confirm, pretty_print=True)
        )

        try:
            response = shipment_confirm_instance.request(shipment_confirm)

            # Logging.
            logger.debug(
                '--------SHIPMENT CONFIRM RESPONSE--------\n%s'
                '\n--------END RESPONSE--------'
                % etree.tostring(response, pretty_print=True)
            )
        except PyUPSException, e:
            self.raise_user_error(unicode(e[0]))

        shipping_cost, currency = self._get_ups_shipment_cost(response)

        return shipping_cost, currency.id

    def make_ups_labels(self):
        """
        Make labels for the given shipment

        Two-phase UPS flow: ShipmentConfirm (get digest) then ShipmentAccept
        (get labels + tracking numbers).  Stores cost/tracking on the
        shipment, per-package tracking numbers, and label images as
        attachments.

        :return: Tracking number as string
        """
        Attachment = Pool().get('ir.attachment')
        Currency = Pool().get('currency.currency')

        carrier = self.carrier
        if self.state not in ('packed', 'done'):
            self.raise_user_error('invalid_state')

        if not self.is_ups_shipping:
            self.raise_user_error('ups_wrong_carrier')

        if self.tracking_number:
            self.raise_user_error('tracking_number_already_present')

        if not self.packages:
            self.raise_user_error("no_packages", error_args=(self.id,))

        shipment_confirm = self._get_shipment_confirm_xml()
        shipment_confirm_instance = carrier.ups_api_instance(call="confirm")

        # Logging.
        logger.debug(
            'Making Shipment Confirm Request for'
            'Shipment ID: {0} and Carrier ID: {1}'
            .format(self.id, self.carrier.id)
        )
        logger.debug(
            '--------SHIPMENT CONFIRM REQUEST--------\n%s'
            '\n--------END REQUEST--------'
            % etree.tostring(shipment_confirm, pretty_print=True)
        )

        try:
            response = shipment_confirm_instance.request(shipment_confirm)

            # Logging.
            logger.debug(
                '--------SHIPMENT CONFIRM RESPONSE--------\n%s'
                '\n--------END RESPONSE--------'
                % etree.tostring(response, pretty_print=True)
            )
        except PyUPSException, e:
            self.raise_user_error(unicode(e[0]))

        # The digest from the confirm response authorizes the accept call.
        digest = ShipmentConfirm.extract_digest(response)

        shipment_accept = ShipmentAccept.shipment_accept_request_type(digest)

        shipment_accept_instance = carrier.ups_api_instance(call="accept")

        # Logging.
        logger.debug(
            'Making Shipment Accept Request for'
            'Shipment ID: {0} and Carrier ID: {1}'
            .format(self.id, self.carrier.id)
        )
        logger.debug(
            '--------SHIPMENT ACCEPT REQUEST--------\n%s'
            '\n--------END REQUEST--------'
            % etree.tostring(shipment_accept, pretty_print=True)
        )

        try:
            response = shipment_accept_instance.request(shipment_accept)

            # Logging.
            logger.debug(
                '--------SHIPMENT ACCEPT RESPONSE--------\n%s'
                '\n--------END RESPONSE--------'
                % etree.tostring(response, pretty_print=True)
            )
        except PyUPSException, e:
            self.raise_user_error(unicode(e[0]))

        shipment_res = response.ShipmentResults
        shipment_identification_number = \
            shipment_res.ShipmentIdentificationNumber.pyval

        currency, = Currency.search([
            ('code', '=', str(
                shipment_res.ShipmentCharges.TotalCharges.CurrencyCode
            ))
        ])

        shipping_cost = currency.round(Decimal(
            str(shipment_res.ShipmentCharges.TotalCharges.MonetaryValue)
        ))
        self.__class__.write([self], {
            'cost': shipping_cost,
            'cost_currency': currency,
            'tracking_number': shipment_identification_number
        })

        index = 0
        for package in response.ShipmentResults.PackageResults:
            tracking_number = package.TrackingNumber.pyval

            # The package results do not hold any info to identify which
            # result if for what package, instead it returns the results
            # in the order in which the packages were sent in request, so
            # we read the result in the same order.
            stock_package = self.packages[index]
            stock_package.tracking_number = unicode(tracking_number)
            stock_package.save()

            index += 1

            # Attach each label image (base64 GIF/PNG payload) to the
            # shipment record.
            Attachment.create([{
                'name': "%s_%s_%s.png" % (
                    tracking_number,
                    shipment_identification_number,
                    stock_package.code,
                ),
                'data': buffer(base64.decodestring(
                    package.LabelImage.GraphicImage.pyval
                )),
                'resource': '%s,%s' % (self.__name__, self.id)
            }])

        return shipment_identification_number

    @fields.depends('ups_service_type')
    def on_change_carrier(self):
        """
        Show/Hide UPS Tab in view on change of carrier
        """
        # Skip cost computation while only toggling the UI flags.
        with Transaction().set_context(ignore_carrier_computation=True):
            res = super(ShipmentOut, self).on_change_carrier()

        res['is_ups_shipping'] = self.carrier and \
            self.carrier.carrier_cost_method == 'ups'

        res['is_ups_worldship_shipping'] = self.carrier and \
            self.carrier.carrier_cost_method == 'ups_worldship'

        return res

    def get_worldship_xml(self):
        """
        Return shipment data with worldship understandable xml
        """
        if not self.carrier:
            self.raise_user_error('Carrier is not defined for shipment.')

        if self.carrier.carrier_cost_method != 'ups_worldship':
            self.raise_user_error(
                'Shipment %s is to be shipped with %s, not Worldship.',
                (self.reference, self.carrier.rec_name)
            )

        description = ','.join([
            move.product.name for move in self.outgoing_moves
        ])

        ship_to = self.delivery_address.to_worldship_to_address()
        ship_from = self._get_ship_from_address().to_worldship_from_address()
        # WorldShip caps the description at 50 characters.
        shipment_information = WorldShip.shipment_information_type(
            ServiceType="Standard",  # Worldease
            DescriptionOfGoods=description[:50],
            GoodsNotInFreeCirculation="0",
            BillTransportationTo="Shipper",
        )

        xml_packages = []
        for package in self.packages:
            xml_packages.append(WorldShip.package_type(
                PackageID=str(package.id),
                PackageType='CP',  # Custom Package
                Weight="%.2f" % package.weight,
            ))

        final_xml = WorldShip.get_xml(
            ship_to, ship_from, shipment_information, *xml_packages
        )

        rv = {
            'id': self.id,
            'worldship_xml': final_xml,
        }
        return rv
class StockMove:
    "Stock move"
    __name__ = "stock.move"

    def get_monetary_value_for_ups(self):
        """
        Return the monetary value of this move for UPS: the product's
        list price (per default-UOM unit) multiplied by the quantity
        expressed in the product's default UOM.
        """
        UomModel = Pool().get('product.uom')

        quantity = self.quantity
        # The list price is per unit of the product's default UOM, so
        # convert the move quantity into that UOM first when they differ.
        if self.uom != self.product.default_uom:
            quantity = UomModel.compute_qty(
                self.uom,
                quantity,
                self.product.default_uom
            )
        return Decimal(self.product.list_price) * Decimal(quantity)
class ShippingUps(ModelView):
    'Generate Labels'
    __name__ = 'shipping.label.ups'

    # UPS-specific options shown on the label-generation wizard step;
    # they mirror the fields of the same names on stock.shipment.out.
    ups_service_type = fields.Many2One('ups.service', 'UPS Service Type')
    ups_package_type = fields.Selection(
        UPS_PACKAGE_TYPES, 'Package Content Type'
    )
    ups_saturday_delivery = fields.Boolean("Is Saturday Delivery ?")
class GenerateShippingLabel(Wizard):
    'Generate Labels'
    __name__ = 'shipping.label'

    # Extra wizard step shown only for UPS carriers (see transition_next).
    ups_config = StateView(
        'shipping.label.ups',
        'shipping_ups.shipping_ups_configuration_view_form',
        [
            Button('Back', 'start', 'tryton-go-previous'),
            Button('Continue', 'generate', 'tryton-go-next'),
        ]
    )

    def default_ups_config(self, data):
        """Pre-fill the UPS step from the shipment, falling back to the
        sale configuration defaults."""
        Config = Pool().get('sale.configuration')
        config = Config(1)
        shipment = self.start.shipment
        return {
            'ups_service_type': (
                shipment.ups_service_type and shipment.ups_service_type.id
            ) or (
                config.ups_service_type and config.ups_service_type.id
            ) or None,
            'ups_package_type': (
                shipment.ups_package_type or config.ups_package_type
            ),
            'ups_saturday_delivery': shipment.ups_saturday_delivery
        }

    def transition_next(self):
        # Divert into the UPS configuration step for UPS carriers only.
        state = super(GenerateShippingLabel, self).transition_next()

        if self.start.carrier.carrier_cost_method == 'ups':
            return 'ups_config'
        return state

    def update_shipment(self):
        # Copy the options chosen on the UPS step back onto the shipment.
        shipment = super(GenerateShippingLabel, self).update_shipment()

        if self.start.carrier.carrier_cost_method == 'ups':
            shipment.ups_service_type = self.ups_config.ups_service_type
            shipment.ups_package_type = self.ups_config.ups_package_type
            shipment.ups_saturday_delivery = \
                self.ups_config.ups_saturday_delivery

        return shipment
class Package:
    __name__ = 'stock.package'

    def get_ups_package_container(self):
        """
        Return UPS package container for a single package

        Builds the <Package> element for the ShipmentConfirm request:
        packaging type, weight (in the carrier's configured UOM) and a
        zero insured value.
        """
        shipment = self.shipment
        carrier = shipment.carrier

        # The package-type code currently comes from the shipment, so all
        # packages of a shipment share one packaging type.
        package_type = ShipmentConfirm.packaging_type(
            Code=shipment.ups_package_type
        )  # FIXME: Support multiple packaging type

        package_weight = ShipmentConfirm.package_weight_type(
            Weight="%.2f" % self.weight,
            Code=carrier.ups_weight_uom_code,
        )
        package_service_options = ShipmentConfirm.package_service_options_type(
            ShipmentConfirm.insured_value_type(MonetaryValue='0')
        )
        package_container = ShipmentConfirm.package_type(
            package_type,
            package_weight,
            package_service_options
        )
        return package_container
|
{
"content_hash": "8896f75de3910d3d8cd2299708b471d8",
"timestamp": "",
"source": "github",
"line_count": 550,
"max_line_length": 79,
"avg_line_length": 33.77272727272727,
"alnum_prop": 0.5795423956931359,
"repo_name": "priyankarani/trytond-shipping-ups",
"id": "176c3bf66f5456fcaf1283c2ea51068633a3152f",
"size": "18599",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "stock.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "90190"
}
],
"symlink_target": ""
}
|
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import socket
try:
import queue
except ImportError:
import Queue as queue
class TestSocket(object):

    """
    A dummy socket that reads and writes to queues instead
    of an actual networking socket.

    Methods:
        next_sent -- Return the next sent stanza.
        recv_data -- Make a stanza available to read next.
        recv      -- Read the next stanza from the socket.
        send      -- Write a stanza to the socket.
        makefile  -- Dummy call, returns self.
        read      -- Read the next stanza from the socket.
    """

    def __init__(self, *args, **kwargs):
        """
        Create a new test socket.

        Arguments:
            Same as arguments for socket.socket
        """
        self.socket = socket.socket(*args, **kwargs)
        self.recv_queue = queue.Queue()
        self.send_queue = queue.Queue()
        self.is_live = False

    def __getattr__(self, name):
        """
        Return attribute values of internal, dummy socket.

        Some attributes and methods are disabled to prevent the
        socket from connecting to the network.

        Arguments:
            name -- Name of the attribute requested.
        """

        def dummy(*args):
            """Method to do nothing and prevent actual socket connections."""
            return None

        overrides = {'connect': dummy,
                     'close': dummy,
                     'shutdown': dummy}

        return overrides.get(name, getattr(self.socket, name))

    # ------------------------------------------------------------------
    # Testing Interface

    def next_sent(self, timeout=None):
        """
        Get the next stanza that has been 'sent'.

        Returns None if no stanza is available (immediately when no
        timeout is given, or after the timeout expires).

        Arguments:
            timeout -- Optional timeout for waiting for a new value.
        """
        args = {'block': False}
        if timeout is not None:
            args = {'block': True, 'timeout': timeout}
        try:
            return self.send_queue.get(**args)
        except queue.Empty:
            # Only an empty queue means "nothing sent". The previous bare
            # except also swallowed unrelated errors such as
            # KeyboardInterrupt, hiding real failures during tests.
            return None

    def recv_data(self, data):
        """
        Add data to the receiving queue.

        Arguments:
            data -- String data to 'write' to the socket to be received
                    by the XMPP client.
        """
        self.recv_queue.put(data)

    # ------------------------------------------------------------------
    # Socket Interface

    def recv(self, *args, **kwargs):
        """
        Read a value from the received queue.

        Arguments:
            Placeholders. Same as for socket.Socket.recv.
        """
        return self.read(block=True)

    def send(self, data):
        """
        Send data by placing it in the send queue.

        Arguments:
            data -- String value to write.
        """
        self.send_queue.put(data)

    # ------------------------------------------------------------------
    # File Socket

    def makefile(self, *args, **kwargs):
        """
        File socket version to use with ElementTree.

        Arguments:
            Placeholders, same as socket.Socket.makefile()
        """
        return self

    def read(self, block=True, timeout=None, **kwargs):
        """
        Implement the file socket interface.

        Returns None when no data arrives (non-blocking call on an empty
        queue, or a blocking call whose timeout expires).

        Arguments:
            block   -- Indicate if the read should block until a
                       value is ready.
            timeout -- Time in seconds a block should last before
                       returning None.
        """
        if timeout is not None:
            block = True
        try:
            return self.recv_queue.get(block, timeout)
        except queue.Empty:
            # Narrowed from a bare except: only "queue empty" should be
            # translated into a None return.
            return None
|
{
"content_hash": "56fc70ef51b40f76280aded118534b5f",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 77,
"avg_line_length": 27.34285714285714,
"alnum_prop": 0.518025078369906,
"repo_name": "skinkie/SleekXMPP--XEP-0080-",
"id": "e3ddd70058bb422225b806071f6d14e0fd3fc839",
"size": "3828",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sleekxmpp/test/mocksocket.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "526767"
}
],
"symlink_target": ""
}
|
"""Test configuration for Shelly."""
from unittest.mock import AsyncMock, Mock, patch
import pytest
from homeassistant.components.shelly.const import (
EVENT_SHELLY_CLICK,
REST_SENSORS_UPDATE_INTERVAL,
)
from tests.common import async_capture_events, async_mock_service, mock_device_registry
# Gen1 (CoAP/block) device /settings payload used by the mocked device;
# models a dual-relay SHSW-25 with one momentary and one toggle button.
MOCK_SETTINGS = {
    "name": "Test name",
    "mode": "relay",
    "device": {
        "mac": "test-mac",
        "hostname": "test-host",
        "type": "SHSW-25",
        "num_outputs": 2,
    },
    "coiot": {"update_period": 15},
    "fw": "20201124-092159/v1.9.0@57ac4ad8",
    "relays": [{"btn_type": "momentary"}, {"btn_type": "toggle"}],
    "rollers": [{"positioning": True}],
}
def mock_light_set_state(
    turn="on",
    mode="color",
    red=45,
    green=55,
    blue=65,
    white=70,
    gain=19,
    temp=4050,
    brightness=50,
    effect=0,
    transition=0,
):
    """Mock light block set_state."""
    # "ison" is derived from the textual turn value; every other argument
    # is echoed back verbatim, mirroring the device's set_state response.
    state = {"ison": turn == "on"}
    state.update(
        mode=mode,
        red=red,
        green=green,
        blue=blue,
        white=white,
        gain=gain,
        temp=temp,
        brightness=brightness,
        effect=effect,
        transition=transition,
    )
    return state
# Mocked Gen1 device blocks: a relay, a roller and a light. Each mock's
# set_state echoes its arguments back the way the real firmware responds.
MOCK_BLOCKS = [
    Mock(
        sensor_ids={"inputEvent": "S", "inputEventCnt": 2},
        channel="0",
        type="relay",
        set_state=AsyncMock(side_effect=lambda turn: {"ison": turn == "on"}),
    ),
    Mock(
        sensor_ids={"roller": "stop", "rollerPos": 0},
        channel="1",
        type="roller",
        set_state=AsyncMock(
            side_effect=lambda go, roller_pos=0: {
                "current_pos": roller_pos,
                "state": go,
            }
        ),
    ),
    Mock(
        sensor_ids={},
        channel="0",
        # output/colorTemp mirror selected keys of the set_state payload.
        output=mock_light_set_state()["ison"],
        colorTemp=mock_light_set_state()["temp"],
        **mock_light_set_state(),
        type="light",
        set_state=AsyncMock(side_effect=mock_light_set_state),
    ),
]
# Gen2 (RPC) device configuration payload.
MOCK_CONFIG = {
    "input:0": {"id": 0, "type": "button"},
    "switch:0": {"name": "test switch_0"},
    "cover:0": {"name": "test cover_0"},
    "sys": {
        "ui_data": {},
        "device": {"name": "Test name"},
    },
}

# Gen1 /shelly discovery payload.
MOCK_SHELLY_COAP = {
    "mac": "test-mac",
    "auth": False,
    "fw": "20201124-092854/v1.9.0@57ac4ad8",
    "num_outputs": 2,
}

# Gen2 /shelly discovery payload (Shelly Plus 2PM in cover profile).
MOCK_SHELLY_RPC = {
    "name": "Test Gen2",
    "id": "shellyplus2pm-123456789abc",
    "mac": "123456789ABC",
    "model": "SNSW-002P16EU",
    "gen": 2,
    "fw_id": "20220830-130540/0.11.0-gfa1bc37",
    "ver": "0.11.0",
    "app": "Plus2PM",
    "auth_en": False,
    "auth_domain": None,
    "profile": "cover",
}

# Gen1 /status payload; uptime is a multiple of the REST poll interval so
# tests can trigger (or skip) REST sensor updates deterministically.
MOCK_STATUS_COAP = {
    "update": {
        "status": "pending",
        "has_update": True,
        "beta_version": "some_beta_version",
        "new_version": "some_new_version",
        "old_version": "some_old_version",
    },
    "uptime": 5 * REST_SENSORS_UPDATE_INTERVAL,
}

# Gen2 status payload.
MOCK_STATUS_RPC = {
    "switch:0": {"output": True},
    "cover:0": {"state": "stopped", "pos_control": True, "current_pos": 50},
    "sys": {
        "available_updates": {
            "beta": {"version": "some_beta_version"},
            "stable": {"version": "some_beta_version"},
        }
    },
}
@pytest.fixture(autouse=True)
def mock_coap():
    """Mock out coap."""
    # Autouse: prevents every test from opening a real CoAP context.
    with patch("homeassistant.components.shelly.utils.get_coap_context"):
        yield
@pytest.fixture
def device_reg(hass):
    """Return an empty, loaded, registry."""
    return mock_device_registry(hass)
@pytest.fixture
def calls(hass):
    """Track calls to a mock service."""
    # Registers a no-op 'test.automation' service and returns its call log.
    return async_mock_service(hass, "test", "automation")
@pytest.fixture
def events(hass):
    """Yield caught shelly_click events."""
    return async_capture_events(hass, EVENT_SHELLY_CLICK)
@pytest.fixture
async def mock_block_device():
    """Mock block (Gen1, CoAP) device."""
    with patch("homeassistant.components.shelly.utils.COAP", autospec=True), patch(
        "aioshelly.block_device.BlockDevice.create"
    ) as block_device_mock:

        def update():
            # Invoke the callback registered via subscribe_updates to
            # simulate a device-pushed state update.
            block_device_mock.return_value.subscribe_updates.call_args[0][0]({})

        device = Mock(
            blocks=MOCK_BLOCKS,
            settings=MOCK_SETTINGS,
            shelly=MOCK_SHELLY_COAP,
            status=MOCK_STATUS_COAP,
            firmware_version="some fw string",
            update=AsyncMock(),
            update_status=AsyncMock(),
            trigger_ota_update=AsyncMock(),
            trigger_reboot=AsyncMock(),
            initialize=AsyncMock(),
            initialized=True,
        )
        block_device_mock.return_value = device
        # Tests call mock_update() to push a fake update through the
        # integration's subscription path.
        block_device_mock.return_value.mock_update = Mock(side_effect=update)

        yield block_device_mock.return_value
@pytest.fixture
async def mock_rpc_device():
    """Mock rpc (Gen2, Websocket) device."""
    with patch("aioshelly.rpc_device.RpcDevice.create") as rpc_device_mock:

        def update():
            # Invoke the callback registered via subscribe_updates to
            # simulate a device-pushed state update.
            rpc_device_mock.return_value.subscribe_updates.call_args[0][0]({})

        device = Mock(
            call_rpc=AsyncMock(),
            config=MOCK_CONFIG,
            event={},
            shelly=MOCK_SHELLY_RPC,
            status=MOCK_STATUS_RPC,
            firmware_version="some fw string",
            update=AsyncMock(),
            trigger_ota_update=AsyncMock(),
            trigger_reboot=AsyncMock(),
            initialized=True,
            shutdown=AsyncMock(),
        )
        rpc_device_mock.return_value = device
        # Tests call mock_update() to push a fake update through the
        # integration's subscription path.
        rpc_device_mock.return_value.mock_update = Mock(side_effect=update)

        yield rpc_device_mock.return_value
|
{
"content_hash": "97d72d71f63b6a4507e26a56554efc2a",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 87,
"avg_line_length": 25.69683257918552,
"alnum_prop": 0.5567881669307977,
"repo_name": "nkgilley/home-assistant",
"id": "cca4aebb9ead37774c785931ceb683980839b593",
"size": "5679",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/shelly/conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "51597279"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
from rest_framework.permissions import BasePermission
from django.shortcuts import get_object_or_404
class IsNewsManager(BasePermission):
    """Allow access only to users in the 'News manager' or 'Administrator'
    group.

    Bug fix: the original condition
    ``if 'News manager' or 'Administrator' in group.name`` always evaluated
    to True because a non-empty string literal is truthy, so every
    authenticated user was granted permission.
    """

    message = 'Permissions denied'
    safe_method = ['PUT', 'POST', 'DELETE']

    def has_permission(self, request, view):
        # NOTE(review): groups.get() raises if the user has zero or more
        # than one group — preserved from the original; confirm users are
        # guaranteed exactly one group.
        group = request.user.groups.get()
        return group.name in ('News manager', 'Administrator')
|
{
"content_hash": "83cb06df7520ee0b9e1f0494b761a718",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 59,
"avg_line_length": 31.692307692307693,
"alnum_prop": 0.6844660194174758,
"repo_name": "bakowroc/newsfeed-system",
"id": "ef0c2fafc293bcfbc4a5462a951e1700bf319131",
"size": "412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "newsfeedsystem/news/api/permissions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27521"
},
{
"name": "HTML",
"bytes": "18285"
},
{
"name": "JavaScript",
"bytes": "2149"
},
{
"name": "Python",
"bytes": "33883"
},
{
"name": "TypeScript",
"bytes": "34866"
}
],
"symlink_target": ""
}
|
import base64
import json
import urllib, urllib2
from get_credentials import get_api_keys
# Pushbullet v2 REST endpoints; the API key is loaded once at import time.
API_KEY = get_api_keys()['pushbullet']
USER_DATA_URL = 'https://api.pushbullet.com/v2/users/me'
DEVICE_URL = 'https://api.pushbullet.com/v2/devices'
PUSH_URL = 'https://api.pushbullet.com/v2/pushes'
def send_request(url, post_data=None):
    """Issue an authenticated request to the Pushbullet API and return the
    decoded JSON response.

    Uses HTTP Basic auth with the API key as the username and an empty
    password. A GET is sent unless ``post_data`` is given, in which case
    it is form-encoded and sent as a POST body. (Python 2 urllib2 code.)
    """
    request = urllib2.Request(url)
    base64string = base64.encodestring('%s:%s' % (API_KEY, '')).replace('\n', '')
    request.add_header("Authorization", "Basic %s" % base64string)
    if post_data is not None:
        # Setting .data switches urllib2 to a POST request.
        request.data = urllib.urlencode(post_data)
    call = urllib2.urlopen(request)
    result = call.read()
    return json.loads(result)
def get_user_email():
    """Return the e-mail address of the authenticated Pushbullet user."""
    return send_request(USER_DATA_URL)['email']
def get_device_list():
    """Return the raw list of devices registered on the account."""
    response = send_request(DEVICE_URL)
    return response['devices']
def get_device_by_name(name):
    """Return the first device whose nickname equals *name*.

    Raises IndexError when no device matches (same as before).
    """
    matching = [dev for dev in get_device_list()
                if dev.get(u'nickname', '') == name]
    return matching[0]
def get_devices_by_type(device_type):
    """Return every device whose 'type' field equals *device_type*."""
    return [dev for dev in get_device_list()
            if dev.get(u'type', '') == device_type]
def send_note(email, title, body, sending_device=None):
    """Push a plain note to *email*.

    When *sending_device* is given, it is reported to the API as the
    originating device identifier.
    """
    payload = {'email': email, 'type': 'note', 'title': title,
               'body': body}
    if sending_device is not None:
        payload['source_device_iden'] = sending_device
    return send_request(PUSH_URL, post_data=payload)
def send_url(email, title, url, body='', sending_device=None):
    """Push a link to *email*.

    Bug fix: the push ``type`` must be ``'link'`` for URL pushes; the
    previous implementation sent ``'note'``, so the Pushbullet API ignored
    the ``url`` field and delivered a plain note instead.

    When *sending_device* is given, it is reported to the API as the
    originating device identifier.
    """
    payload = {'email': email, 'type': 'link', 'title': title,
               'url': url, 'body': body}
    if sending_device is not None:
        payload['source_device_iden'] = sending_device
    return send_request(PUSH_URL, post_data=payload)
def get_pushes(modified_after=0):
    """Return all pushes modified after the given UNIX timestamp."""
    url = '%s?modified_after=%s' % (PUSH_URL, modified_after)
    return send_request(url)['pushes']
def filter_pushes(attr, value):
    """Return pushes whose *attr* field equals *value*."""
    return [push for push in get_pushes()
            if push.get(attr, '') == value]
def get_pushes_for_device(dev_name):
    """Return all pushes addressed to the device named *dev_name*.

    Bug fix: get_device_by_name() returns the full device dict, but a
    push's ``receiver_iden`` holds only the device *identifier* string.
    The previous code compared the field against the whole dict, so the
    filter never matched anything.
    """
    device = get_device_by_name(dev_name)
    return filter_pushes(u'receiver_iden', device['iden'])
|
{
"content_hash": "85c5a4195c3124c0b9124b13f3271670",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 86,
"avg_line_length": 36.25757575757576,
"alnum_prop": 0.6234851650647723,
"repo_name": "Serpens/pywebapis",
"id": "1682b08af925643ba862fb9bd9ecc9edb1be8d78",
"size": "2415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pushbullet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2883"
}
],
"symlink_target": ""
}
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
import datetime
from django.db import transaction
from django.contrib.sessions.models import Session
def clean_up():
    """Clean up expired sessions."""
    # Delete every session whose expiry timestamp is already in the past.
    cutoff = datetime.datetime.now()
    expired = Session.objects.filter(expire_date__lt=cutoff)
    expired.delete()
    transaction.commit_unless_managed()


if __name__ == "__main__":
    clean_up()
|
{
"content_hash": "fe7885631eb842c2207cc22bf1ea639f",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 77,
"avg_line_length": 24.833333333333332,
"alnum_prop": 0.7114093959731543,
"repo_name": "diofeher/django-nfa",
"id": "c87be1e4c39ae705669e13bf8f4abfac50e10c4c",
"size": "470",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "build/lib/django/bin/daily_cleanup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "66105"
},
{
"name": "Python",
"bytes": "5174003"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
}
|
"""Process the ImageNet Challenge bounding boxes for TensorFlow model training.
Associate the ImageNet 2012 Challenge validation data set with labels.
The raw ImageNet validation data set is expected to reside in JPEG files
located in the following directory structure.
data_dir/ILSVRC2012_val_00000001.JPEG
data_dir/ILSVRC2012_val_00000002.JPEG
...
data_dir/ILSVRC2012_val_00050000.JPEG
This script moves the files into a directory structure like such:
data_dir/n01440764/ILSVRC2012_val_00000293.JPEG
data_dir/n01440764/ILSVRC2012_val_00000543.JPEG
...
where 'n01440764' is the unique synset label associated with
these images.
This directory reorganization requires a mapping from validation image
number (i.e. suffix of the original file) to the associated label. This
is provided in the ImageNet development kit via a Matlab file.
In order to make life easier and divorce ourselves from Matlab, we instead
supply a custom text file that provides this mapping for us.
Sample usage:
./preprocess_imagenet_validation_data.py ILSVRC2012_img_val \
imagenet_2012_validation_synset_labels.txt
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import errno
import os.path
import sys
if __name__ == '__main__':
  if len(sys.argv) < 3:
    print('Invalid usage\n'
          'usage: preprocess_imagenet_validation_data.py '
          '<validation data dir> <validation labels file>')
    sys.exit(-1)
  data_dir = sys.argv[1]
  validation_labels_file = sys.argv[2]

  # Read in the 50000 synsets associated with the validation data set.
  # A context manager closes the file deterministically (the original left
  # the handle open for the garbage collector to reclaim).
  with open(validation_labels_file) as labels_file:
    labels = [l.strip() for l in labels_file]
  unique_labels = set(labels)

  # Make all sub-directories in the validation data dir.
  for label in unique_labels:
    labeled_data_dir = os.path.join(data_dir, label)
    # Catch error if sub-directory exists
    try:
      os.makedirs(labeled_data_dir)
    except OSError as e:
      # Raise all errors but 'EEXIST' (directory already present).
      if e.errno != errno.EEXIST:
        raise

  # Move all of the images to the appropriate sub-directory.
  # enumerate replaces the index-based range(len(...)) loop; validation
  # files are numbered starting at 1, hence i + 1.
  for i, label in enumerate(labels):
    basename = 'ILSVRC2012_val_000%.5d.JPEG' % (i + 1)
    original_filename = os.path.join(data_dir, basename)
    if not os.path.exists(original_filename):
      print('Failed to find: %s' % original_filename)
      sys.exit(-1)
    new_filename = os.path.join(data_dir, label, basename)
    os.rename(original_filename, new_filename)
|
{
"content_hash": "43f252f8162b376bf48dfa87440e72d6",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 33.78378378378378,
"alnum_prop": 0.7296,
"repo_name": "derekjchow/models",
"id": "da85f8f14c8d0fc84980a2611d6bc7522668aa62",
"size": "3195",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "research/inception/inception/data/preprocess_imagenet_validation_data.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1523636"
},
{
"name": "Dockerfile",
"bytes": "9821"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33316"
},
{
"name": "Jupyter Notebook",
"bytes": "2831692"
},
{
"name": "Makefile",
"bytes": "4933"
},
{
"name": "Python",
"bytes": "14201542"
},
{
"name": "Shell",
"bytes": "158255"
}
],
"symlink_target": ""
}
|
import time
from envirophat import light, weather
from beebotte import *
### Replace CHANNEL_TOKEN with that of your channel
bbt = BBT(token = 'CHANNEL_TOKEN')
period = 300 ## Sensor data reporting period (5 minutes)
### Change channel name as suits you - in this instance, it is called Enviro_pHAT
temp_resource = Resource(bbt, 'Enviro_pHAT', 'temperature')
pressure_resource = Resource(bbt, 'Enviro_pHAT', 'pressure')
light_resource = Resource(bbt, 'Enviro_pHAT', 'light')
def run():
    """Poll the Enviro pHAT forever and publish readings to Beebotte."""
    while True:
        # The -9 degree offset compensates for heat soaked in from the Pi
        # itself (calibration from the original author; adjust as needed).
        temperature = weather.temperature() -9
        pressure = weather.pressure()/100
        lux = light.light()
        if temperature is None or pressure is None or lux is None:
            print ("Failed to get reading. Try again!")
        else:
            print ("Temp={0:.1f}*C Pressure={1:.0f} hPa Light={2:.0f} lux".format(temperature, pressure, lux))
            try:
                # Push each reading to its Beebotte resource.
                temp_resource.write(temperature)
                pressure_resource.write(pressure)
                light_resource.write(lux)
            except Exception:
                # Best-effort: log and keep sampling on network failure.
                print ("Error while writing to Beebotte")
        # Wait out the reporting period before the next sample.
        time.sleep(period)
run()
|
{
"content_hash": "b57c5ff778df434fc945d5fb739522c5",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 114,
"avg_line_length": 37.4054054054054,
"alnum_prop": 0.666907514450867,
"repo_name": "raspberrycoulis/beebotte-envirophat",
"id": "c2b60c2e44e462c44df4ee93ebec69ab90f65194",
"size": "1407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sense.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1407"
}
],
"symlink_target": ""
}
|
from boomslang import Line, Plot
from ImageComparisonTestCase import ImageComparisonTestCase
import unittest
class TickStylesTest(ImageComparisonTestCase, unittest.TestCase):
    """Renders a reference plot exercising tick-label and title styling."""

    def __init__(self, testCaseName):
        super(TickStylesTest, self).__init__(testCaseName)
        self.imageName = "tickstyles.png"

    def constructImage(self):
        """Draw the reference image with styled tick labels and a styled title."""
        stylePlot = Plot()
        styleLine = Line()

        styleLine.yValues = [25, 40, 30, 23, 10, 50]
        styleLine.xValues = range(len(styleLine.yValues))

        styleLine.xTickLabels = ["X 1", "X 2", "X 3", "X 4", "X 5"]
        styleLine.yTickLabels = ["Y Ten", "Y Twenty", "Y Thirty", "Y Forty",
                                 "Y Fifty", "Y Sixty"]
        styleLine.yTickLabelPoints = [10, 20, 30, 40, 50, 60]

        # Tick label properties may be assigned wholesale as a dictionary ...
        styleLine.xTickLabelProperties = {
            "color" : "blue",
            "weight" : "bold",
            "rotation" : 45
        }
        styleLine.yTickLabelProperties = {
            "style" : "italic",
            "alpha" : 0.5,
            "color" : "red"
        }

        # ... wiped here purely to demonstrate the second approach below.
        styleLine.xTickLabelProperties.clear()
        styleLine.yTickLabelProperties.clear()

        # ... or populated one key at a time.
        styleLine.xTickLabelProperties["color"] = "blue"
        styleLine.xTickLabelProperties["weight"] = "bold"
        styleLine.xTickLabelProperties["rotation"] = "45"
        styleLine.yTickLabelProperties["style"] = "italic"
        styleLine.yTickLabelProperties["alpha"] = 0.5
        styleLine.yTickLabelProperties["color"] = "red"

        stylePlot.add(styleLine)
        stylePlot.title = "Craaazy Title"
        stylePlot.setTitleProperties(style="italic", weight="bold",
                                     rotation="5", color="orange")
        stylePlot.xLabel = "X Label"
        stylePlot.yLabel = "Y Label"
        stylePlot.yLimits = (0, 60)
        stylePlot.tight = True
        stylePlot.save(self.imageName)
# Register this case with the shared image-comparison harness so the suite
# runner can discover and execute it.
ImageComparisonTestCase.register(TickStylesTest)
if __name__ == "__main__":
    # Manual invocation: just regenerate the reference image.
    test = TickStylesTest("testImageComparison")
    test.constructImage()
|
{
"content_hash": "f4c8d200f1418a3dd5ffc3cde51b5617",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 71,
"avg_line_length": 31.37878787878788,
"alnum_prop": 0.5789473684210527,
"repo_name": "alexras/boomslang",
"id": "e00a2d994a8b66bf56817bcd49b7a328f6a67e4d",
"size": "2094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_tickstyles.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "105"
},
{
"name": "Makefile",
"bytes": "6855"
},
{
"name": "Python",
"bytes": "178970"
}
],
"symlink_target": ""
}
|
"""
:Author Patrik Valkovic
:Created 04.07.2017 20:02
:Licence MIT
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.old_api import Terminal
class TempClass:
    # Bare placeholder type used by the tests below as a non-trivial symbol.
    pass
class TerminalCreationTest(TestCase):
    """Terminal must hand back, via symbol(), whatever it was built around."""

    def test_createWithSymbol(self):
        term = Terminal('a')
        self.assertEqual('a', term.symbol(), 'Terminal should return same symbol')

    def test_createWithNumber(self):
        term = Terminal(5)
        self.assertEqual(5, term.symbol(), 'Terminal should return same number')

    def test_createWithClass(self):
        term = Terminal(TempClass)
        self.assertEqual(TempClass, term.symbol(), 'Terminal should return same class')

    def test_createWithInstance(self):
        instance = TempClass()
        term = Terminal(instance)
        self.assertEqual(instance, term.symbol(), 'Terminal should return same instance')

if __name__ == '__main__':
    main()
|
{
"content_hash": "4efe295422a0e6be5d70e2dc5471535a",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 86,
"avg_line_length": 24.944444444444443,
"alnum_prop": 0.6692650334075724,
"repo_name": "PatrikValkovic/grammpy",
"id": "c77ade0fa0dba2faa2072cdd189390451134d026",
"size": "920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/grammpy_test/oldapi_tests/terminal_tests/TerminalCreationTest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "604926"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import apache.thrift.metadata.thrift_types as _fbthrift_metadata
import facebook.thrift.annotation.thrift.thrift_metadata
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# NOTE: generated Thrift metadata code (thrift compiler, gen-python target);
# hand edits will be lost on regeneration.
# Registers module.Fiery in the given ThriftMetadata and returns an updated
# copy; returns the input unchanged if the exception is already registered.
def _fbthrift_gen_metadata_exception_Fiery(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.Fiery"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
        _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="message", is_optional=False, structured_annotations=[
        ]),
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        ])
    # Calling the struct appears to produce an updated copy (thrift-python
    # immutable struct call semantics) -- confirm against runtime docs.
    new_struct = metadata_struct(exceptions=struct_dict)
    # message
    return new_struct
# Convenience wrapper: build the metadata starting from an empty ThriftMetadata.
def gen_metadata_exception_Fiery() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_Fiery(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# Generated code: registers module.Serious metadata (no-op if present).
def _fbthrift_gen_metadata_exception_Serious(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.Serious"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
        _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="sonnet", is_optional=False, structured_annotations=[
        ]),
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        ])
    new_struct = metadata_struct(exceptions=struct_dict)
    # sonnet
    return new_struct
# Wrapper: Serious metadata from an empty ThriftMetadata.
def gen_metadata_exception_Serious() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_Serious(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# Generated code: registers module.ComplexFieldNames metadata (no-op if present).
def _fbthrift_gen_metadata_exception_ComplexFieldNames(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.ComplexFieldNames"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
        _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="error_message", is_optional=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftField(id=2, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="internal_error_message", is_optional=False, structured_annotations=[
        ]),
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        ])
    new_struct = metadata_struct(exceptions=struct_dict)
    # error_message
    # internal_error_message
    return new_struct
# Wrapper: ComplexFieldNames metadata from an empty ThriftMetadata.
def gen_metadata_exception_ComplexFieldNames() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_ComplexFieldNames(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# Generated code: registers module.CustomFieldNames metadata (no-op if present).
def _fbthrift_gen_metadata_exception_CustomFieldNames(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.CustomFieldNames"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
        _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="error_message", is_optional=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftField(id=2, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="internal_error_message", is_optional=False, structured_annotations=[
        ]),
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        ])
    new_struct = metadata_struct(exceptions=struct_dict)
    # error_message
    # internal_error_message
    return new_struct
# Wrapper: CustomFieldNames metadata from an empty ThriftMetadata.
def gen_metadata_exception_CustomFieldNames() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_CustomFieldNames(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# Generated code: registers module.ExceptionWithPrimitiveField metadata.
def _fbthrift_gen_metadata_exception_ExceptionWithPrimitiveField(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.ExceptionWithPrimitiveField"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
        _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="message", is_optional=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftField(id=2, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_I32_TYPE), name="error_code", is_optional=False, structured_annotations=[
        ]),
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        ])
    new_struct = metadata_struct(exceptions=struct_dict)
    # message
    # error_code
    return new_struct
# Wrapper: ExceptionWithPrimitiveField metadata from an empty ThriftMetadata.
def gen_metadata_exception_ExceptionWithPrimitiveField() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_ExceptionWithPrimitiveField(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# Generated code: registers module.ExceptionWithStructuredAnnotation metadata.
# Unlike its siblings, this exception carries a structured annotation
# (thrift.ExceptionMessage pointing at "message_field").
def _fbthrift_gen_metadata_exception_ExceptionWithStructuredAnnotation(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.ExceptionWithStructuredAnnotation"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
        _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), name="message_field", is_optional=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftField(id=2, type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_I32_TYPE), name="error_code", is_optional=False, structured_annotations=[
        ]),
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        _fbthrift_metadata.ThriftConstStruct(type=_fbthrift_metadata.ThriftStructType(name="thrift.ExceptionMessage"), fields= { "field": _fbthrift_metadata.ThriftConstValue(cv_string="message_field"), }),
        ])
    new_struct = metadata_struct(exceptions=struct_dict)
    # message_field
    # error_code
    return new_struct
# Wrapper: ExceptionWithStructuredAnnotation metadata from an empty ThriftMetadata.
def gen_metadata_exception_ExceptionWithStructuredAnnotation() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_ExceptionWithStructuredAnnotation(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# TODO (ffrancet): This general pattern can be optimized by using tuples and dicts
# instead of re-generating thrift structs
# Generated code: registers module.Banal metadata (an exception with no fields).
def _fbthrift_gen_metadata_exception_Banal(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.Banal"
    if qualified_name in metadata_struct.exceptions:
        return metadata_struct
    fields = [
    ]
    struct_dict = dict(metadata_struct.exceptions)
    struct_dict[qualified_name] = _fbthrift_metadata.ThriftException(name=qualified_name, fields=fields,
        structured_annotations=[
        ])
    new_struct = metadata_struct(exceptions=struct_dict)
    return new_struct
# Wrapper: Banal metadata from an empty ThriftMetadata.
def gen_metadata_exception_Banal() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_exception_Banal(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# Generated code: wrapper building Raiser service metadata from scratch.
def gen_metadata_service_Raiser() -> _fbthrift_metadata.ThriftMetadata:
    return _fbthrift_gen_metadata_service_Raiser(_fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={}))
# Registers module.Raiser (functions doBland/doRaise/get200/get500) and the
# exception types those functions declare; no-op if already present.
def _fbthrift_gen_metadata_service_Raiser(metadata_struct: _fbthrift_metadata.ThriftMetadata) -> _fbthrift_metadata.ThriftMetadata:
    qualified_name = "module.Raiser"
    if qualified_name in metadata_struct.services:
        return metadata_struct
    functions = [
        _fbthrift_metadata.ThriftFunction(name="doBland", return_type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_VOID_TYPE), arguments=[
        ], exceptions = [
        ], is_oneway=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftFunction(name="doRaise", return_type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_VOID_TYPE), arguments=[
        ], exceptions = [
            _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_struct=_fbthrift_metadata.ThriftStructType(name="module.Banal")), name="b", is_optional=False, structured_annotations=[
            ]),
            _fbthrift_metadata.ThriftField(id=2, type=_fbthrift_metadata.ThriftType(t_struct=_fbthrift_metadata.ThriftStructType(name="module.Fiery")), name="f", is_optional=False, structured_annotations=[
            ]),
            _fbthrift_metadata.ThriftField(id=3, type=_fbthrift_metadata.ThriftType(t_struct=_fbthrift_metadata.ThriftStructType(name="module.Serious")), name="s", is_optional=False, structured_annotations=[
            ]),
        ], is_oneway=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftFunction(name="get200", return_type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), arguments=[
        ], exceptions = [
        ], is_oneway=False, structured_annotations=[
        ]),
        _fbthrift_metadata.ThriftFunction(name="get500", return_type=_fbthrift_metadata.ThriftType(t_primitive=_fbthrift_metadata.ThriftPrimitiveType.THRIFT_STRING_TYPE), arguments=[
        ], exceptions = [
            _fbthrift_metadata.ThriftField(id=1, type=_fbthrift_metadata.ThriftType(t_struct=_fbthrift_metadata.ThriftStructType(name="module.Fiery")), name="f", is_optional=False, structured_annotations=[
            ]),
            _fbthrift_metadata.ThriftField(id=2, type=_fbthrift_metadata.ThriftType(t_struct=_fbthrift_metadata.ThriftStructType(name="module.Banal")), name="b", is_optional=False, structured_annotations=[
            ]),
            _fbthrift_metadata.ThriftField(id=3, type=_fbthrift_metadata.ThriftType(t_struct=_fbthrift_metadata.ThriftStructType(name="module.Serious")), name="s", is_optional=False, structured_annotations=[
            ]),
        ], is_oneway=False, structured_annotations=[
        ]),
    ]
    service_dict = dict(metadata_struct.services)
    service_dict[qualified_name] = _fbthrift_metadata.ThriftService(name=qualified_name, functions=functions, structured_annotations=[
    ])
    new_struct = metadata_struct(services=service_dict)
    # return value
    new_struct = _fbthrift_gen_metadata_exception_Banal(new_struct) # b
    new_struct = _fbthrift_gen_metadata_exception_Fiery(new_struct) # f
    new_struct = _fbthrift_gen_metadata_exception_Serious(new_struct) # s
    # return value
    # return value
    new_struct = _fbthrift_gen_metadata_exception_Fiery(new_struct) # f
    new_struct = _fbthrift_gen_metadata_exception_Banal(new_struct) # b
    new_struct = _fbthrift_gen_metadata_exception_Serious(new_struct) # s
    # return value
    return new_struct
# Generated code: builds the ThriftServiceMetadataResponse for module.Raiser --
# full module metadata plus the service context and refs to every known service.
def _fbthrift_metadata_service_response_Raiser() -> _fbthrift_metadata.ThriftServiceMetadataResponse:
    metadata = gen_metadata_service_Raiser()
    context = _fbthrift_metadata.ThriftServiceContext(service_info=metadata.services["module.Raiser"], module=_fbthrift_metadata.ThriftModuleContext(name="module"))
    services = [_fbthrift_metadata.ThriftServiceContextRef(module=_fbthrift_metadata.ThriftModuleContext(name=name.split('.')[0]), service_name=name) for name in metadata.services]
    return _fbthrift_metadata.ThriftServiceMetadataResponse(metadata=metadata,context=context,services=services)
# Generated code: aggregates metadata for every exception and service that
# this Thrift module defines, starting from an empty ThriftMetadata.
def getThriftModuleMetadata() -> _fbthrift_metadata.ThriftMetadata:
    meta = _fbthrift_metadata.ThriftMetadata(structs={}, enums={}, exceptions={}, services={})
    meta = _fbthrift_gen_metadata_exception_Fiery(meta)
    meta = _fbthrift_gen_metadata_exception_Serious(meta)
    meta = _fbthrift_gen_metadata_exception_ComplexFieldNames(meta)
    meta = _fbthrift_gen_metadata_exception_CustomFieldNames(meta)
    meta = _fbthrift_gen_metadata_exception_ExceptionWithPrimitiveField(meta)
    meta = _fbthrift_gen_metadata_exception_ExceptionWithStructuredAnnotation(meta)
    meta = _fbthrift_gen_metadata_exception_Banal(meta)
    meta = _fbthrift_gen_metadata_service_Raiser(meta)
    return meta
|
{
"content_hash": "c1554f62a70e7d7408c83b92e49077bc",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 226,
"avg_line_length": 53.3235294117647,
"alnum_prop": 0.7345559845559846,
"repo_name": "facebook/fbthrift",
"id": "886e4a973ce585ec52c534746419a9d8239ac5c4",
"size": "14565",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "thrift/compiler/test/fixtures/exceptions/gen-python/module/thrift_metadata.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "15608"
},
{
"name": "C++",
"bytes": "10658844"
},
{
"name": "CMake",
"bytes": "147347"
},
{
"name": "CSS",
"bytes": "4028"
},
{
"name": "Cython",
"bytes": "339005"
},
{
"name": "Emacs Lisp",
"bytes": "11229"
},
{
"name": "Go",
"bytes": "447092"
},
{
"name": "Hack",
"bytes": "313122"
},
{
"name": "Java",
"bytes": "1990062"
},
{
"name": "JavaScript",
"bytes": "38872"
},
{
"name": "Mustache",
"bytes": "1269560"
},
{
"name": "Python",
"bytes": "1623026"
},
{
"name": "Ruby",
"bytes": "6111"
},
{
"name": "Rust",
"bytes": "283392"
},
{
"name": "Shell",
"bytes": "6615"
},
{
"name": "Thrift",
"bytes": "1859041"
},
{
"name": "Vim Script",
"bytes": "2887"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import logging
import unittest
from nba.scrapers.nbacom import NBAComScraper
class NBAComScraper_test(unittest.TestCase):
    """Exercises NBAComScraper endpoints against the live stats.nba.com API."""

    def setUp(self):
        logging.getLogger(__name__).addHandler(logging.NullHandler())
        self.nbs = NBAComScraper()

    def _check_resultsets(self, content):
        # Every nba.com JSON payload is a dict carrying a 'resultSets' entry.
        self.assertIsInstance(content, dict)
        self.assertIsNotNone(content.get('resultSets', None))

    def _check_headers(self, content):
        # Stronger check: the first result set must also expose 'headers'.
        self.assertIsInstance(content, dict)
        rs = content.get('resultSets', None)
        self.assertIsNotNone(rs)
        self.assertIsNotNone(rs[0].get('headers', None))

    def test_boxscore(self):
        game_id = '0021500001'
        for scrape in (self.nbs.boxscore_traditional,
                       self.nbs.boxscore_advanced,
                       self.nbs.boxscore_scoring,
                       self.nbs.boxscore_misc,
                       self.nbs.boxscore_usage):
            self._check_resultsets(scrape(game_id))

    def test_playerstats(self):
        self._check_headers(self.nbs.playerstats('2016-17'))

    def test_player_info(self):
        self._check_headers(self.nbs.player_info('203083', '2016-17'))

    def test_one_player_gamelogs(self):
        self._check_headers(self.nbs.one_player_gamelogs('203083', '2016-17'))

    def test_players(self):
        self._check_headers(self.nbs.players(season='2016-17', cs_only='1'))

    def test_season_gamelogs(self):
        # Same endpoint, once for player-level and once for team-level logs.
        for flag in ('P', 'T'):
            self._check_headers(
                self.nbs.season_gamelogs(season='2016-17', player_or_team=flag))

    def test_team_dashboard(self):
        self._check_headers(self.nbs.team_dashboard('1610612765', '2016-17'))

    def test_team_opponent_dashboard(self):
        self._check_headers(self.nbs.team_opponent_dashboard('2016-17'))

    def test_one_team_gamelogs(self):
        self._check_headers(self.nbs.one_team_gamelogs('1610612765', '2016-17'))

    def test_teamstats(self):
        # Mirrors test_team_opponent_dashboard: the original exercised the
        # same scraper call here.
        self._check_headers(self.nbs.team_opponent_dashboard('2016-17'))

    def test_teams(self):
        # NOTE(review): `basestring` makes this Python-2-only as written.
        t = self.nbs.teams()
        self.assertIsInstance(t, basestring)
        self.assertRegexpMatches(t, r'Pistons')

if __name__=='__main__':
    logging.basicConfig(level=logging.ERROR)
    unittest.main()
|
{
"content_hash": "8d278c04c9d390262935c36f00feef14",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 76,
"avg_line_length": 36.80530973451327,
"alnum_prop": 0.6333253185861986,
"repo_name": "sansbacon/nba",
"id": "c97e3e3ede1f116ac9d9c3a1d485b9893d4e62e3",
"size": "4159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_nbacomscraper.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "350728"
},
{
"name": "Python",
"bytes": "198341"
}
],
"symlink_target": ""
}
|
import sys
import collections
import itertools
import unicodedata
import bisect
# True on any Python 2.x interpreter.
PY_LEGACY = sys.version_info < (3, )
if PY_LEGACY:
    # Shadow the builtins with Python-3-like counterparts so the rest of the
    # module is written once: unicode-aware chr, lazy range and map.
    chr = unichr # noqa
    range = xrange # noqa
    map = itertools.imap # noqa
class RangeGroup(tuple):
    '''
    Immutable sequence of unicode code point ranges.

    Every element is a ``(start, end)`` tuple with **end** exclusive, the
    same convention as python's :func:`range` builtin. Ranges are expected
    to be non-overlapping and sorted by start; if that is not guaranteed,
    run the values through :func:`unicategories.merge` first.
    '''
    def __new__(cls, range_list=()):
        '''
        Create and return a new object. See help(type) for accurate signature
        '''
        normalized = (tuple(unirange) for unirange in range_list)
        return super(RangeGroup, cls).__new__(cls, normalized)

    def __add__(self, other):
        '''
        Return self+value: the merged combination of both range groups.

        x.__add__(y) <==> x+y
        '''
        return merge(self, other)

    def __mul__(self, mult):
        '''
        Return self*value.

        Repetition is meaningless for a set of ranges, so the group itself
        is returned unchanged.
        '''
        return self

    def characters(self):
        '''
        Get iterator with all characters on this range group.

        :yields: iterator of characters (str of size 1)
        :ytype: str
        '''
        return (chr(code) for code in self.codes())

    def codes(self):
        '''
        Get iterator for all unicode code points contained in this range group.

        :yields: iterator of character index (int)
        :ytype: int
        '''
        return itertools.chain.from_iterable(
            range(start, end) for start, end in self)

    def has(self, character):
        '''
        Get if character (or character code point) is contained by any range
        on this range group.

        :param character: character or unicode code point to look for
        :type character: str or int
        :returns: True if character is contained by any range, False otherwise
        :rtype: bool
        '''
        if not self:
            return False
        if not isinstance(character, int):
            character = ord(character)
        # Pairing the probe with the largest known end makes bisect land
        # just after any range that starts exactly at `character`.
        upper = self[-1][-1]
        index = bisect.bisect_right(self, (character, upper)) - 1
        start, end = self[index]
        return start <= character < end

    def __repr__(self):
        '''
        Return repr(self).

        repr(object) -> string

        Return the canonical string representation of the object.
        For most object types, eval(repr(object)) == object.
        '''
        content = super(RangeGroup, self).__repr__()
        return '%s(%s)' % (self.__class__.__name__, content)
def merge(*range_lists, **kwargs):
    '''
    Join given range groups, collapsing their overlapping ranges. If only one
    group is given, this method will still fix it (sort and collapse).

    No typecheck is performed, so a valid range group is any iterable
    (or iterator) containing (start, end) iterable pairs. The result type is
    taken from the ``group_class`` keyword, defaulting to :class:`RangeGroup`.

    :param *range_lists: several range groups to join
    :type *range_lists: iterable of iterables
    :param group_class: result type, defaults to RangeGroup
    :type group_class: type
    :returns: merged range group (empty when no ranges are given)
    :rtype: taken from group_class
    '''
    # python2 has no keyword-only argument syntax, hence **kwargs. The
    # default is resolved lazily so explicit group_class callers never touch
    # the RangeGroup name.
    group_class = kwargs.pop('group_class', None)
    if group_class is None:
        group_class = RangeGroup
    range_list = sorted(
        unirange
        for range_list in range_lists
        for unirange in range_list
    )
    if not range_list:
        # No ranges at all: return an empty group. (Previously this fell
        # through to next() and raised a bare StopIteration.)
        return group_class(())
    iterator = iter(range_list)
    start, end = next(iterator)
    last = [start, end]
    end_last = end
    result = [last]
    for start, end in iterator:
        if start > end_last:
            # Disjoint from the current output range: open a new one.
            last = [start, end]
            end_last = end
            result.append(last)
        elif end > end_last:
            # Overlapping (or touching) range: extend the open one in place.
            last[1] = end_last = end
    return group_class(result)
def generate(categorize=unicodedata.category, group_class=RangeGroup):
    '''
    Generate a dict of RangeGroups for each unicode character category,
    including general ones.

    :param categorize: category function, defaults to unicodedata.category.
    :type categorize: callable
    :param group_class: class for range groups, defaults to RangeGroup
    :type group_class: type
    :returns: dictionary of categories and range groups
    :rtype: dict of RangeGroup
    '''
    ranges_by_category = collections.defaultdict(list)
    previous_category = None
    open_range = None
    # Single pass over every code point: grow the open range while the
    # category repeats, start a fresh one whenever it changes.
    for code in range(sys.maxunicode + 1):
        current_category = categorize(chr(code))
        if current_category == previous_category:
            open_range[1] += 1
        else:
            previous_category = current_category
            open_range = [code, code + 1]
            ranges_by_category[current_category].append(open_range)
    result = {
        category: group_class(ranges)
        for category, ranges in ranges_by_category.items()
    }
    # Add the one-letter general categories ('L', 'N', ...) by merging all
    # two-letter subcategories sharing that first letter.
    general = {}
    for letter, names in itertools.groupby(sorted(result),
                                           key=lambda name: name[0]):
        general[letter] = merge(*map(result.__getitem__, names))
    result.update(general)
    return result
|
{
"content_hash": "8293f4e61f73cd1949085d396a87378d",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 79,
"avg_line_length": 31.453416149068325,
"alnum_prop": 0.6070300157977883,
"repo_name": "ergoithz/unicategories",
"id": "67e5cb468ee4d4f38365b3ae13a625bdbb0b2594",
"size": "5064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unicategories/tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13860"
}
],
"symlink_target": ""
}
|
import good as G
from memsql_framework.util.attr_dict import AttrDict
class Flavor(AttrDict):
    """Validated description of an instance size (memory/cpu/disk)."""

    # Schema enforced on constructor keyword arguments.
    schema = G.Schema({
        "flavor_id": basestring,
        "memory": int,
        "cpu": int,
        "disk": int
    })

    def __init__(self, **params):
        # Validate before handing the data to the AttrDict base class.
        super(Flavor, self).__init__(Flavor.schema(params))

    def bigger_than(self, cpu, memory, disk):
        # True when this flavor exceeds ANY of the given limits.
        if self.cpu > cpu:
            return True
        if self.memory > memory:
            return True
        return self.disk > disk

    def __str__(self):
        return "Flavor(%s, %s, %s)" % (self.cpu, self.memory, self.disk)

    @property
    def memory_mb(self):
        # Memory in megabytes (schema stores gigabytes).
        return 1024 * self.memory

    @property
    def disk_mb(self):
        # Disk in megabytes (schema stores gigabytes).
        return 1024 * self.disk
# Predefined flavor catalogue, ordered smallest to largest.
FLAVORS = [
    Flavor(flavor_id="small", memory=16, cpu=4, disk=32),
    Flavor(flavor_id="medium", memory=24, cpu=6, disk=48),
    Flavor(flavor_id="large", memory=32, cpu=8, disk=64),
    Flavor(flavor_id="xlarge", memory=60, cpu=12, disk=120)
]
|
{
"content_hash": "3954d0e0c19c6c1c4bef2eb5f61581f7",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 73,
"avg_line_length": 27.685714285714287,
"alnum_prop": 0.5975232198142415,
"repo_name": "memsql/memsql-mesos",
"id": "c9ac5b85eeea8a475cff936985caf11ed099d4e8",
"size": "969",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "memsql_framework/scheduler/flavors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "689"
},
{
"name": "HTML",
"bytes": "502"
},
{
"name": "JavaScript",
"bytes": "30373"
},
{
"name": "Makefile",
"bytes": "3551"
},
{
"name": "Python",
"bytes": "103531"
},
{
"name": "Shell",
"bytes": "2545"
}
],
"symlink_target": ""
}
|
"""Tests for the file entry implementation using pybde."""
import unittest
from dfvfs.lib import definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import context
from dfvfs.resolver import resolver
from dfvfs.vfs import bde_file_entry
from dfvfs.vfs import bde_file_system
from tests import test_lib as shared_test_lib
class BDEFileEntryTest(shared_test_lib.BaseTestCase):
  """Tests the BDE file entry."""
  # Password that unlocks the BDE (BitLocker) test image bdetogo.raw.
  _BDE_PASSWORD = 'bde-TEST'
  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._resolver_context = context.Context()
    test_path = self._GetTestFilePath(['bdetogo.raw'])
    self._SkipIfPathNotExists(test_path)
    # Build an OS path spec and layer the BDE path spec on top of it.
    test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=test_path)
    self._bde_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_BDE, parent=test_os_path_spec)
    self._file_system = bde_file_system.BDEFileSystem(
        self._resolver_context, self._bde_path_spec)
    self._file_system.Open()
  def tearDown(self):
    """Cleans up the needed objects used throughout the test."""
    self._resolver_context.Empty()
  def testIntialize(self):
    """Test the __init__ function."""
    # NOTE(review): method name is misspelled ("Intialize"); kept as-is
    # since unittest discovers it by the "test" prefix regardless.
    file_entry = bde_file_entry.BDEFileEntry(
        self._resolver_context, self._file_system, self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    # TODO: test raises.
  def testCreationTime(self):
    """Test the creation_time property."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertIsNotNone(file_entry.creation_time)
  def testDataStreams(self):
    """Test the data streams property."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertEqual(file_entry.number_of_data_streams, 1)
    data_stream_names = []
    for data_stream in file_entry.data_streams:
      data_stream_names.append(data_stream.name)
    self.assertEqual(data_stream_names, [''])
  def testSize(self):
    """Test the size property."""
    # Currently the BDE volume needs to be unlocked before its size can
    # be determined.
    resolver.Resolver.key_chain.SetCredential(
        self._bde_path_spec, 'password', self._BDE_PASSWORD)
    unlocked_file_system = bde_file_system.BDEFileSystem(
        self._resolver_context, self._bde_path_spec)
    unlocked_file_system.Open()
    file_entry = unlocked_file_system.GetFileEntryByPathSpec(
        self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertEqual(file_entry.size, 67108864)
    # Clear the credential so the other tests see a locked volume again.
    resolver.Resolver.key_chain.SetCredential(
        self._bde_path_spec, 'password', None)
  def testSubFileEntries(self):
    """Test the sub file entries property."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertEqual(file_entry.number_of_sub_file_entries, 0)
    expected_sub_file_entry_names = []
    sub_file_entry_names = []
    for sub_file_entry in file_entry.sub_file_entries:
      sub_file_entry_names.append(sub_file_entry.name)
    self.assertEqual(
        len(sub_file_entry_names), len(expected_sub_file_entry_names))
    self.assertEqual(
        sorted(sub_file_entry_names), expected_sub_file_entry_names)
  def testGetDataStream(self):
    """Tests the GetDataStream function."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    data_stream_name = ''
    data_stream = file_entry.GetDataStream(data_stream_name)
    self.assertIsNotNone(data_stream)
    self.assertEqual(data_stream.name, data_stream_name)
    data_stream = file_entry.GetDataStream('bogus')
    self.assertIsNone(data_stream)
  def testGetFileEntryByPathSpec(self):
    """Tests the GetFileEntryByPathSpec function."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
  def testGetParentFileEntry(self):
    """Tests the GetParentFileEntry function."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    # A BDE volume is the root of its file system and has no parent.
    parent_file_entry = file_entry.GetParentFileEntry()
    self.assertIsNone(parent_file_entry)
  def testIsAllocated(self):
    """Test the IsAllocated function."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertTrue(file_entry.IsAllocated())
  def testIsDevice(self):
    """Test the IsDevice functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertFalse(file_entry.IsDevice())
  def testIsDirectory(self):
    """Test the IsDirectory functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertFalse(file_entry.IsDirectory())
  def testIsFile(self):
    """Test the IsFile functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertTrue(file_entry.IsFile())
  def testIsLink(self):
    """Test the IsLink functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertFalse(file_entry.IsLink())
  def testIsLocked(self):
    """Tests the IsLocked function."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    # No credential was set in setUp, so the volume is still locked here.
    self.assertTrue(file_entry.IsLocked())
  def testIsPipe(self):
    """Test the IsPipe functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertFalse(file_entry.IsPipe())
  def testIsRoot(self):
    """Test the IsRoot functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertTrue(file_entry.IsRoot())
  def testIsSocket(self):
    """Test the IsSocket functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertFalse(file_entry.IsSocket())
  def testIsVirtual(self):
    """Test the IsVirtual functions."""
    file_entry = self._file_system.GetFileEntryByPathSpec(self._bde_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertTrue(file_entry.IsVirtual())
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
{
"content_hash": "8b7055ad8108e150c28eb626cf6c3442",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 78,
"avg_line_length": 32.72682926829268,
"alnum_prop": 0.7138172603964823,
"repo_name": "joachimmetz/dfvfs",
"id": "4c438df01cb0c0f19521a022a697669460672a72",
"size": "6755",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/vfs/bde_file_entry.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14212"
},
{
"name": "Makefile",
"bytes": "122"
},
{
"name": "PowerShell",
"bytes": "1021"
},
{
"name": "Python",
"bytes": "2176548"
},
{
"name": "Shell",
"bytes": "19355"
}
],
"symlink_target": ""
}
|
import translator
from .. import utils
from collections import OrderedDict
import numpy as np
class PolishCrossTranslator(translator.Translator):
    """Polish cross, Ch optional as argument, input: q1 -> A, c3 -> Z.

    Each letter is addressed by one of nine key cells ('qweasdzxc', the
    positions of the cross) plus a digit 1-3 selecting the letter inside
    that cell.
    """
    # Maps each key letter to its cell index 0-8.
    key = dict(zip(list('qweasdzxc'), range(9)))

    def __init__(self, using_ch=True):
        self.setUsingCh(using_ch)

    def setUsingCh(self, using_ch):
        # Select the alphabet variant with or without the 'Ch' digraph.
        if (using_ch):
            self.alphabet = utils.alphabetWithCh
        else:
            self.alphabet = utils.alphabet

    def parseInput(self, cipher):
        """Lower-case the input, turn commas into spaces, split on spaces."""
        cipher = cipher.lower()
        # BUG FIX: str.replace returns a new string; the previous code
        # discarded the result, so commas were never treated as separators.
        cipher = cipher.replace(',', ' ')
        return cipher.split(" ")

    def translate(self, cipher):
        """Decode a cipher string such as "q1 c3" back to plain text."""
        ret = ""
        for i in self.parseInput(cipher):
            if (len(i) == 2 and i[0] in self.key):
                # cell index * 3 + (digit - 1) addresses the alphabet.
                # NOTE: original comment here read "moc velky" (Czech:
                # "too big") -- out-of-range digits are not validated.
                ret += self.alphabet[self.key[i[0]] * 3 + int(i[1]) - 1]
            else:
                if (i == ""):
                    ret += " "
                else:
                    # Pass untranslatable tokens through unchanged.
                    ret += i
        return ret

    def encode(self, cipher):
        """Encode plain text into space-separated pairs such as "q1 c3"."""
        ret = ""
        key = list('qweasdzxc')
        for i in cipher:
            if (not str(i).upper() in self.alphabet):
                # Non-alphabet characters (spaces, punctuation) pass through.
                ret += i
            else:
                index = self.alphabet.index(str(i).upper())
                ret += key[index // 3] + str((index % 3) + 1) + " "
        return ret[:-1]

    def graphicEncode(self, cipher, three_by_three_grid=False):
        """Splits input to words, draws letters in words over each other.
        If three_by_three_grid argument is False, 9x3 grid with individual letters in the polish cross will be used"""
        final_array = []
        # Layout of the key letters over the 3x3 cross.
        seq = [['q', 'w', 'e'], ['a', 's', 'd'], ['z', 'x', 'c']]
        for line in utils.line_split(cipher):
            l = []
            for word in self.encode(line).split(" "):
                if (three_by_three_grid):
                    c = np.zeros([3, 3])
                else:
                    c = np.zeros([3, 9])
                for polish_char in word.split(" "):
                    for a, x in enumerate(seq):
                        for b, y in enumerate(x):
                            if (y == polish_char[0]):
                                if (three_by_three_grid):
                                    c[a][b] = 1
                                else:
                                    c[a][b * 3 + int(polish_char[1]) - 1] = 1
                l.append(c)
            final_array.append(l)
        return utils.array_concat(final_array)
|
{
"content_hash": "6d56f2a0a680084b70ad6b5438bbe21c",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 113,
"avg_line_length": 27.164383561643834,
"alnum_prop": 0.5940494200706001,
"repo_name": "PrehistoricTeam/pycrypt",
"id": "ce4658c22c5ec8be77f4b7996d3fc6038526d603",
"size": "1983",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycrypt/translators/polishcrosstranslator.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "1320"
},
{
"name": "Python",
"bytes": "242676"
}
],
"symlink_target": ""
}
|
"""Module for application exceptions."""
class TermiusException(Exception):
    """Base class for all Termius application errors."""
class DoesNotExistException(TermiusException):
    """Raised when a model cannot be found in storage."""
class TooManyEntriesException(TermiusException):
    """Raised when more models are found than expected."""
class ArgumentRequiredException(ValueError):
    """Raised when a required CLI argument is missing."""
class InvalidArgumentException(ValueError):
    """Raised when a CLI argument has an invalid value."""
class SkipField(TermiusException):
    """Raised to signal that a field should be skipped."""
class OptionNotSetException(TermiusException):
    """Raised when a section does not contain the requested option."""
class AuthyTokenIssue(TermiusException):
    """Raised when an API error is caused by `authy_token`."""
class OutdatedVersion(TermiusException):
    """Raised when the API responds with HTTP status code 490."""
|
{
"content_hash": "01fd8665e554ba92d4e5f55bba730de6",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 65,
"avg_line_length": 25.216216216216218,
"alnum_prop": 0.7320471596998929,
"repo_name": "Crystalnix/serverauditor-sshconfig",
"id": "388f837a9de934e89111cf02b5330e2e13fa829c",
"size": "957",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "termius/core/exceptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "197778"
},
{
"name": "Shell",
"bytes": "80519"
}
],
"symlink_target": ""
}
|
from .sysvinit import InitdStatus
class RcdStatus(InitdStatus):
    """
    Same as ``initd_status`` but for BSD (/etc/rc.d) systems. Unlike Linux/init.d,
    BSD init scripts are well behaved and as such their output can be trusted.
    """
    # Shell snippet executed on the target host: for each rc.d script it
    # prints "<service name>=<exit code>" of its status/check subcommand.
    command = """
    for SERVICE in `find /etc/rc.d /usr/local/etc/rc.d -type f`; do
        $SERVICE status 2> /dev/null || $SERVICE check 2> /dev/null
        echo "`basename $SERVICE`=$?"
    done
    """
    # Fact value used when the command produces no output.
    default = dict
{
"content_hash": "23af5056546cdd5226316b7cb46cf109",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 82,
"avg_line_length": 28.823529411764707,
"alnum_prop": 0.6,
"repo_name": "Fizzadar/pyinfra",
"id": "0ebeb5aabfa7e3140dff5a333e75f7bbd2d5e728",
"size": "490",
"binary": false,
"copies": "1",
"ref": "refs/heads/2.x",
"path": "pyinfra/facts/bsdinit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "57"
},
{
"name": "Python",
"bytes": "861601"
},
{
"name": "Shell",
"bytes": "3448"
}
],
"symlink_target": ""
}
|
from pyjamas.builder.XMLFile import XMLFile
from pyjamas import Factory
from pyjamas import ui
from pyjamas.ui.MultiListener import MultiListener
from pyjamas.HTTPRequest import HTTPRequest
from pyjamas.ui.Tooltip import TooltipListener
from pyjamas.ui.CSS import StyleSheetCssFile
# All event listeners with a tuple that comprises of the listener add
# function and the additional (to 'self') parameters that are expected
# for the listener. E.g., def onClick(self, sender): ...
# See also pyjamas.ui.MultiListener.MultiListener.combinations
# Listener name -> (widget registration method, *extra callback parameters
# that follow 'self' in the handler signature).
eventListeners = dict(
    onClick = ("addClickListener", "sender"),
    onDoubleClick = ("addDoubleClickListener", "sender"),
    onChange = ("addChangeListener", "sender"),
    onFocus = ("addFocusListener", "sender"),
    onLostFocus = ("addFocusListener", "sender"),
    onLoad = ("addLoadListener", "sender"),
    onError = ("addLoadListener", "sender"),
    onKeyDown = ("addKeyboardListener", "sender", "keycode", "modifiers"),
    onKeyUp = ("addKeyboardListener", "sender", "keycode", "modifiers"),
    onKeyPress = ("addKeyboardListener", "sender", "keycode", "modifiers"),
    onMouseDown = ("addMouseListener", "sender", "x", "y"),
    onMouseUp = ("addMouseListener", "sender", "x", "y"),
    onMouseMove = ("addMouseListener", "sender", "x", "y"),
    onMouseEnter = ("addMouseListener", "sender"),
    onMouseLeave = ("addMouseListener", "sender"),
    onScroll = ("addScrollListener", "sender", "row", "col"),
    onCellClicked = ("addTableListener", "sender", "row", "col"),
    onTabSelected = ("addTabListener", "sender", "tabIndex"),
    onBeforeTabSelected = ("addTabListener", "sender", "tabIndex"),
    onTreeItemSelected = ("addTreeListener", "sender"),
    )
class BuilderState(object):
    """Bundles a Builder with its event target so widgets created during
    construction can reach both."""

    def __init__(self, builder, eventTarget):
        self.eventTarget = eventTarget
        self.builder = builder
class Builder(object):
    """Parses a builder XML description and instantiates pyjamas widget
    trees from it, wiring declared event handlers to an event target."""
    def __init__(self, text=None, addcss=False):
        self.builder_text = None
        self.css = None
        self.setText(text)
        # Optionally load the stylesheet named in the XML properties.
        if not addcss:
            return
        if not self.properties:
            return
        cssfile = self.properties.get('cssfile', None)
        if not cssfile:
            return
        self.css = StyleSheetCssFile(cssfile)
        print "setting CSS stylesheet", cssfile
    def setText(self, text):
        """Parse new builder XML, resetting all cached widget lookups.
        Passing None clears the builder state and removes any CSS."""
        if text is None:
            self.widgets_by_name = {}
            self.widget_instances = {}
            self.widget_order = {}
            self.widgets_by_class = {}
            self.properties = None
            self.components = None
            self.builder_text = None
            if self.css:
                self.css.remove()
                self.css = None
            return
        text = str(text) # XMLFile only accepts str not unicode!
        if text == self.builder_text: # don't redo the xml file if same
            return
        self.builder_text = text
        self.widgets_by_name = {}
        self.widget_instances = {}
        self.widget_order = {}
        self.widgets_by_class = {}
        self.properties, self.components = XMLFile(text).parse()
    def createInstance(self, instancename,
                             eventTarget=None, targetItem=None, index=None):
        """Instantiate the component named `instancename` (and its children)
        from the parsed XML; returns the root widget or None if not found."""
        widget_instances = {}
        widgets_by_name = {}
        widgets_by_class = {}
        widget_order = []
        # Recursively builds one widget and all of its declared children.
        def addItem(comp, props, childs, parentInstance, eventTarget):
            klsname = comp['name']
            modname = comp.get('module')
            if modname is None:
                modname = '.'.join(["pyjamas.ui", klsname])
            kls = Factory.lookupClass('.'.join([modname, klsname]))
            args = {}
            wprops = {}
            if props.has_key("common"):
                wprops.update(props['common'])
            if props.has_key("layout"):
                wprops.update(props['layout'])
            if props.has_key("widget"):
                wprops.update(props['widget'])
            # Translate declared property names to constructor keyword args.
            for n in kls._getProps():
                name = n[ui.PROP_NAME]
                if not wprops.has_key(name):
                    continue
                fname = n[ui.PROP_FNAM]
                if wprops[name] == '':
                    continue
                args[fname] = wprops[name]
            # create item with properties including weird ones
            # which can't fit into the name value structure
            item = kls(**args)
            if hasattr(item, "_setWeirdProps"):
                item._setWeirdProps(wprops, BuilderState(self, eventTarget))
            tooltip = wprops.get('tooltip')
            if tooltip is not None:
                item.addMouseListener(TooltipListener(tooltip))
            # Record the new widget in the per-instance lookup tables.
            identifier = comp['id']
            widget_order.append(identifier)
            widgets_by_name[identifier] = klsname
            widget_instances[identifier] = item
            l = widgets_by_class.get(klsname, [])
            l.append(identifier)
            widgets_by_class[klsname] = l
            #if parentInstance is not None:
            #    context = parentInstance.getIndexedChild(comp['index'])
            #    context.add(item.componentInstance)
            if modname == 'pyjamas.ui.TabPanel': # yuk! HACK!
                tabs = props.get('tabs', None)
                print "tab props", tabs
                print "element full props", props.get('elements', None)
            for (i, child) in enumerate(childs):
                if not child[0].has_key("type") or child[0]["type"] is None:
                    continue
                childitem = addItem(child[0], child[1], child[2], item,
                                    eventTarget)
                if childitem is None:
                    continue
                index = child[0]["index"]
                if modname == 'pyjamas.ui.TabPanel': # yuk! HACK!
                    index = (index, tabs[index])
                print "childitem", index, childitem
                item.addIndexedItem(index, childitem)
                if not "elements" in props:
                    props["elements"] = {}
                if not index in props["elements"]:
                    props["elements"][index] = {}
                # add child (by name) to item
                cname = child[0]["id"]
                setattr(item, cname, childitem)
                elemprops = props['elements'][index]
                print "elemprops", childitem, item, index, elemprops
                item.setElementProperties(childitem, elemprops)
            # make the event target the recipient of all events
            if eventTarget is not None and props.has_key("events"):
                added_already = []
                #print props["events"]
                for listener_name, listener_fn in props["events"].items():
                    if listener_name in added_already or not listener_fn:
                        continue
                    args = {}
                    args[listener_name] = listener_fn
                    fname = eventListeners[listener_name][0]
                    listener = MultiListener(eventTarget, **args)
                    setattr(item, "_%sListener" % fname, listener)
                    #print "add listener", listener_name, fname
                    listen_add_fn = getattr(item, fname)
                    listen_add_fn(listener)
            return item
        for frame, props, childs in self.components:
            if frame["id"] != instancename:
                continue
            if index is not None:
                frame["index"] = index
            item = addItem(frame, props, childs, targetItem, eventTarget)
            #left = frame.get("left")
            #top = frame.get("top")
            #if left is not None and top is not None:
            #    item.applicationVO.frame.setPopupPosition(left, top)
            #if frame.get("visible", True):
            #    item.show()
            #else:
            #    item.hide()
            self.widget_instances[instancename] = widget_instances
            self.widgets_by_name[instancename] = widgets_by_name
            self.widgets_by_class[instancename] = widgets_by_class
            self.widget_order[instancename] = widget_order
            return item
        return None
class HTTPUILoader:
    """Fetches a builder XML file over HTTP and forwards the outcome
    (loaded text, error or timeout) to the owning application object."""
    def __init__(self, app):
        self.app = app
    def load(self, xml_file):
        # Asynchronous GET; the callbacks below receive the result.
        HTTPRequest().asyncGet(xml_file, self)
    def onCompletion(self, text):
        self.app.onUILoaded(text)
    def onError(self, text, code):
        self.app.onUILoadError(text, code)
    def onTimeout(self, text):
        self.app.onUILoadingTimeout(text)
|
{
"content_hash": "5367c1e32c6a1e266c6f4979b6d8a532",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 76,
"avg_line_length": 38.10132158590309,
"alnum_prop": 0.5545149728292288,
"repo_name": "minghuascode/pyj",
"id": "19d796d8b39ba9b006111742f6c4a5480ce941e1",
"size": "8772",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "library/pyjamas/builder/Builder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "107608"
},
{
"name": "JavaScript",
"bytes": "116371"
},
{
"name": "PHP",
"bytes": "5473"
},
{
"name": "Python",
"bytes": "7572605"
},
{
"name": "Shell",
"bytes": "24231"
}
],
"symlink_target": ""
}
|
import operator
import argparse
import json
import sys
def find_name(p):
    """Return the placement's name: the 'n' key, falling back to 'nm'."""
    name = p.get('n')
    if name is not None:
        return name
    return p.get('nm')
def main(args):
    """Sort a placefile's placements by name (and each placement's 'p'
    list) and write the result as indented JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-o', '--outfile', metavar='FILE',
        type=argparse.FileType('wb'), default=sys.stdout)
    parser.add_argument('infile', type=argparse.FileType('rb'))
    opts = parser.parse_args(args)

    placerun = json.load(opts.infile)
    placerun['placements'].sort(key=find_name)
    for placement in placerun['placements']:
        placement['p'].sort()
    json.dump(placerun, opts.outfile, indent=2)
# Run as a script: forward the CLI arguments (minus the program name).
if __name__ == '__main__':
    main(sys.argv[1:])
|
{
"content_hash": "c45f7808e4edf0473764eae3aa5f1a0a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 63,
"avg_line_length": 25.814814814814813,
"alnum_prop": 0.6284074605451937,
"repo_name": "geoffrosen/vaginal-microbiome",
"id": "5100cf75fa1f8299d6351c2c7726ef2b304fb16d",
"size": "719",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bin/pplacer/scripts/sort_placefile.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "465317"
},
{
"name": "Shell",
"bytes": "9847"
}
],
"symlink_target": ""
}
|
'''
Canned bot response strings and the set of supported commands.
'''
# Replies keyed by situation; '{}' placeholders are filled via str.format.
RESPONSES = {
    'empty_add_command': 'There should be at least one Username to add. For example "/add @username" or "/add username".',
    'empty_remove_command': 'There is nobody to remove, your following list is empty',
    'remove_keyboard_message': 'Choose someone to remove',
    'empty_send_command': 'There should be a message after "/send". For example "/send I have a proof that there are reptilians in the government".',
    'empty_contact': 'The user is not in the Telegram yet.',
    'before_sticker_send': 'Someone has just gave me the sticker',
    'photo_caption': 'Somebody has just shown me that',
    'message_boilerplate': 'Somebody told me, that "{}"',
    'important_message': 'Important message for all the users - "{}"',
    'new_follower': "Hey bro, I've heard that one more someone started following you"
}
# Command names the bot accepts (without the leading slash).
COMMANDS = {
    'start',
    'remove',
    'send',
    'update',
    'send_all'
}
|
{
"content_hash": "dc048e1c7617554f0b6de648616f6cef",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 150,
"avg_line_length": 40.208333333333336,
"alnum_prop": 0.6652849740932643,
"repo_name": "vz10/secretBot",
"id": "22ddb44a6f812f3a68b4bfd79ec520d4b6487f49",
"size": "965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "consts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15142"
}
],
"symlink_target": ""
}
|
from pyranges.statistics import *
|
{
"content_hash": "bcd3b708c7114a76dee3cc0d5e7bf72f",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 33,
"avg_line_length": 34,
"alnum_prop": 0.8235294117647058,
"repo_name": "biocore-ntnu/pyranges",
"id": "3676e13e26bbbd178db563b73361cd2ed8e564c7",
"size": "34",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyranges/stats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "556921"
},
{
"name": "R",
"bytes": "360"
},
{
"name": "Shell",
"bytes": "345"
}
],
"symlink_target": ""
}
|
'''
New Integration Test for License.
@Antony Weijiang
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import os
import time
import datetime
import uuid
import json
import urllib2
# Project-specific test helpers provided by woodpecker's test_lib.
test_stub = test_lib.lib_get_test_stub()
#ding_url = "https://oapi.dingtalk.com/robot/send?access_token=d4a90949d4e4a0b1dc0dbb57989a58480795d67b82fb86ce848b801602cabe76"
#def sendMsg_to_Ding():
# header = {
# "Content-Type": "application/json",
# "Charset": "UTF-8"
# }
# data = {
# "at": {
# "isAtAll": True
# },
# "msgtype": "text",
# "text": {
# "content": "global config has changed"
# }
# }
# sendData = json.dumps(data)
# sendData = sendData.encode("utf-8")
# request = urllib2.Request(url=ding_url, data=sendData, headers=header)
# opener = urllib2.urlopen(request)
# test_util.test_logger(opener.read())
def environment(management_ip):
    """Return the path of the default global-config file that matches the
    license type installed on the management node.

    Logs in over SSH (root/password), reads the license type via
    zstack-cli, and maps it to /home/default_Community or
    /home/default_Paid. Skips the test for any other license type and
    fails if the expected default file is missing.
    """
    test_lib.lib_execute_ssh_cmd(management_ip, 'root', 'password', 'zstack-cli LogInByAccount accountName=admin password=password', 180)
    output_result = test_lib.lib_execute_ssh_cmd(management_ip, 'root', 'password', 'zstack-cli GetLicenseInfo | grep -i "licensetype" | awk -F ":" \'{print $2}\' | cut -d"\\"" -f2', 180)
    license_type = output_result.strip()
    # Only Community and Paid installs ship a reference config to diff against.
    if license_type not in ("Community", "Paid"):
        test_util.test_skip("skip global config case.because of Testing only covers community and enterprise")
    default_name = "/home/default_%s" % license_type
    if os.path.isfile(default_name):
        return default_name
    test_util.test_fail("can not find default global config: default_%s in /home directory" % license_type)
def check(file1_path, file2_path):
    """Diff the two files and fail the test case when they differ."""
    diff_result = test_stub.execute_shell_in_process_stdout('diff %s %s' %(file1_path,file2_path),'/tmp/%s' % uuid.uuid1().get_hex())
    if diff_result[0] == 0:
        return
    if not diff_result[1].strip():
        return
    # A non-empty diff means the global config drifted from the default.
    #sendMsg_to_Ding()
    test_util.test_fail("global config has changes ,more detalis as follow\n %s" %(diff_result[1]))
def verify_config(management_ip):
    """Dump the current global config via zstack-cli into a temp file and
    diff it against the default config for the installed license type."""
    tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
    test_util.test_logger("%s" %(tmp_file))
    test_lib.lib_execute_ssh_cmd(management_ip, 'root', 'password', 'zstack-cli LogInByAccount accountName=admin password=password', 180)
    test_lib.lib_execute_ssh_cmd(management_ip,'root','password','zstack-cli QueryGlobalConfig sortBy=name > %s' %(tmp_file), 180)
    default_path=environment(management_ip)
    test_util.test_logger("default global config path:%s" %(default_path))
    test_util.test_logger("check two file diff:%s, %s" %(default_path,tmp_file))
    # Fails the test case if the dumped config differs from the default.
    check(default_path,tmp_file)
def test():
    """Entry point: verify the global config defaults first under the
    Community license and then under a Paid (Prepaid, 1 day/1 CPU) one."""
    test_util.test_logger('start query global config')
    mn_ip = res_ops.query_resource(res_ops.MANAGEMENT_NODE)[0].hostName
    test_util.test_logger("%s" %(mn_ip))
    test_util.test_logger('Test Community Environment')
    test_stub.reload_default_license()
    verify_config(mn_ip)
    test_util.test_logger('Check Global Config with Commuity Environment pass')
    test_util.test_logger('Test Paid Environment')
    test_util.test_logger('Load Prepaid license with 1 day and 1 CPU')
    file_path = test_stub.gen_license('woodpecker', 'woodpecker@zstack.io', '1', 'Prepaid', '1', '')
    test_stub.load_license(file_path)
    verify_config(mn_ip)
    test_util.test_logger('Check Global Config with Paid Environment pass')
    test_util.test_pass('Check Global Config Pass')
def error_cleanup():
    # NOTE(review): test_obj_dict is not defined anywhere in this module,
    # so calling this would raise NameError -- confirm whether it should
    # be created at module level like in sibling test cases.
    test_lib.lib_error_cleanup(test_obj_dict)
|
{
"content_hash": "46339dbc4b1a89b5a794f4d8e30e31f6",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 187,
"avg_line_length": 40.71134020618557,
"alnum_prop": 0.6664978475563433,
"repo_name": "zstackio/zstack-woodpecker",
"id": "946d0945c21c558479c1848e4228488df33af936",
"size": "3949",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integrationtest/vm/license/test_global_config.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import frappe
import frappe.utils
from frappe.utils.oauth import get_oauth2_authorize_url, get_oauth_keys, login_via_oauth2, login_via_oauth2_id_token, login_oauth_user as _login_oauth_user, redirect_post_login
import json
from frappe import _
from frappe.auth import LoginManager
from frappe.integrations.doctype.ldap_settings.ldap_settings import LDAPSettings
from frappe.utils.password import get_decrypted_password
from frappe.utils.html_utils import get_icon_html
from frappe.integrations.oauth2_logins import decoder_compat
from frappe.website.utils import get_home_page
# NOTE(review): flag read by frappe's website renderer -- presumably
# disables caching of this page; confirm against the renderer.
no_cache = True
def get_context(context):
	"""Populate the login page context: redirect already-authenticated
	users, collect configured social login providers, LDAP settings and
	the label for the login input field."""
	redirect_to = frappe.local.request.args.get("redirect-to")
	if frappe.session.user != "Guest":
		# Already logged in: send the user to their landing page instead
		# of showing the login form again.
		if not redirect_to:
			if frappe.session.data.user_type=="Website User":
				redirect_to = get_home_page()
			else:
				redirect_to = "/app"
		if redirect_to != 'login':
			frappe.local.flags.redirect_location = redirect_to
			raise frappe.Redirect
	# get settings from site config
	context.no_header = True
	context.for_test = 'login.html'
	context["title"] = "Login"
	context["provider_logins"] = []
	context["disable_signup"] = frappe.utils.cint(frappe.db.get_value("Website Settings", "Website Settings", "disable_signup"))
	context["logo"] = frappe.get_hooks("app_logo_url")[-1]
	context["app_name"] = frappe.get_system_settings("app_name") or _("Frappe")
	providers = [i.name for i in frappe.get_all("Social Login Key", filters={"enable_social_login":1}, order_by="name")]
	for provider in providers:
		client_id, base_url = frappe.get_value("Social Login Key", provider, ["client_id", "base_url"])
		client_secret = get_decrypted_password("Social Login Key", provider, "client_secret")
		provider_name = frappe.get_value("Social Login Key", provider, "provider_name")
		icon = None
		icon_url = frappe.get_value("Social Login Key", provider, "icon")
		if icon_url:
			if provider_name != "Custom":
				icon = "<img src='{0}' alt={1}>".format(icon_url, provider_name)
			else:
				icon = get_icon_html(icon_url, small=True)
		# Only surface providers whose OAuth keys are fully configured.
		if (get_oauth_keys(provider) and client_secret and client_id and base_url):
			context.provider_logins.append({
				"name": provider,
				"provider_name": provider_name,
				"auth_url": get_oauth2_authorize_url(provider, redirect_to),
				"icon": icon
			})
			context["social_login"] = True
	ldap_settings = LDAPSettings.get_ldap_client_settings()
	context["ldap_settings"] = ldap_settings
	# Build the login-field label from the enabled identifier types.
	login_label = [_("Email")]
	if frappe.utils.cint(frappe.get_system_settings("allow_login_using_mobile_number")):
		login_label.append(_("Mobile"))
	if frappe.utils.cint(frappe.get_system_settings("allow_login_using_user_name")):
		login_label.append(_("Username"))
	context['login_label'] = ' {0} '.format(_('or')).join(login_label)
	return context
@frappe.whitelist(allow_guest=True)
def login_via_google(code, state):
	"""OAuth2 login callback for the Google provider."""
	login_via_oauth2("google", code, state, decoder=decoder_compat)
@frappe.whitelist(allow_guest=True)
def login_via_github(code, state):
	"""OAuth2 login callback for the GitHub provider."""
	login_via_oauth2("github", code, state)
@frappe.whitelist(allow_guest=True)
def login_via_facebook(code, state):
	"""OAuth2 login callback for the Facebook provider."""
	login_via_oauth2("facebook", code, state, decoder=decoder_compat)
@frappe.whitelist(allow_guest=True)
def login_via_frappe(code, state):
	"""OAuth2 login callback for the Frappe provider."""
	login_via_oauth2("frappe", code, state, decoder=decoder_compat)
@frappe.whitelist(allow_guest=True)
def login_via_office365(code, state):
	"""OAuth2 (id_token flow) login callback for Office 365."""
	login_via_oauth2_id_token("office_365", code, state, decoder=decoder_compat)
@frappe.whitelist(allow_guest=True)
def login_via_token(login_token):
	"""Log in using a one-time login token previously stored in the cache."""
	# expires=True removes the token on read, making it single-use.
	sid = frappe.cache().get_value("login_token:{0}".format(login_token), expires=True)
	if not sid:
		frappe.respond_as_web_page(_("Invalid Request"), _("Invalid Login Token"), http_status_code=417)
		return
	frappe.local.form_dict.sid = sid
	frappe.local.login_manager = LoginManager()
	# Send system users to desk, everyone else to the website.
	redirect_post_login(desk_user = frappe.db.get_value("User", frappe.session.user, "user_type")=="System User")
|
{
"content_hash": "2b0861bf36ff53386be2354410042124",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 176,
"avg_line_length": 38.01904761904762,
"alnum_prop": 0.7254509018036072,
"repo_name": "saurabh6790/frappe",
"id": "1ce25a81d93766b9fb8e7f3ac0845ba79dfd8d18",
"size": "4093",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "frappe/www/login.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63276"
},
{
"name": "HTML",
"bytes": "218921"
},
{
"name": "JavaScript",
"bytes": "2152738"
},
{
"name": "Less",
"bytes": "36947"
},
{
"name": "Makefile",
"bytes": "99"
},
{
"name": "Python",
"bytes": "3261616"
},
{
"name": "SCSS",
"bytes": "223084"
},
{
"name": "Shell",
"bytes": "3358"
},
{
"name": "Vue",
"bytes": "49860"
}
],
"symlink_target": ""
}
|
import re
from django.utils.safestring import mark_safe
import oembed
from oembed.constants import URL_RE, STANDALONE_URL_RE
from oembed.exceptions import OEmbedException
from oembed.parsers.base import BaseParser
class TextBlockParser(BaseParser):
    # Replaces every embeddable URL found anywhere in a free-form text
    # block with its rendered OEmbed markup.

    def parse_data(self, text, maxwidth, maxheight, template_dir, context,
                   urlize_all_links):
        """
        Parses a block of text indiscriminately

        Every URL in *text* is looked up in the OEmbed registry; successful
        lookups are replaced in place with the rendered embed.  URLs that
        raise OEmbedException are turned into plain anchors when
        ``urlize_all_links`` is True, otherwise left untouched.
        """
        # create a dictionary of user urls -> rendered responses
        replacements = {}
        user_urls = set(re.findall(URL_RE, text))

        for user_url in user_urls:
            try:
                resource = oembed.site.embed(user_url, maxwidth=maxwidth, maxheight=maxheight)
            except OEmbedException:
                # URL not embeddable: optionally fall back to a plain anchor
                if urlize_all_links:
                    replacements[user_url] = '<a href="%(LINK)s">%(LINK)s</a>' % {'LINK': user_url}
            else:
                # constrain the rendered embed to the requested maxima
                context['minwidth'] = min(maxwidth, resource.width)
                context['minheight'] = min(maxheight, resource.height)
                replacement = self.render_oembed(
                    resource,
                    user_url,
                    template_dir=template_dir,
                    context=context
                )
                replacements[user_url] = replacement.strip()

        # go through the text recording URLs that can be replaced
        # taking note of their start & end indexes
        user_urls = re.finditer(URL_RE, text)
        matches = []
        for match in user_urls:
            if match.group() in replacements:
                matches.append([match.start(), match.end(), match.group()])

        # replace the URLs in order, offsetting the indices each go
        for indx, (start, end, user_url) in enumerate(matches):
            replacement = replacements[user_url]
            difference = len(replacement) - len(user_url)

            # insert the replacement between two slices of text surrounding the
            # original url
            text = text[:start] + replacement + text[end:]

            # iterate through the rest of the matches offsetting their indices
            # based on the difference between replacement/original
            for j in xrange(indx + 1, len(matches)):
                matches[j][0] += difference
                matches[j][1] += difference

        return mark_safe(text)

    def extract_urls(self, text):
        """Return all URLs found in *text*, de-duplicated, in first-seen order."""
        urls = set()       # membership test set
        url_list = []      # preserves first-seen ordering
        for url in re.findall(URL_RE, text):
            if url not in urls:
                urls.add(url)
                url_list.append(url)
        return url_list
class TextParser(TextBlockParser):
    def parse_data(self, text, maxwidth, maxheight, template_dir, context,
                   urlize_all_links):
        """
        Parses a block of text rendering links that occur on their own line
        normally but rendering inline links using a special template dir
        """
        # inline URLs are delegated to a plain TextBlockParser with the
        # 'inline' template dir; standalone lines get the normal templates
        block_parser = TextBlockParser()
        lines = text.splitlines()
        parsed = []
        for line in lines:
            if STANDALONE_URL_RE.match(line):
                user_url = line.strip()
                try:
                    resource = oembed.site.embed(user_url, maxwidth=maxwidth, maxheight=maxheight)
                    context['minwidth'] = min(maxwidth, resource.width)
                    context['minheight'] = min(maxheight, resource.height)
                except OEmbedException:
                    # not embeddable: keep the raw line, or urlize it on request
                    if urlize_all_links:
                        line = '<a href="%(LINK)s">%(LINK)s</a>' % {'LINK': user_url}
                else:
                    # NOTE(review): minwidth/minheight were already set in the
                    # try block above; this repetition is redundant but harmless
                    context['minwidth'] = min(maxwidth, resource.width)
                    context['minheight'] = min(maxheight, resource.height)
                    line = self.render_oembed(
                        resource,
                        user_url,
                        template_dir=template_dir,
                        context=context)
            else:
                line = block_parser.parse(line, maxwidth, maxheight, 'inline',
                                          context, urlize_all_links)
            parsed.append(line)
        return mark_safe('\n'.join(parsed))
|
{
"content_hash": "7ed64c1e623d88a222fb6728f6d6727c",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 99,
"avg_line_length": 39.36036036036036,
"alnum_prop": 0.5296406500343328,
"repo_name": "akvo/djangoembed",
"id": "9f054283d907905b5efb6d7d4dad7b4eb7c7c1bd",
"size": "4369",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "oembed/parsers/text.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13295"
},
{
"name": "JavaScript",
"bytes": "19227"
},
{
"name": "Python",
"bytes": "147338"
}
],
"symlink_target": ""
}
|
from goscale.cms_plugins import GoscaleCMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
import models
# Template choices offered by the form plugin: the built-in defaults
# (overridable via GOSCALE_FORMS_PLUGIN_TEMPLATES) plus any extra
# project-defined entries from GOSCALE_FORMS_CUSTOM_PLUGIN_TEMPLATES.
GOSCALE_FORMS_PLUGIN_TEMPLATES = getattr(settings, 'GOSCALE_FORMS_PLUGIN_TEMPLATES', (
    ('form.html', _('Form')),
    ('form_popup.html', _('Form in a lightbox')),
)) + getattr(settings, 'GOSCALE_FORMS_CUSTOM_PLUGIN_TEMPLATES', ())
class FormPlugin(GoscaleCMSPluginBase):
    """
    Google Form plugin for GoScale
    """
    model = models.Form
    name = _("Google Form")

    # available templates and the default one (first entry)
    plugin_templates = GOSCALE_FORMS_PLUGIN_TEMPLATES
    render_template = GOSCALE_FORMS_PLUGIN_TEMPLATES[0][0]

    fieldsets = [
        [_('Form options'), {
            'fields': ['url', 'form_class']
        }]
    ]


plugin_pool.register_plugin(FormPlugin)
|
{
"content_hash": "0cdff88a308ac833ceffd520ec6e1156",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 86,
"avg_line_length": 30.814814814814813,
"alnum_prop": 0.6778846153846154,
"repo_name": "sternoru/goscalecms",
"id": "5f341796571b22a1dd2fb1493e1b5c7822d3abc1",
"size": "832",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "goscale/plugins/forms/cms_plugins.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "28440"
},
{
"name": "JavaScript",
"bytes": "78761"
},
{
"name": "Python",
"bytes": "348404"
},
{
"name": "Shell",
"bytes": "5096"
}
],
"symlink_target": ""
}
|
import multiprocessing
from .baseparallel import BaseParallelRunner
class MultiprocessingRunner(BaseParallelRunner):
    """
    Task runner class based on :class:`multiprocessing.Pool`.

    Tasks are submitted in dependency order: a node is submitted only when
    every predecessor in the task graph has been submitted and finished.
    """

    def __init__(self, num_proc=2):
        # num_proc: number of worker processes in the pool
        self.num_proc = num_proc
        self.pool = multiprocessing.Pool(num_proc)

    def submit_tasks(self):
        # Cache the topological order and each node's predecessor list once,
        # then kick off every task whose dependencies are already satisfied.
        self.cached_sorted_nodes = self.sorted_nodes()
        self.cached_predecessors = dict((n, self.graph.predecessors(n))
                                        for n in self.cached_sorted_nodes)
        self.results = {}
        self.submit_ready_tasks()

    def submit_ready_tasks(self):
        # Submit every node that has not been submitted yet and whose
        # predecessors have all been submitted AND finished (.ready()).
        results = self.results
        for node in self.cached_sorted_nodes:
            predecessors = self.cached_predecessors[node]
            if node not in results and \
               all(p in results and results[p].ready() for p in predecessors):
                results[node] = self.pool.apply_async(
                    self.run_func, [self.nodetaskmap[node]])

    def wait_tasks(self):
        # Block on each outstanding result, then submit newly-ready tasks,
        # until every node in nodetaskmap has been submitted and finished.
        # NOTE(review): .get() with no timeout blocks until completion, so
        # this is not a tight spin, but there is no sleep between passes.
        while True:
            for r in self.results.values():
                # This would raise an error if there is one in subprocesses
                r.get()
            if set(self.nodetaskmap) == set(self.results):
                break
            self.submit_ready_tasks()
|
{
"content_hash": "a2aec1fcf3a1139ff5e7fad76a9acf84",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 78,
"avg_line_length": 34.02564102564103,
"alnum_prop": 0.5855312735493594,
"repo_name": "tkf/buildlet",
"id": "e3e9f4cb2ba274602ae095a47f116f8613aa71b7",
"size": "1327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "buildlet/runner/multiprocessingpool.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "88618"
}
],
"symlink_target": ""
}
|
import os, re
import commands
from time import time
from TimerCommand import TimerCommand
import SiteMover
from futil import *
from PilotErrors import PilotErrors
from pUtil import tolog, readpar, getSiteInformation, extractFilePaths, getExperiment
from FileStateClient import updateFileState
from SiteInformation import SiteInformation
class xrdcpSiteMover(SiteMover.SiteMover):
""" SiteMover that uses xrdcp for both get and put """
# no registration is done
copyCommand = "xrdcp"
checksum_command = "adler32"
has_mkdir = False
has_df = False
has_getsize = False
has_md5sum = True
has_chmod = False
timeout = 3600
def __init__(self, setup_path, *args, **kwrds):
    """Initialize the mover; stores the stripped site setup script path.

    Extra positional/keyword arguments are accepted for interface
    compatibility and ignored.
    """
    self.__isSetuped = False
    self.__experiment = None
    self._defaultSetup = None
    self._setup = setup_path.strip()
def get_timeout(self):
    """Return the copy-command timeout in seconds."""
    return self.timeout
def log(self, errorLog):
    """Forward *errorLog* to the pilot log via tolog()."""
    tolog(errorLog)
def getLocalROOTSetup(self, si):
    """ Build command to prepend the xrdcp command [xrdcp will in general not be known in a given site] """
    # delegate to the SiteInformation object
    return si.getLocalROOTSetup()
def getSetup(self):
    """Return the setup string (pacman setup or setup script) for the copy command.

    Normalizes self._setup in place: strips whitespace, appends a trailing
    ';', forces --quiet onto atlasLocalSetup.sh, and prefixes 'source '
    unless the string already starts with export/source or contains an alias.
    """
    setup_str = ""
    self._setup = self._setup.strip()
    tolog("self setup: %s" % self._setup)

    if self._setup:  # non-empty after strip()
        if not self._setup.endswith(";"):
            self._setup += ";"
        if "alias" in self._setup:
            # alias-based setups are used verbatim
            setup_str = self._setup
        else:
            if "atlasLocalSetup.sh" in self._setup and "--quiet" not in self._setup:
                self._setup = self._setup.replace("atlasLocalSetup.sh", "atlasLocalSetup.sh --quiet")
            if self._setup.startswith(("export", "source")):
                setup_str = "%s" % self._setup
            else:
                setup_str = "source %s" % self._setup

    if setup_str != "":
        tolog("Using setup: %s" % (setup_str))

    return setup_str
def verifySetupCommand(self, _setupStr):
    """ Make sure the setup command exists """
    # Returns (status, outputRet): status is 0 on success or
    # PilotErrors.ERR_NOSUCHFILE when a sourced file is missing;
    # outputRet carries errorLog and a report dict.
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None

    # remove any '-signs
    _setupStr = _setupStr.replace("'", "")
    self.log("Will verify: %s" % (_setupStr))

    if _setupStr != "" and "source " in _setupStr:
        # first extract the file paths from the source command(s)
        setupPaths = extractFilePaths(_setupStr)

        # only run test if string begins with an "/"
        if setupPaths:
            # verify that the file paths actually exists
            for setupPath in setupPaths:
                # paths containing '-' are skipped (presumably script
                # arguments mistaken for paths) — TODO confirm intent
                if "-" in setupPath:
                    continue
                if os.path.exists(setupPath):
                    self.log("File %s has been verified" % (setupPath))
                else:
                    outputRet["errorLog"] = errorLog = "No such file or directory: %s" % (setupPath)
                    self.log('!!WARNING!!2991!! %s' % (errorLog))
                    statusRet = PilotErrors.ERR_NOSUCHFILE
                    break
        else:
            # nothing left to test
            pass
    else:
        self.log("Nothing to verify in setup: %s (either empty string or no source command)" % (_setupStr))

    return statusRet, outputRet
def verifySetupProxy(self, _setupStr, experiment):
    """Check that a valid grid proxy is available under the given setup.

    Returns the (status, output) pair from the experiment's verifyProxy().
    """
    thisExperiment = getExperiment(experiment)
    return thisExperiment.verifyProxy(envsetup=_setupStr)
def verifySetup(self, _setupStr, experiment, proxycheck=False):
    """Verify the environment setup string.

    Checks the sourced file paths, then that the copy command resolves
    under the setup (via 'which'), and optionally that a valid grid proxy
    exists. Returns (status, outputRet).
    """
    statusRet, outputRet = self.verifySetupCommand(_setupStr)
    if statusRet != 0:
        #self.prepareReport('RFCP_FAIL', self._variables['report'])
        outputRet["report"]["clientState"] = "RFCP_FAIL"
        return statusRet, outputRet

    # verify that the copy command is resolvable under this setup
    command = _setupStr
    if command != "" and not command.endswith(';'):
        command = command + ";"
    command += " which " + self.copyCommand
    self.log("Execute command: %s" % command)
    status, output = commands.getstatusoutput(command)
    self.log("Status: %s, Output: %s" % (status, output))
    if status != 0:
        self.log(self.copyCommand + " is not found in envsetup: " + _setupStr)
        #self.prepareReport('RFCP_FAIL', self._variables['report'])
        outputRet["report"]["clientState"] = "RFCP_FAIL"
        outputRet["errorLog"] = output
        return status, outputRet

    if proxycheck:
        status, outputLog = self.verifySetupProxy(_setupStr, experiment)
        if status != 0:
            outputRet["errorLog"] = outputLog
            outputRet["report"]["clientState"] = 'PROXYFAIL'
            return status, outputRet

    return status, outputRet
def setup(self, experiment):
    """ setup env """
    # Build and verify the environment setup string for xrdcp; on success
    # caches it in self._setup and marks the mover as set up. Returns
    # (status, output); output is None when setup was already done.
    if self.__isSetuped:
        return 0, None
    self.__experiment = experiment

    thisExperiment = getExperiment(experiment)
    self.useTracingService = thisExperiment.useTracingService()
    si = getSiteInformation(experiment)
    self._defaultSetup = self.getLocalROOTSetup(si)

    _setupStr = self._defaultSetup #self.getSetup()

    # get the user proxy if available
    envsetupTest = _setupStr.strip()
    if envsetupTest != "" and not envsetupTest.endswith(';'):
        envsetupTest += ";"
    if os.environ.has_key('X509_USER_PROXY'):
        envsetupTest += " export X509_USER_PROXY=%s;" % (os.environ['X509_USER_PROXY'])

    self.log("to verify site setup: %s " % envsetupTest)
    status, output = self.verifySetup(envsetupTest, experiment)
    self.log("site setup verifying: status: %s, output: %s" % (status, output["errorLog"]))
    if status == 0:
        self._setup = envsetupTest
        self.__isSetuped = True
        return status, output
    else:
        if self._defaultSetup:
            #try to use default setup
            # NOTE(review): since _setupStr above is already the default
            # setup, this fallback re-verifies the same string — confirm
            self.log("Try to use default envsetup")
            envsetupTest = self._defaultSetup.strip()
            if envsetupTest != "" and not envsetupTest.endswith(';'):
                envsetupTest += ";"
            if os.environ.has_key('X509_USER_PROXY'):
                envsetupTest += " export X509_USER_PROXY=%s;" % (os.environ['X509_USER_PROXY'])

            self.log("verify default setup: %s " % envsetupTest)
            status, output = self.verifySetup(envsetupTest, experiment)
            self.log("default setup verifying: status: %s, output: %s" % (status, output["errorLog"]))
            if status == 0:
                self._setup = envsetupTest
                self.__isSetuped = True
                return status, output

    return status, output
def fixStageInPath(self, path):
    """Fix the path"""
    # Convert an SRM PFN into a root:// TURL using the site copyprefix.
    # Returns (status, outputRet); outputRet['path'] always carries the
    # converted path, even when the conversion failed.
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None

    siteInformation = SiteInformation()
    cpt = siteInformation.getCopyTool(stageIn=True)
    # NOTE(review): format string has two %s but a single argument; this
    # only works if getCopyTool() returns a 2-tuple — confirm
    tolog("Site mover will use get command: %s, %s" % (cpt))

    # figure out which copyprefix to use (use the PFN to figure out where the file is and then use the appropriate copyprefix)
    # e.g. copyprefix=srm://srm-eosatlas.cern.ch,srm://srm-atlas.cern.ch^root://eosatlas.cern.ch/,root://castoratlas-xrdssl/
    # PFN=srm://srm-eosatlas.cern.ch/.. use copyprefix root://eosatlas.cern.ch/ to build the TURL src_loc_pfn
    # full example:
    # Using copyprefixin = srm://srm-eosatlas.cern.ch,srm://srm-atlas.cern.ch^root://eosatlas.cern.ch/,root://castoratlas-xrdssl/
    # PFN=srm://srm-eosatlas.cern.ch/eos/atlas/atlasdatadisk/rucio/mc12_8TeV/8d/c0/EVNT.01212395._000004.pool.root.1
    # TURL=root://eosatlas.cern.ch//eos/atlas/atlasdatadisk/rucio/mc12_8TeV/8d/c0/EVNT.01212395._000004.pool.root.1
    ret_path = siteInformation.getCopyPrefixPath(path, stageIn=True)
    if not ret_path.startswith("root:"):
        errorLog = "Failed to use copyprefix to convert the current path to local path."
        tolog("!!WARNING!!1777!! %s" % (errorLog))
        outputRet["errorLog"] = errorLog
        outputRet["report"]["clientState"] = 'PSTAGE_FAIL'
        statusRet = PilotErrors.ERR_STAGEINFAILED

    tolog("PFN=%s" % (path))
    tolog("TURL=%s" % (ret_path))
    outputRet['path'] = ret_path
    return statusRet, outputRet
def getStageInMode(self, lfn, prodDBlockToken, transferType):
    # should the root file be copied or read directly by athena?
    # Returns (status, output): status is PilotErrors.ERR_DIRECTIOFILE when
    # the file will be read directly (no copy needed), 0 otherwise; output
    # carries transfer_mode and report fields.
    status = 0
    output = {}
    output["errorLog"] = None
    output["report"] = {}
    output["report"]["clientState"] = None
    output["transfer_mode"] = None

    isRootFileName = self.isRootFileName(lfn)

    siteInformation = SiteInformation()
    directIn, transfer_mode = siteInformation.getDirectInAccessMode(prodDBlockToken, isRootFileName, transferType)
    if transfer_mode:
        output["transfer_mode"] = transfer_mode
    if directIn:
        output["report"]["clientState"] = 'FOUND_ROOT'
        output["report"]['relativeStart'] = None
        output["report"]['transferStart'] = None
        tolog("getStageInMode:() directIn=True")
        return PilotErrors.ERR_DIRECTIOFILE, output

    return 0, output
def stageInFile(self, source, destination):
    """StageIn the file. should be implementated by different site mover."""
    # Runs 'xrdcp -np source destination' under the verified setup with a
    # timeout. On failure the (possibly partial) local file is removed so
    # a retry can start clean. Returns (status, outputRet).
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None

    self.log("StageIn files started.")
    _cmd_str = '%s xrdcp -np %s %s' % (self._setup, source, destination)

    # update job setup script
    thisExperiment = getExperiment(self.__experiment)
    # add the full stage-out command to the job setup script
    to_script = _cmd_str.replace(destination, "`pwd`/%s" % os.path.basename(destination))
    to_script = to_script.lstrip(' ') # remove any initial spaces
    if to_script.startswith('/'):
        to_script = 'source ' + to_script
    thisExperiment.updateJobSetupScript(os.path.dirname(destination), to_script=to_script)

    self.log('Executing command: %s' % (_cmd_str))
    s = -1
    o = '(not defined)'
    t0 = os.times()
    outputRet["report"]['relativeStart'] = time()
    outputRet["report"]['transferStart'] = time()
    try:
        timerCommand = TimerCommand(_cmd_str)
        s, o = timerCommand.run(timeout=self.timeout)
    except Exception, e:
        tolog("!!WARNING!!2990!! Exception caught by stageInFile(): %s" % (str(e)))
        o = str(e)
    t1 = os.times()
    # index 4 of os.times() is elapsed wall-clock time
    t = t1[4] - t0[4]
    self.log("Command finished after %f s: %s" % (t, o.replace('\n', ' ')))

    if s == 0:
        self.log("Stagein succeeded")
    else:
        self.log("!!WARNING!!2990!! Command failed: %s" % (_cmd_str))
        o = o.replace('\n', ' ')
        #check_syserr(s, o)
        self.log("!!WARNING!!2990!! get_data failed. Status=%s Output=%s" % (s, str(o)))

        # remove the local file before any get retry is attempted
        _status = self.removeLocal(destination)
        if not _status:
            self.log("!!WARNING!!1112!! Failed to remove local file, get retry will fail")

        statusRet = PilotErrors.ERR_STAGEINFAILED
        outputRet["report"]["clientState"] = 'COPY_FAIL'

    return statusRet, outputRet
def verifyStageIN(self, sourceFile, sourceSize, sourceChecksum, destFile):
    """Verify file stagin successfull"""
    # Compare the staged-in local file against the catalogue size and
    # checksum. On any mismatch the local file is removed so a retry can
    # start clean. Returns (status, outputRet).
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None
    outputRet["report"]['validateStart'] = time()

    self.log("Verify file Staging: source: %s, sourceSize: %s, sourceChecksum: %s, destFile: %s" % (sourceFile, sourceSize, sourceChecksum, destFile))
    # nothing to verify when neither size nor checksum is known
    if sourceChecksum == 0 and sourceSize == 0:
        return statusRet, outputRet

    # get the checksum type (md5sum or adler32)
    if sourceChecksum != 0 and sourceChecksum != "":
        csumtype = self.getChecksumType(sourceChecksum)
    else:
        csumtype = "default"

    self.log("Getting destination file(%s) information." % destFile)
    status, output = self.getLocalFileInfo(destFile, checksumType=csumtype)
    if status != 0:
        self.log("Failed to get local file information")
        outputRet["report"]["clientState"] = "FILE_INFO_FAIL"
        outputRet["errorLog"] = output["errorLog"]
        _status = self.removeLocal(destFile)
        self.log("Remove local file.")
        if not _status:
            self.log("!!WARNING!!1112!! Failed to remove local file, get retry will fail")
        return status, outputRet

    destSize = output["size"]
    destChecksum = output["checksum"]
    self.log("Destination file information: file: %s, size: %s, checksum: %s" % (destFile, destSize, destChecksum))

    # compare remote and local file size
    if long(sourceSize) != 0 and long(destSize) != long(sourceSize):
        errorLog = "Remote and local file sizes do not match for %s (%s != %s)" %\
                   (os.path.basename(sourceFile), str(destSize), str(sourceSize))
        self.log("!!WARNING!!2990!! %s" % (errorLog))
        outputRet["errorLog"] = errorLog
        outputRet["report"]["clientState"] = "WRONG_SIZE"
        status = self.removeLocal(destFile)
        if not status:
            self.log("!!WARNING!!1112!! Failed to remove local file, get retry will fail")
        return PilotErrors.ERR_GETWRONGSIZE, outputRet

    # compare remote and local file checksum
    if sourceChecksum and str(destChecksum) != str(sourceChecksum) and not self.isDummyChecksum(sourceChecksum):
        outputRet["errorLog"] = errorLog = "Remote and local checksums (of type %s) do not match for %s (%s != %s)" %\
                                           (csumtype, os.path.basename(sourceFile), destChecksum, sourceChecksum)
        self.log("!!WARNING!!2990!! %s" % (errorLog))
        # remove the local file before any get retry is attempted
        _status = self.removeLocal(destFile)
        if not _status:
            self.log("!!WARNING!!1112!! Failed to remove local file, get retry will fail")

        if csumtype == "adler32":
            outputRet["report"]["clientState"] = "AD_MISMATCH"
            return PilotErrors.ERR_GETADMISMATCH, outputRet
        else:
            outputRet["report"]["clientState"] = "MD5_MISMATCH"
            return PilotErrors.ERR_GETMD5MISMATCH, outputRet

    self.log("Verify staging done.")
    outputRet["report"]["clientState"] = "DONE"
    return statusRet, outputRet
def stageIn(self, source, destination, sourceSize, sourceChecksum, experiment):
    """Stage in the source file.

    Pipeline: verify the environment setup, convert the PFN to a TURL,
    copy the file with xrdcp, then verify size/checksum of the local copy.
    Returns (status, outputRet) with errorLog/report from the failing (or
    final) step.
    """
    outputRet = {"errorLog": None, "report": None}

    def _propagate(step_output):
        # copy a step's error/report info into the aggregated result
        outputRet["errorLog"] = step_output["errorLog"]
        outputRet["report"] = step_output["report"]

    status, output = self.setup(experiment)
    if status != 0:
        _propagate(output)
        return status, outputRet

    status, output = self.fixStageInPath(source)
    if status != 0:
        _propagate(output)
        return status, outputRet
    source = output['path']

    status, output = self.stageInFile(source, destination)
    if status != 0:
        _propagate(output)
        return status, outputRet

    status, output = self.verifyStageIN(source, sourceSize, sourceChecksum, destination)
    _propagate(output)
    return status, outputRet
def getLocalFileInfo(self, fileName, checksumType="default", date=None):
    """ Return exit code (0 if OK), file size and checksum of a local file, as well as a date string if requested """
    # note that date is mutable
    # Returns (status, outputRet) with outputRet carrying size, checksum,
    # checksumType, errorLog and a report dict.
    outputRet = {}
    outputRet["errorLog"] = ""
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None
    outputRet["size"] = 0
    outputRet["checksum"] = ""
    outputRet["checksumType"] = checksumType

    self.log("Getting local File(%s) info." % fileName)
    # does the file exist?
    if not os.path.isfile(fileName):
        if fileName.find("DBRelease") >= 0 and os.path.exists(os.path.dirname(fileName)):
            # bug fix: the original referenced the undefined name
            # 'fileNameame' here, raising NameError instead of reporting
            # the missing DBRelease file
            outputRet["errorLog"] = errorLog = "DBRelease file missing: %s" % (fileName)
            self.log("!!WARNING!!2999!! %s" % (errorLog))
            return PilotErrors.ERR_MISSDBREL, outputRet
        else:
            outputRet["errorLog"] = errorLog = "No such file or directory: %s" % (fileName)
            self.log("!!WARNING!!2999!! %s" % (errorLog))
            return PilotErrors.ERR_MISSINGLOCALFILE, outputRet

    # get the modification time if needed and store it in the mutable object
    if date:
        date = SiteMover.getModTime(os.path.dirname(fileName), os.path.basename(fileName))

    # get the file size
    try:
        self.log("Executing getsize() for file: %s" % (fileName))
        outputRet["size"] = fsize = str(os.path.getsize(fileName))
    except OSError as e:
        outputRet["errorLog"] = errorLog = "Could not get file size: %s" % str(e)
        tolog("!!WARNING!!2999!! %s" % (errorLog))
        return PilotErrors.ERR_FAILEDSIZELOCAL, outputRet
    else:
        if fsize == "0":
            outputRet["errorLog"] = errorLog = "Encountered zero file size for file %s" % (fileName)
            self.log("!!WARNING!!2999!! %s" % (errorLog))
            return PilotErrors.ERR_ZEROFILESIZE, outputRet
        else:
            self.log("Got file size: %s" % (fsize))

    # get the checksum
    if checksumType == "adler32" or checksumType == "default":
        self.log("Executing adler32() for file: %s" % (fileName))
        outputRet["checksum"] = fchecksum = SiteMover.SiteMover.adler32(fileName)
        if fchecksum == '00000001': # "%08x" % 1L
            outputRet["errorLog"] = errorLog = "Adler32 failed (returned 1)"
            self.log("!!WARNING!!2999!! %s" % (errorLog))
            return PilotErrors.ERR_FAILEDADLOCAL, outputRet
        else:
            self.log("Got adler32 checksum: %s" % (fchecksum))
    else:
        # fall back to the external checksum command (e.g. md5sum)
        _cmd = '%s %s' % (CMD_CHECKSUM, fileName)
        self.log("Executing command: %s" % (_cmd))
        try:
            s, o = commands.getstatusoutput(_cmd)
        except Exception as e:
            s = -1
            o = str(e)
            self.log("!!WARNING!!2999!! Exception caught in getstatusoutput: %s" % (o))
        if s != 0:
            o = o.replace('\n', ' ')
            check_syserr(s, o)
            outputRet["errorLog"] = errorLog = "Error running checksum command (%s): %s" % (CMD_CHECKSUM, o)
            self.log("!!WARNING!!2999!! %s" % (errorLog))
            return PilotErrors.ERR_FAILEDMD5LOCAL, outputRet
        outputRet["checksum"] = fchecksum = o.split()[0]
        self.log("Got checksum: %s" % (fchecksum))

    return 0, outputRet
def fixStageOutPath(self, path):
    """Fix the path"""
    # Stage-out counterpart of fixStageInPath: convert an SRM PFN into a
    # root:// TURL via the site copyprefix (stageIn=False). Returns
    # (status, outputRet); outputRet['path'] always carries the result.
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None

    siteInformation = SiteInformation()
    cpt = siteInformation.getCopyTool(stageIn=False)
    # NOTE(review): format string has two %s but a single argument; this
    # only works if getCopyTool() returns a 2-tuple — confirm
    tolog("Site mover will use get command: %s, %s" % (cpt))

    # figure out which copyprefix to use (use the PFN to figure out where the file is and then use the appropriate copyprefix)
    # e.g. copyprefix=srm://srm-eosatlas.cern.ch,srm://srm-atlas.cern.ch^root://eosatlas.cern.ch/,root://castoratlas-xrdssl/
    # PFN=srm://srm-eosatlas.cern.ch/.. use copyprefix root://eosatlas.cern.ch/ to build the TURL src_loc_pfn
    # full example:
    # Using copyprefixin = srm://srm-eosatlas.cern.ch,srm://srm-atlas.cern.ch^root://eosatlas.cern.ch/,root://castoratlas-xrdssl/
    # PFN=srm://srm-eosatlas.cern.ch/eos/atlas/atlasdatadisk/rucio/mc12_8TeV/8d/c0/EVNT.01212395._000004.pool.root.1
    # TURL=root://eosatlas.cern.ch//eos/atlas/atlasdatadisk/rucio/mc12_8TeV/8d/c0/EVNT.01212395._000004.pool.root.1
    ret_path = siteInformation.getCopyPrefixPath(path, stageIn=False)
    if not ret_path.startswith("root:"):
        errorLog = "Failed to use copyprefix to convert the current path to local path."
        tolog("!!WARNING!!1777!! %s" % (errorLog))
        outputRet["errorLog"] = errorLog
        outputRet["report"]["clientState"] = 'PSTAGE_FAIL'
        # NOTE(review): ERR_STAGEINFAILED is used here although this is the
        # stage-out path — possibly should be a stage-out error code
        statusRet = PilotErrors.ERR_STAGEINFAILED

    tolog("PFN=%s" % (path))
    tolog("TURL=%s" % (ret_path))
    outputRet['path'] = ret_path
    return statusRet, outputRet
def stageOutFile(self, source, destination, token=None):
    """Stage out the file. Should be implementated by different site mover"""
    # Probes xrdcp's help output to pick a supported checksum flag, then
    # runs 'xrdcp -np -f' under a timeout. On failure the error output is
    # mapped to a pilot error and a remote cleanup is attempted.
    # Returns (status, outputRet); outputRet["output"] holds the raw
    # command output on success (used later to extract the checksum).
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None
    outputRet["output"] = None

    command = "%s xrdcp -h" % (self._setup)
    tolog("Execute command(%s) to decide whether -adler or --cksum adler32 to be used." % command)
    status_local, output_local = commands.getstatusoutput(command)
    tolog("status: %s, output: %s" % (status_local, output_local))
    checksum_option = ""
    if "-adler" in output_local:
        checksum_option = " -adler "
    elif "--cksum" in output_local:
        checksum_option = " --cksum adler32 "
        #checksum_option = " -adler " # currently use this one. --cksum will fail on some sites
    if checksum_option != "":
        tolog("Use (%s) to get the checksum" % checksum_option)
    else:
        tolog("Cannot find -adler nor --cksum. will not use checksum")
        #checksum_option = " -adler " # currently use this one. --cksum will fail on some sites

    # surl is the same as putfile
    _cmd_str = '%s xrdcp -np -f %s %s %s' % (self._setup, checksum_option, source, destination)
    tolog("Executing command: %s" % (_cmd_str))
    ec = -1
    t0 = os.times()
    o = '(not defined)'
    outputRet["report"]['relativeStart'] = time()
    outputRet["report"]['transferStart'] = time()
    try:
        timerCommand = TimerCommand(_cmd_str)
        ec, o = timerCommand.run(timeout=self.timeout)
    except Exception, e:
        tolog("!!WARNING!!2999!! xrdcp threw an exception: %s" % (o))
        o = str(e)
    outputRet["report"]['validateStart'] = time()
    t1 = os.times()
    # index 4 of os.times() is elapsed wall-clock time
    t = t1[4] - t0[4]
    tolog("Command finished after %f s" % (t))
    tolog("ec = %d, output = %s" % (ec, o.replace("\n", " ")))

    if ec != 0:
        tolog("!!WARNING!!2990!! Command failed: %s" % (_cmd_str))
        #check_syserr(ec, o)
        tolog('!!WARNING!!2990!! Stage Out failed: Status=%d Output=%s' % (ec, str(o.replace("\n", " "))))
        status, output = self.errorToReport(o, t, source, stageMethod="stageOut")
        if status == PilotErrors.ERR_FILEEXIST:
            return status, output

        # check if file was partially transferred, if so, remove it
        _ec, removeOutput = self.removeRemoteFile(destination)
        # NOTE(review): this logs only when removeRemoteFile returned 0
        # (success); a failed removal (non-zero ec) stays silent — the
        # condition looks inverted, confirm intended behavior
        if not _ec:
            self.log("Failed to remove file ") # i.e. do not retry stage-out
        return status, output
    else:
        outputRet["output"] = o

    return statusRet, outputRet
def getRemoteFileChecksum(self, full_surl, checksumType):
    """ Get checksum of a remote file with the xrdadler32 command.

    full_surl -- TURL of the remote file
    checksumType -- requested checksum type (unused; xrdadler32 is adler32)

    Returns the checksum string, or None if it could not be determined.
    """
    remote_checksum = None
    output = None

    cmd = "%s xrdadler32 %s" % (self._setup, full_surl)
    tolog("Executing command: %s" % (cmd))
    try:
        ec, output = commands.getstatusoutput(cmd)
    except Exception as e:
        tolog("Warning: (Exception caught) xrdadler32 failed: %s" % (e))
        output = None
    else:
        if ec != 0 or "[fail]" in output:
            tolog("Warning: xrdadler32 failed: %d, %s" % (ec, output))
        else:
            tolog("output: %s" % output)
            # xrdadler32 prints "<checksum> <file>"; the checksum is the
            # second-to-last whitespace-separated token
            try:
                remote_checksum = output.split()[-2]
            except Exception:
                tolog("!!WARNING!!1998!! Cannot extract checksum from output: %s" % (output))
            # bug fix: guard against None before calling isalnum() — the
            # original raised AttributeError when the extraction above failed
            if remote_checksum is not None and not remote_checksum.isalnum():
                tolog("!!WARNING!!1998!! Failed to extract alphanumeric checksum string from output: %s" % (output))
                remote_checksum = None

    return remote_checksum
def getRemoteFileChecksumFromOutput(self, output):
    """ Extract the remote checksum from xrdcp --chksum command output.

    Looks for an "md5: <hex>" or "adler32: <hex>" token in *output* and
    returns the checksum string, or None if none could be extracted.
    adler32 checksums are left-padded with zeros to the fixed 8-character
    length (xrdcp drops leading zeros in some versions).
    """
    remote_checksum = None

    if "xrootd" in output or "XRootD" in output or "adler32" in output:
        # pick the search pattern for whichever checksum type is present
        if "md5:" in output:
            checksum_pstr = r"md5: ([a-zA-Z0-9]+)"
        elif "adler32:" in output:
            checksum_pstr = r"adler32: ([a-zA-Z0-9]+)"
        else:
            checksum_pstr = None
            tolog("!!WARNING!!2999!! Checksum info not found in xrdcp output: %s" % (output))

        if checksum_pstr:
            # grab the checksum from the output
            _checksum = re.findall(checksum_pstr, output)
            if len(_checksum) > 0:
                remote_checksum = _checksum[0]
                # xrdcp can emit adler32 strings shorter than 8 hex digits
                # (leading zeros dropped); pad to the fixed length — this
                # generalizes the original's explicit handling of lengths 4-7
                if "adler32:" in output and len(remote_checksum) < 8:
                    tolog("!!WARNING!!1111!! Padding checksum to 8 characters (xrdcp returned a length %d checksum): %s" % (len(remote_checksum), remote_checksum))
                    remote_checksum = remote_checksum.zfill(8)
                tolog("Copy command returned checksum: %s" % (remote_checksum))
            else:
                tolog("!!WARNING!!2999!! checksum search failed: pattern (%s) not found in: %s" % (checksum_pstr, output))
                remote_checksum = None
    else:
        tolog("!!WARNING!!2999!! Unexpected xrdcp output: %s" % (output))

    return remote_checksum
def getRemoteFileSize(self, full_surl):
    """Return the remote file size.

    Not implemented for the xrdcp site mover; always returns None.
    """
    return None
def verifyStageOut(self, sourceFile, sourceSize, sourceChecksum, checksumType, destFile, destChecksum=None, destSize=None):
    """Verify that the stage-out succeeded.

    Compares local and remote checksums when available, otherwise falls
    back to a file size comparison. When destChecksum is not supplied it
    is queried from the remote storage via getRemoteFileInfo(). Returns
    (status, outputRet); outputRet["verified"] is True on success.
    """
    statusRet = 0
    outputRet = {}
    outputRet["errorLog"] = None
    outputRet["report"] = {}
    outputRet["report"]["clientState"] = None
    outputRet["verified"] = False

    self.log("verifying stageout")
    status = 0
    errorLog = None
    if destChecksum is None:
        status, output = self.getRemoteFileInfo(destFile, checksumType)
        errorLog = output["errorLog"]
        destSize = output["size"]
        destChecksum = output["checksum"]
        destChecksumType = output["checksumType"]

    self.log("Remote checksum: %s" % str(destChecksum))
    self.log("Local checksum: %s" % str(sourceChecksum))

    if status == 0:
        if destChecksum:
            if str(sourceChecksum) != str(destChecksum):
                outputRet["errorLog"] = errorLog = "Remote and local checksums (of type %s) do not match for %s (%s != %s)" %\
                                                   (checksumType, os.path.basename(destFile), destChecksum, sourceChecksum)
                self.log("!!WARNING!!1800!! %s" % (errorLog))
                if checksumType == "adler32":
                    outputRet["report"]["clientState"] = 'AD_MISMATCH'
                    return PilotErrors.ERR_PUTADMISMATCH, outputRet
                else:
                    outputRet["report"]["clientState"] = 'MD5_MISMATCH'
                    return PilotErrors.ERR_PUTMD5MISMATCH, outputRet
            else:
                self.log("Remote and local checksums verified")
                outputRet["verified"] = True
        else:
            # if the checksum could not be verified (as is the case for non-dCache sites) test the file size instead
            if destSize:
                self.log("Local file size: %s" % (sourceSize))
                if destSize and destSize != "" and sourceSize != "" and sourceSize:
                    if sourceSize != destSize:
                        outputRet["errorLog"] = errorLog = "Remote and local file sizes do not match for %s (%s != %s)" %\
                                                           (sourceFile, str(destSize), str(sourceSize))
                        self.log('!!WARNING!!2999!! %s' % (errorLog))
                        outputRet['report']["clientState"] = 'FS_MISMATCH'
                        return PilotErrors.ERR_PUTWRONGSIZE, outputRet
                    else:
                        self.log("Remote and local file sizes verified")
                        outputRet['verified'] = True
                else:
                    self.log("Skipped file size test")
    else:
        # bug fix: the original used '... %s" % ()' here, which raises
        # TypeError; report the actual error message instead
        self.log("Failed to get Remote file information: %s" % (errorLog))

    if outputRet['verified'] != True:
        # fail at this point
        outputRet["errorLog"] = errorLog = "Neither checksum nor file size could be verified (failing job)"
        self.log('!!WARNING!!2999!! %s' % (errorLog))
        outputRet['report']["clientState"] = 'NOFILEVERIFICATION'
        return PilotErrors.ERR_NOFILEVERIFICATION, outputRet

    self.log("verifying stageout done.")
    outputRet["report"]["clientState"] = "DONE"
    return statusRet, outputRet
def removeRemoteFile(self, full_surl):
""" Remove remote file"""
# No function to remove the remote file
ec = -2
rs = "No function to remote the remote file"
return ec, rs
def getRemoteFileInfo(self, destFile, checksumType):
""" Get Remote file info. Should be implementated by different site mover"""
status = 0
outputRet = {}
outputRet["errorLog"] = None
outputRet["report"] = {}
outputRet["report"]["clientState"] = None
outputRet["size"] = None
outputRet["checksum"] = None
outputRet["checksumType"] = checksumType
checksum = None
fileSize = None
checksum = self.getRemoteFileChecksum(destFile, checksumType)
if checksum == None:
fileSize = self.getRemoteFileSize(destFile)
outputRet["size"] = fileSize
outputRet["checksum"] = checksum
return status, outputRet
    def stageOut(self, source, destination, token, experiment):
        """Stage out the local source file to the remote destination.

        NOTE(review): original docstring said "Stage in"; the body performs
        stage-out (setup -> local file info -> path fix -> copy -> verify).

        :param source: local file path.
        :param destination: remote destination path (fixed up below).
        :param token: space token for the transfer.
        :param experiment: experiment name, forwarded to setup().
        :return: (status code, dict with errorLog/report and, on full
            completion, size/checksum of the local source file).
        """
        statusRet = 0
        outputRet ={}
        outputRet["errorLog"] = None
        outputRet["report"] = None
        # Prepare the transfer environment (proxy, tools, ...).
        status, output = self.setup(experiment)
        if status !=0:
            statusRet = status
            outputRet["errorLog"] = output["errorLog"]
            outputRet["report"] = output["report"]
            return statusRet, outputRet
        # Local size/checksum are needed later to verify the transfer.
        status, output = self.getLocalFileInfo(source)
        if status !=0:
            statusRet = status
            outputRet["errorLog"] = output["errorLog"]
            outputRet["report"] = output["report"]
            return statusRet, outputRet
        sourceSize = output["size"]
        sourceChecksum = output["checksum"]
        checksumType = output["checksumType"]
        # "default" maps to adler32 for this mover.
        if checksumType == "default":
            checksumType = "adler32"
        # Normalize/prepare the remote path before copying.
        status, output = self.fixStageOutPath(destination)
        if status != 0:
            statusRet = status
            outputRet["errorLog"] = output["errorLog"]
            outputRet["report"] = output["report"]
            return statusRet, outputRet
        destination = output['path']
        # Perform the actual copy.
        status, output = self.stageOutFile(source, destination, token)
        if status !=0:
            statusRet = status
            outputRet["errorLog"] = output["errorLog"]
            outputRet["report"] = output["report"]
            return statusRet, outputRet
        # The copy tool may already report the remote checksum in its output.
        destChecksum = self.getRemoteFileChecksumFromOutput(output["output"])
        # Compare local and remote checksum/size; destSize=None means it will
        # be fetched remotely if needed.
        status, output = self.verifyStageOut(source, sourceSize, sourceChecksum, checksumType, destination, destChecksum=destChecksum, destSize=None)
        statusRet = status
        outputRet["errorLog"] = output["errorLog"]
        outputRet["report"] = output["report"]
        outputRet["size"] = sourceSize
        outputRet["checksum"] = sourceChecksum
        return statusRet, outputRet
    def get_data(self, gpfn, lfn, path, fsize=0, fchecksum=0, guid=0, **pdict):
        """ copy input file from SE to local dir

        Pilot stage-in entry point for this mover (xrdcp tracing label).

        :param gpfn: full remote path (SURL) of the input file.
        :param lfn: local file name.
        :param path: destination directory; '' means current directory.
        :param fsize: expected file size (0 = unknown).
        :param fchecksum: expected checksum (0 = unknown).
        :param guid: file GUID, used for the tracing report.
        :param pdict: assorted job/transfer options (jobId, workDir, ...).
        :return: (status code, error log string).
        """
        error = PilotErrors()  # NOTE(review): unused in this method
        pilotErrorDiag = ""  # NOTE(review): unused in this method
        # Get input parameters from pdict
        jobId = pdict.get('jobId', '')
        workDir = pdict.get('workDir', '')
        experiment = pdict.get('experiment', '')
        transferType = pdict.get('transferType', '')
        proxycheck = pdict.get('proxycheck', False)  # NOTE(review): unused here
        # try to get the direct reading control variable (False for direct reading mode; file should not be copied)
        useCT = pdict.get('usect', True)  # NOTE(review): unused here
        prodDBlockToken = pdict.get('access', '')
        # get the Rucio tracing report
        report = self.getStubTracingReport(pdict['report'], 'xrdcp', lfn, guid)
        # Decide between copy-to-scratch and direct access for this file.
        status, output = self.getStageInMode(lfn, prodDBlockToken, transferType)
        if output["transfer_mode"]:
            # Record the selected transfer mode for this input file.
            updateFileState(lfn, workDir, jobId, mode="transfer_mode", state=output["transfer_mode"], ftype="input")
        if status !=0:
            self.prepareReport(output["report"], report)
            return status, output["errorLog"]
        if path == '': path = './'
        fullname = os.path.join(path, lfn)
        status, output = self.stageIn(gpfn, fullname, fsize, fchecksum, experiment)
        if status == 0:
            updateFileState(lfn, workDir, jobId, mode="file_state", state="transferred", ftype="input")
        # The tracing report is sent regardless of success or failure.
        self.prepareReport(output["report"], report)
        return status, output["errorLog"]
    def put_data(self, source, destination, fsize=0, fchecksum=0, **pdict):
        """ copy output file from disk to local SE

        Pilot stage-out entry point for this mover.

        :param source: local file to upload.
        :param destination: requested destination (actual SURL is derived
            via the site information object below).
        :param fsize: expected file size (0 = unknown).
        :param fchecksum: expected checksum (0 = unknown).
        :param pdict: assorted job/transfer options (lfn, guid, token, ...).
        :return: on success (0, diag, surl, size, checksum, arch type);
            on failure whatever put_data_retfail() produces.
        """
        # function is based on dCacheSiteMover put function
        error = PilotErrors()
        pilotErrorDiag = ""
        # Get input parameters from pdict
        alt = pdict.get('alt', False)
        lfn = pdict.get('lfn', '')
        guid = pdict.get('guid', '')
        token = pdict.get('token', '')
        scope = pdict.get('scope', '')
        dsname = pdict.get('dsname', '')
        analysisJob = pdict.get('analJob', False)
        testLevel = pdict.get('testLevel', '0')
        extradirs = pdict.get('extradirs', '')  # NOTE(review): unused here
        experiment = pdict.get('experiment', '')
        proxycheck = pdict.get('proxycheck', False)  # NOTE(review): unused here
        prodSourceLabel = pdict.get('prodSourceLabel', '')
        # get the site information object
        si = getSiteInformation(experiment)
        tolog("put_data received prodSourceLabel=%s" % (prodSourceLabel))
        if prodSourceLabel == 'ddm' and analysisJob:
            tolog("Treating PanDA Mover job as a production job during stage-out")
            analysisJob = False
        # get the Rucio tracing report
        report = self.getStubTracingReport(pdict['report'], 'xrdcp', lfn, guid)
        filename = os.path.basename(source)
        # get all the proper paths
        ec, pilotErrorDiag, tracer_error, dst_gpfn, lfcdir, surl = si.getProperPaths(error, analysisJob, token, prodSourceLabel, dsname, filename, scope=scope, alt=alt, sitemover=self) # quick workaround
        if ec != 0:
            reportState = {}
            reportState["clientState"] = tracer_error
            self.prepareReport(reportState, report)
            return self.put_data_retfail(ec, pilotErrorDiag)
        # get the RSE from ToA
        try:
            _RSE = self.getRSE(surl=surl)
        except Exception, e:
            # Best effort only: a missing RSE just degrades the tracing report.
            tolog("Warning: Failed to get RSE: %s (can not add this info to tracing report)" % str(e))
        else:
            report['localSite'], report['remoteSite'] = (_RSE, _RSE)
            tolog("RSE: %s" % (_RSE))
        if testLevel == "1":
            # Test hook: force a failure by copying a non-existent file.
            source = "thisisjustatest"
        status, output = self.stageOut(source, surl, token, experiment)
        if status !=0:
            self.prepareReport(output["report"], report)
            return self.put_data_retfail(status, output["errorLog"], surl)
        reportState = {}
        reportState["clientState"] = "DONE"
        self.prepareReport(reportState, report)
        return 0, pilotErrorDiag, surl, output["size"], output["checksum"], self.arch_type
def errorToReport(self, errorOutput, timeUsed, fileName, stageMethod='stageIN'):
status = 0
outputRet = {}
outputRet["errorLog"] = None
outputRet["report"] = {}
outputRet["report"]["clientState"] = None
if "File exists" in errorOutput or "SRM_FILE_BUSY" in errorOutput or "file already exists" in errorOutput:
pilotErrorDiag = "File already exist in the destination."
tolog("!!WARNING!!2990!! %s" % (pilotErrorDiag))
#self.prepareReport('FILE_EXIST', report)
outputRet["report"]["clientState"] = 'FILE_EXIST'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_FILEEXIST, outputRet
elif "Could not establish context" in errorOutput:
pilotErrorDiag = "Could not establish context: Proxy / VO extension of proxy has probably expired"
tolog("!!WARNING!!2990!! %s" % (pilotErrorDiag))
#self.prepareReport('CONTEXT_FAIL', report)
outputRet["report"]["clientState"] = 'CONTEXT_FAIL'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_NOPROXY, outputRet
elif "globus_xio:" in errorOutput:
pilotErrorDiag = "Globus system error: %s" % (errorOuput)
self.log("Globus system error encountered")
#self.prepareReport('GLOBUS_FAIL', report)
outputRet["report"]["clientState"] = 'GLOBUS_FAIL'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_GETGLOBUSSYSERR, outputRet
elif "No space left on device" in errorOutput:
pilotErrorDiag = "No available space left on local disk: %s" % (errorOutput)
tolog("No available space left on local disk")
#self.prepareReport('NO_SPACE', report)
outputRet["report"]["clientState"] = 'NO_SPACE'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_NOLOCALSPACE, outputRet
elif "No such file or directory" in errorOutput:
if "DBRelease" in fileName:
pilotErrorDiag = "Missing DBRelease file: %s" % (fileName)
tolog("!!WARNING!!2990!! %s" % (pilotErrorDiag))
#self.prepareReport('NO_DBREL', report)
outputRet["report"]["clientState"] = 'NO_DBREL'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_MISSDBREL, outputRet
else:
pilotErrorDiag = "No such file or directory: %s" % (fileName)
tolog("!!WARNING!!2990!! %s" % (pilotErrorDiag))
#self.prepareReport('NO_FILE_DIR', report)
outputRet["report"]["clientState"] = 'NO_FILE'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_NOSUCHFILE, outputRet
else:
if timeUsed >= self.timeout:
pilotErrorDiag = "Copy command self timed out after %d s" % (timeUsed)
tolog("!!WARNING!!2990!! %s" % (pilotErrorDiag))
if stageMethod == "stageIN":
#self.prepareReport('GET_TIMEOUT', report)
outputRet["report"]["clientState"] = 'GET_TIMEOUT'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_GETTIMEOUT, pilotErrorDiag
else:
#self.prepareReport('CP_TIMEOUT', report)
outputRet["report"]["clientState"] = 'CP_TIMEOUT'
outputRet["errorLog"] = pilotErrorDiag
return PilotErrors.ERR_PUTTIMEOUT, outputRet
else:
if len(errorOutput) == 0:
pilotErrorDiag = "Copy command returned error code %d but no output" % (s)
else:
pilotErrorDiag = errorOutput
#self.prepareReport('COPY_ERROR', report)
outputRet["report"]["clientState"] = 'COPY_ERROR'
outputRet["errorLog"] = pilotErrorDiag
if stageMethod == "stageIN":
return PilotErrors.ERR_STAGEINFAILED, outputRet
else:
return PilotErrors.ERR_STAGEOUTFAILED, outputRet
|
{
"content_hash": "9c333306c9d72ddb504d5ac5fd2a351f",
"timestamp": "",
"source": "github",
"line_count": 1007,
"max_line_length": 203,
"avg_line_length": 44.01191658391261,
"alnum_prop": 0.5717734657039711,
"repo_name": "PanDAWMS/pilot",
"id": "a98ed312e9c5735971013be1cc6a2ad8b0e100ee",
"size": "44677",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "xrdcpSiteMover.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4990965"
},
{
"name": "Shell",
"bytes": "23530"
}
],
"symlink_target": ""
}
|
from webob.exc import HTTPPreconditionFailed, HTTPNotFound, HTTPConflict, \
HTTPBadRequest
from webob import Response
from lunr.api.controller.base import BaseController
from lunr.db import NoResultFound
from lunr.db.models import Node, VolumeType
from lunr.db.helpers import filter_update_params
class NodeController(BaseController):
    """CRUD controller for storage nodes.

    ``self.db`` is the session helper supplied by BaseController; ``self.id``
    is the node id extracted from the route where a single node is addressed.
    """

    def index(self, request):
        """
        GET /v1.0/{account_id}/nodes

        List nodes, optionally filtered by name/status/volume_type_name.
        """
        allowed = ('name', 'status', 'volume_type_name')
        criteria = {}
        for key, value in request.params.items():
            if key in allowed:
                criteria[key] = value
        node_query = self.db.query(Node)
        if criteria:
            node_query = node_query.filter_by(**criteria)
        return Response([dict(row) for row in node_query.all()])

    def _validate_volume_type(self, params):
        """Ensure params name an existing ACTIVE volume type; return it."""
        if 'volume_type_name' not in params:
            raise HTTPBadRequest("Must specify 'volume_type_name'")
        volume_type_name = params['volume_type_name']
        volume_type = self.db.query(VolumeType).get(volume_type_name)
        if not volume_type or volume_type.status != "ACTIVE":
            raise HTTPPreconditionFailed("Invalid volume type '%s'" %
                                         volume_type_name)
        return volume_type

    def create(self, request):
        """
        POST /v1.0/{account_id}/nodes

        Create a node, or revive an existing record in DELETED/ERROR state.
        """
        params, meta_params = filter_update_params(request, Node)
        if not params.get('name'):
            raise HTTPPreconditionFailed("Must specify a 'name' parameter")
        self._validate_volume_type(params)
        try:
            params['size'] = int(params.get('size', 0))
        except ValueError:
            raise HTTPPreconditionFailed("'size' parameter must be an integer")
        params['meta'] = meta_params
        existing = self.db.query(Node).filter_by(name=params['name']).first()
        if existing and existing.status not in ('DELETED', 'ERROR'):
            raise HTTPConflict("Node '%s' already exists" % params['name'])
        # Either no record exists, or the old one may be overwritten.
        name = params.pop('name')
        params.setdefault('status', 'ACTIVE')
        node, _created = self.db.update_or_create(Node, updates=params,
                                                  name=name)
        self.db.refresh(node)
        return Response(dict(node))

    def delete(self, request):
        """
        DELETE /v1.0/{account_id}/nodes/{id}

        Soft-delete: mark the node DELETED rather than removing the row.
        """
        num_updated = self.db.query(Node).filter_by(id=self.id).update(
            {'status': 'DELETED'})
        self.db.commit()
        if not num_updated:
            raise HTTPNotFound("Cannot delete non-existent node '%s'" %
                               self.id)
        node = self.db.query(Node).get(self.id)
        return Response(dict(node))

    def show(self, request):
        """
        GET /v1.0/{account_id}/nodes/{id}

        Show a single node, including its computed storage usage.
        """
        found = self.db.query(Node).get(self.id)
        if not found:
            raise HTTPNotFound("Cannot show non-existent node '%s'" %
                               self.id)
        # FIXME: create a helper to do a get with the sumfunc built-in
        found.calc_storage_used()
        return Response(dict(found))

    def update(self, request):
        """
        POST /v1.0/{account_id}/nodes/{id}

        Update node attributes; metadata keys are merged, not replaced.
        """
        update_params, meta_params = filter_update_params(request, Node)
        if meta_params:
            # Merge incoming metadata into what the node already carries.
            node = self.db.query(Node).get(self.id)
            node.meta.update(meta_params)
            update_params['meta'] = node.meta
        num_updated = self.db.query(Node).filter_by(id=self.id).update(
            update_params)
        self.db.commit()
        if not num_updated:
            raise HTTPNotFound("Cannot update non-existent node '%s'" %
                               self.id)
        refreshed = self.db.query(Node).filter_by(id=self.id).one()
        return Response(dict(refreshed))
|
{
"content_hash": "87486e28c390e6682c048962d6247bcf",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 79,
"avg_line_length": 35.982300884955755,
"alnum_prop": 0.5607476635514018,
"repo_name": "audip/lunr",
"id": "ddc6d375ad0316f1b4ef9b5e6b4b7a01e210767d",
"size": "4660",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lunr/api/controller/node.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1014822"
},
{
"name": "Shell",
"bytes": "6589"
}
],
"symlink_target": ""
}
|
import collections
import datetime
import mongomock # Used for utcnow - please see https://github.com/mongomock/mongomock#utcnow
import six
import six.moves
import threading
lock = threading.RLock()
class ServerStore(object):
    """Top-level in-memory store: maps database names to DatabaseStore objects."""

    def __init__(self):
        self._databases = {}

    def __getitem__(self, db_name):
        # Databases spring into existence on first access.
        if db_name not in self._databases:
            self._databases[db_name] = DatabaseStore()
        return self._databases[db_name]

    def __contains__(self, db_name):
        return self[db_name].is_created

    def list_created_database_names(self):
        """Names of databases that hold (or held) real content."""
        created = []
        for name, db in self._databases.items():
            if db.is_created:
                created.append(name)
        return created
class DatabaseStore(object):
    """Holds the collections belonging to a single database."""

    def __init__(self):
        self._collections = {}

    def __getitem__(self, col_name):
        # Collections spring into existence on first access.
        if col_name not in self._collections:
            self._collections[col_name] = CollectionStore(col_name)
        return self._collections[col_name]

    def __contains__(self, col_name):
        return self[col_name].is_created

    def list_created_collection_names(self):
        """Names of collections that hold (or held) real content."""
        names = []
        for name, col in self._collections.items():
            if col.is_created:
                names.append(name)
        return names

    def create_collection(self, name):
        """Explicitly create (and return) the named collection."""
        collection = self[name]
        collection.create()
        return collection

    def rename(self, name, new_name):
        """Move a collection to a new name (creates it if absent)."""
        moved = self._collections.pop(name, CollectionStore(new_name))
        moved.name = new_name
        self._collections[new_name] = moved

    @property
    def is_created(self):
        # A database "exists" once any of its collections does.
        for col in self._collections.values():
            if col.is_created:
                return True
        return False
class CollectionStore(object):
    """In-memory backing store for a single collection.

    Documents live in an OrderedDict keyed by ``_id`` so insertion order is
    preserved. TTL indexes are tracked separately so expired documents can be
    purged lazily on every read-style access.
    """

    def __init__(self, name):
        self._documents = collections.OrderedDict()
        self.indexes = {}
        self._is_force_created = False
        self.name = name
        # Subset of ``indexes`` carrying expireAfterSeconds (TTL indexes).
        self._ttl_indexes = {}

    def create(self):
        # Mark as explicitly created even while still empty.
        self._is_force_created = True

    @property
    def is_created(self):
        # NOTE: returns a truthy/falsy value, not necessarily a bool.
        return self._documents or self.indexes or self._is_force_created

    def drop(self):
        # Reset to a pristine, "never created" state.
        self._documents = collections.OrderedDict()
        self.indexes = {}
        self._ttl_indexes = {}
        self._is_force_created = False

    def create_index(self, index_name, index_dict):
        self.indexes[index_name] = index_dict
        # Track TTL indexes separately so expiry checks stay cheap.
        if index_dict.get('expireAfterSeconds') is not None:
            self._ttl_indexes[index_name] = index_dict

    def drop_index(self, index_name):
        self._remove_expired_documents()

        # The main index object should raise a KeyError, but the
        # TTL indexes have no meaning to the outside.
        del self.indexes[index_name]
        self._ttl_indexes.pop(index_name, None)

    @property
    def is_empty(self):
        self._remove_expired_documents()
        return not self._documents

    def __contains__(self, key):
        self._remove_expired_documents()
        return key in self._documents

    def __getitem__(self, key):
        self._remove_expired_documents()
        return self._documents[key]

    def __setitem__(self, key, val):
        # Writes are guarded by the module-level re-entrant lock.
        with lock:
            self._documents[key] = val

    def __delitem__(self, key):
        del self._documents[key]

    def __len__(self):
        self._remove_expired_documents()
        return len(self._documents)

    @property
    def documents(self):
        """Iterate over the live (non-expired) documents."""
        self._remove_expired_documents()
        for doc in six.itervalues(self._documents):
            yield doc

    def _remove_expired_documents(self):
        """Apply every TTL index, deleting documents past their expiry."""
        for index in six.itervalues(self._ttl_indexes):
            self._expire_documents(index)

    def _expire_documents(self, index):
        # TODO(juannyg): use a caching mechanism to avoid re-expiring the documents if
        # we just did and no document was added / updated

        # Ignore non-integer values
        try:
            expiry = int(index['expireAfterSeconds'])
        except ValueError:
            return

        # Ignore compound keys
        if len(index['key']) > 1:
            return

        # "key" structure = list of (field name, direction) tuples
        ttl_field_name = index['key'][0][0]
        ttl_now = mongomock.utcnow()
        # Collect ids first, then delete, so we never mutate while iterating.
        expired_ids = [
            doc['_id'] for doc in six.itervalues(self._documents)
            if self._value_meets_expiry(doc.get(ttl_field_name), expiry, ttl_now)
        ]
        for exp_id in expired_ids:
            del self[exp_id]

    def _value_meets_expiry(self, val, expiry, ttl_now):
        # For arrays, expiry is based on their smallest comparable element.
        val_to_compare = _get_min_datetime_from_value(val)
        try:
            return (ttl_now - val_to_compare).total_seconds() >= expiry
        except TypeError:
            # Non-datetime values never expire.
            return False
def _get_min_datetime_from_value(val):
if not val:
return datetime.datetime.max
if isinstance(val, list):
return six.moves.reduce(_min_dt, [datetime.datetime.max] + val)
return val
def _min_dt(dt1, dt2):
try:
return dt1 if dt1 < dt2 else dt2
except TypeError:
return dt1
|
{
"content_hash": "f3db55150bea1f15dfb169ad01d3d734",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 94,
"avg_line_length": 28.7,
"alnum_prop": 0.6035617499032133,
"repo_name": "vmalloc/mongomock",
"id": "f2dc448d69bcc810af4468323524e6cfda52cf51",
"size": "5166",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mongomock/store.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "359"
},
{
"name": "Python",
"bytes": "207694"
}
],
"symlink_target": ""
}
|
"""
This module implements classes and methods for processing LAMMPS output
files (log and dump).
"""
import glob
import re
from io import StringIO
import numpy as np
import pandas as pd
from monty.io import zopen
from monty.json import MSONable
from pymatgen.io.lammps.data import LammpsBox
__author__ = "Kiran Mathew, Zhi Deng"
__copyright__ = "Copyright 2018, The Materials Virtual Lab"
__version__ = "1.0"
__maintainer__ = "Zhi Deng"
__email__ = "z4deng@eng.ucsd.edu"
__date__ = "Aug 1, 2018"
class LammpsDump(MSONable):
    """
    Object for representing dump data for a single snapshot.
    """

    def __init__(self, timestep, natoms, box, data):
        """
        Base constructor.

        Args:
            timestep (int): Current timestep.
            natoms (int): Total number of atoms in the box.
            box (LammpsBox): Simulation box.
            data (pd.DataFrame): Dumped atomic data, one row per atom.
        """
        self.timestep = timestep
        self.natoms = natoms
        self.box = box
        self.data = data

    @classmethod
    def from_string(cls, string):
        """
        Constructor from string parsing.

        Expects the standard LAMMPS dump layout: line 1 = timestep,
        line 3 = atom count, lines 5-7 = box bounds, line 8 = column
        headers ("ITEM: ATOMS ..."), lines 9+ = per-atom rows.

        Args:
            string (str): Input string.
        """
        lines = string.split("\n")
        timestep = int(lines[1])
        natoms = int(lines[3])
        box_arr = np.loadtxt(StringIO("\n".join(lines[5:8])))
        bounds = box_arr[:, :2]
        tilt = None
        if "xy xz yz" in lines[4]:
            # Triclinic box: the dump stores bounds extended by the tilt
            # factors; shrink them back to the true box bounds.
            # (Matches the xlo_bound/xhi_bound formulas in the LAMMPS
            # dump documentation.)
            tilt = box_arr[:, 2]
            x = (0, tilt[0], tilt[1], tilt[0] + tilt[1])
            y = (0, tilt[2])
            bounds -= np.array([[min(x), max(x)], [min(y), max(y)], [0, 0]])
        box = LammpsBox(bounds, tilt)
        data_head = lines[8].replace("ITEM: ATOMS", "").split()
        data = pd.read_csv(StringIO("\n".join(lines[9:])), names=data_head, delim_whitespace=True)
        return cls(timestep, natoms, box, data)

    @classmethod
    def from_dict(cls, d):
        """
        Args:
            d (dict): Dict representation (as produced by as_dict).

        Returns:
            LammpsDump
        """
        items = {"timestep": d["timestep"], "natoms": d["natoms"]}
        items["box"] = LammpsBox.from_dict(d["box"])
        # The data frame is serialized as JSON in "split" orientation.
        items["data"] = pd.read_json(d["data"], orient="split")
        return cls(**items)

    def as_dict(self):
        """
        Returns: MSONable dict
        """
        d = {}
        d["@module"] = self.__class__.__module__
        d["@class"] = self.__class__.__name__
        d["timestep"] = self.timestep
        d["natoms"] = self.natoms
        d["box"] = self.box.as_dict()
        # "split" orientation keeps column order stable on round-trip.
        d["data"] = self.data.to_json(orient="split")
        return d
def parse_lammps_dumps(file_pattern):
    """
    Generator that parses dump file(s).

    Args:
        file_pattern (str): Filename to parse. The timestep wildcard
            (e.g., dump.atom.'*') is supported and the files are parsed
            in the sequence of timestep.

    Yields:
        LammpsDump for each available snapshot.
    """
    files = glob.glob(file_pattern)
    if len(files) > 1:
        # Sort files by the integer timestep that sits where the wildcard
        # was, e.g. dump.atom.100 before dump.atom.1000.
        pattern = r"%s" % file_pattern.replace("*", "([0-9]+)")
        pattern = pattern.replace("\\", "\\\\")
        files = sorted(files, key=lambda f: int(re.match(pattern, f).group(1)))
    for fname in files:
        with zopen(fname, "rt") as f:
            dump_cache = []
            for line in f:
                if line.startswith("ITEM: TIMESTEP"):
                    # A new snapshot begins; flush the previous one.
                    if len(dump_cache) > 0:
                        yield LammpsDump.from_string("".join(dump_cache))
                    dump_cache = [line]
                else:
                    dump_cache.append(line)
            # Bug fix: only flush the trailing snapshot if one was actually
            # collected. Previously an empty file (or one without an
            # "ITEM: TIMESTEP" header) was fed to from_string and crashed
            # with an IndexError.
            if dump_cache:
                yield LammpsDump.from_string("".join(dump_cache))
def parse_lammps_log(filename="log.lammps"):
    """
    Parses log file with focus on thermo data. Both one and multi line
    formats are supported. Any incomplete runs (no "Loop time" marker)
    will not be parsed.

    Notes:
        SHAKE stats printed with thermo data are not supported yet.
        They are ignored in multi line format, while they may cause
        issues with dataframe parsing in one line format.

    Args:
        filename (str): Filename to parse.

    Returns:
        [pd.DataFrame] containing thermo data for each completed run.
    """
    with zopen(filename, "rt") as f:
        lines = f.readlines()
    # Markers delimiting each thermo section; the begin header text differs
    # between LAMMPS versions, hence the tuple of alternatives.
    begin_flag = (
        "Memory usage per processor =",
        "Per MPI rank memory allocation (min/avg/max) =",
    )
    end_flag = "Loop time of"
    begins, ends = [], []
    for i, l in enumerate(lines):
        if l.startswith(begin_flag):
            begins.append(i)
        elif l.startswith(end_flag):
            ends.append(i)

    def _parse_thermo(lines):
        # Parse one thermo section into a DataFrame.
        multi_pattern = r"-+\s+Step\s+([0-9]+)\s+-+"
        # multi line thermo data
        if re.match(multi_pattern, lines[0]):
            timestep_marks = [i for i, l in enumerate(lines) if re.match(multi_pattern, l)]
            timesteps = np.split(lines, timestep_marks)[1:]
            dicts = []
            # key = value pairs, e.g. "TotEng = -123.4"
            kv_pattern = r"([0-9A-Za-z_\[\]]+)\s+=\s+([0-9eE\.+-]+)"
            for ts in timesteps:
                data = {}
                # The step number comes from the section header line itself.
                data["Step"] = int(re.match(multi_pattern, ts[0]).group(1))
                data.update({k: float(v) for k, v in re.findall(kv_pattern, "".join(ts[1:]))})
                dicts.append(data)
            df = pd.DataFrame(dicts)
            # rearrange the sequence of columns
            columns = ["Step"] + [k for k, v in re.findall(kv_pattern, "".join(timesteps[0][1:]))]
            df = df[columns]
        # one line thermo data
        else:
            df = pd.read_csv(StringIO("".join(lines)), delim_whitespace=True)
        return df

    runs = []
    # zip truncates at the shorter list, so a trailing begin marker with no
    # matching "Loop time" (an incomplete run) is silently dropped.
    for b, e in zip(begins, ends):
        runs.append(_parse_thermo(lines[b + 1 : e]))
    return runs
|
{
"content_hash": "9bb5162572a52832828ca377a5c1594b",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 98,
"avg_line_length": 30.93121693121693,
"alnum_prop": 0.5427642832706124,
"repo_name": "vorwerkc/pymatgen",
"id": "038f1244535f866139f1ace4d27e672079cdb127",
"size": "5940",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pymatgen/io/lammps/outputs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "87"
},
{
"name": "CSS",
"bytes": "7572"
},
{
"name": "Cython",
"bytes": "38792"
},
{
"name": "HTML",
"bytes": "12642493"
},
{
"name": "Python",
"bytes": "8941675"
},
{
"name": "Roff",
"bytes": "1407429"
},
{
"name": "Shell",
"bytes": "12010"
}
],
"symlink_target": ""
}
|
from django.db import models
# Create your models here.
class Event(models.Model):
    """An event lasting up to three days, with imagery and a registration link.

    Most fields are optional so an event can be filled in incrementally
    from the admin.
    """
    # Verbose names are Spanish admin labels; they are runtime strings and
    # must stay as-is.
    first_date = models.DateField(verbose_name='Primer Dia', blank=True, null=True)
    second_date = models.DateField(verbose_name='Segundo Dia', blank=True, null=True)
    third_date = models.DateField(verbose_name='Tercer Dia', blank=True, null=True)
    title = models.CharField(max_length=50, blank=True, null=True)
    place = models.CharField(max_length=100, blank=True, null=True)
    image = models.ImageField(upload_to='event/', verbose_name='Imagen principal')
    registration = models.URLField(verbose_name='Link de registro')
    banner = models.ImageField(upload_to='event/banner/', verbose_name='Banner', blank=True, null=True)
    logo = models.ImageField(upload_to='event/logo/', verbose_name='Logo', blank=True, null=True)

    def __str__(self):
        # NOTE(review): title is nullable, so this can return None for
        # partially-created events — confirm the admin tolerates that.
        return self.title
class SponsorCategory(models.Model):
    """Sponsor tier; number_order ('Posicion') presumably controls display order."""
    number_order = models.IntegerField(verbose_name='Posicion')
    name = models.CharField(max_length=25)

    def __str__(self):
        return self.name
class Sponsor(models.Model):
    """A sponsor (name, logo, link) belonging to one SponsorCategory."""
    name = models.CharField(max_length=50)
    image = models.ImageField(upload_to='sponsors/')
    # NOTE(review): field name "categori" is misspelled, but renaming it
    # would require a schema migration — left unchanged.
    categori = models.ForeignKey(SponsorCategory)
    url = models.URLField()

    def __str__(self):
        return self.name + ' categoria ' + self.categori.name
class Person(models.Model):
    """Concrete base model (multi-table inheritance) for people shown on the site."""
    name = models.CharField(max_length=50)
    image = models.ImageField(upload_to='person/')
    # presumably toggles public listing — verify against the templates
    visible = models.BooleanField()

    def __str__(self):
        return self.name
class Facilitator(Person):
    """A Person with a bio and a Twitter handle."""
    bio = models.TextField(max_length=500)
    # 25 chars suggests a bare handle rather than a URL — TODO confirm
    twitter = models.CharField(max_length=25)

    def __str__(self):
        return self.name
class Mentor(Person):
    """A Person with a bio and a job position/title."""
    bio = models.TextField(max_length=500)
    position = models.CharField(max_length=500)

    def __str__(self):
        return self.name
class Judges(Person):
    """A judging Person with a bio and a job position/title."""
    bio = models.TextField(max_length=500)
    position = models.CharField(max_length=500)

    def __str__(self):
        # Bug fix: method was misspelled "__srt__", so Django never called it
        # and instances fell back to the default Model representation.
        return self.name
class Organizer(Person):
    """An organizing Person; ordered by the unique number_order field."""
    number_order = models.IntegerField(unique=True, verbose_name='Numero de orden')

    def __str__(self):
        # Bug fix: method was misspelled "__srt__", so Django never called it
        # and instances fell back to the default Model representation.
        return self.name
class Collaborator(Person):
    """A collaborating Person; adds no fields beyond Person."""
    pass

    def __str__(self):
        return self.name
|
{
"content_hash": "b5bb2f53720a23b16eb106c55634f2f2",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 103,
"avg_line_length": 28.23170731707317,
"alnum_prop": 0.6781857451403888,
"repo_name": "elpargo/startupweekenddo",
"id": "3dc2ee9b41de58b32547ee32e020bc501170119b",
"size": "2315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/collaborators/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4709"
},
{
"name": "HTML",
"bytes": "17088"
},
{
"name": "JavaScript",
"bytes": "1496"
},
{
"name": "Python",
"bytes": "19187"
}
],
"symlink_target": ""
}
|
"""
Copyright 2018-03-02 Alberto Hata
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import shutil
import time
import random
import numpy as np
import numpy.random as npr
import argparse
import cv2
import torch
import rospy
import message_filters
from cv_bridge import CvBridge, CvBridgeError
from sensor_msgs.msg import Image
import re
from graphviz import Digraph, Source
from shapely.geometry import box
# Root directory of the project
import os
import sys
ROOT_DIR = os.path.abspath(os.path.join(os.path.realpath(__file__), '../..'))
LIB_PATH = os.path.join(ROOT_DIR, "msdn")
print (LIB_PATH)
sys.path.append(LIB_PATH)
dir(sys.modules[__name__])
from faster_rcnn import network
from faster_rcnn.MSDN import Hierarchical_Descriptive_Model
from faster_rcnn.utils.timer import Timer
from faster_rcnn.fast_rcnn.config import cfg
from faster_rcnn.datasets.visual_genome_loader import visual_genome
from faster_rcnn.utils.HDN_utils import get_model_name, group_features
from visualize_cv import draw_bbox_label_msdn
import std_msgs.msg
from turtlebot2i_scene_graph.msg import SceneGraph
from turtlebot2i_safety.msg import VelocityScale
# Pin CUDA device enumeration to PCI bus order and restrict training to GPU 0.
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="0"

# Global timing switch taken from the faster_rcnn configuration.
TIME_IT = cfg.TIME_IT

# Command-line interface for training/inference of the MSDN model.
parser = argparse.ArgumentParser('Options for training Hierarchical Descriptive Model in pytorch')

#Training parameters
parser.add_argument('--lr', type=float, default=0.001, metavar='LR', help='base learning rate for training')
parser.add_argument('--max_epoch', type=int, default=10, metavar='N', help='max iterations for training')
parser.add_argument('--momentum', type=float, default=0.99, metavar='M', help='percentage of past parameters to store')
parser.add_argument('--log_interval', type=int, default=1000, help='Interval for Logging')
parser.add_argument('--step_size', type=int, default = 1, help='Step size for reduce learning rate')
parser.add_argument('--resume_training', action='store_true', default = True, help='Resume training from the model [resume_model]')
parser.add_argument('--resume_model', type=str, default=os.path.join(ROOT_DIR, "models/100_epoch_best.h5"), help='The model we resume')
parser.add_argument('--load_RPN', action='store_true', help='To end-to-end train from the scratch')
parser.add_argument('--enable_clip_gradient', action='store_true', help='Whether to clip the gradient')
parser.add_argument('--use_normal_anchors', action='store_true', help='Whether to use kmeans anchors')

# structure settings
parser.add_argument('--disable_language_model', action='store_true', help='To disable the Lanuage Model ')
parser.add_argument('--mps_feature_len', type=int, default=1024, help='The expected feature length of message passing')
parser.add_argument('--dropout', action='store_true', help='To enables the dropout')
parser.add_argument('--MPS_iter', type=int, default=1, help='Iterations for Message Passing')
parser.add_argument('--gate_width', type=int, default=128, help='The number filters for gate functions in GRU')
parser.add_argument('--nhidden_caption', type=int, default=512, help='The size of hidden feature in language model')
parser.add_argument('--nembedding', type=int, default=256, help='The size of word embedding')
parser.add_argument('--rnn_type', type=str, default='LSTM_normal', help='Select the architecture of RNN in caption model[LSTM_im | LSTM_normal]')
parser.add_argument('--caption_use_bias', action='store_true', default=True, help='Use the flap to enable the bias term to caption model')
parser.add_argument('--caption_use_dropout', action='store_const', const=0.5, default=0.5, help='Set to use dropout in caption model')
# --enable_bbox_reg / --disable_bbox_reg both write the same dest; the
# default is set via set_defaults below.
parser.add_argument('--enable_bbox_reg', dest='region_bbox_reg', action='store_true')
parser.add_argument('--disable_bbox_reg', dest='region_bbox_reg', action='store_false')
parser.set_defaults(region_bbox_reg=True)
parser.add_argument('--use_kernel_function', action='store_true')

# Environment Settings
parser.add_argument('--seed', type=int, default=1, help='set seed to some constant value to reproduce experiments')
parser.add_argument('--saved_model_path', type=str, default = './models/RPN/RPN_region_best.h5', help='The Model used for initialize')
parser.add_argument('--dataset_option', type=str, default='normal', help='The dataset to use (small | normal | fat)')
parser.add_argument('--output_dir', type=str, default='./models/HDN_resume', help='Location to output the model')
parser.add_argument('--model_name', type=str, default='HDN', help='The name for saving model.')
parser.add_argument('--nesterov', action='store_true', help='Set to use the nesterov for SGD')
parser.add_argument('--finetune_language_model', action='store_true', help='Set to disable the update of other parameters')
parser.add_argument('--optimizer', type=int, default=0, help='which optimizer used for optimize language model [0: SGD | 1: Adam | 2: Adagrad]')
class ros_msdn:
    """ROS node wrapper around the MSDN Hierarchical Descriptive Model.

    Subscribes to time-synchronized RGB + depth image topics, runs
    scene-graph / region-caption inference on each frame, and renders the
    resulting graphs with Graphviz (scene_graph.gv / region_graph.gv).
    """
    def __init__(self):
        """Parse CLI args, load the trained network and wire up ROS topics."""
        # To set the model name automatically
        args = parser.parse_args()
        print args
        args = get_model_name(args)
        print 'Model name: {}'.format(args.model_name)
        # True until the first callback: used to open a graph viewer once
        # (see the `view=self.check` render calls in callback()).
        self.check = True
        # To set the random seed
        random.seed(args.seed)
        torch.manual_seed(args.seed + 1)
        torch.cuda.manual_seed(args.seed + 2)
        print("Loading training params"),
        self.train_set = visual_genome('normal', 'train')
        print("Done.")
        self.train_loader = torch.utils.data.DataLoader(self.train_set, batch_size=1, shuffle=True, num_workers=8, pin_memory=True)
        end = time.time()
        # Model declaration
        self.net = Hierarchical_Descriptive_Model(nhidden=args.mps_feature_len,
                    n_object_cats=self.train_set.num_object_classes,
                    n_predicate_cats=self.train_set.num_predicate_classes,
                    n_vocab=self.train_set.voc_size,
                    voc_sign=self.train_set.voc_sign,
                    max_word_length=self.train_set.max_size,
                    MPS_iter=args.MPS_iter,
                    use_language_loss=not args.disable_language_model,
                    object_loss_weight=self.train_set.inverse_weight_object,
                    predicate_loss_weight=self.train_set.inverse_weight_predicate,
                    dropout=args.dropout,
                    use_kmeans_anchors=not args.use_normal_anchors,
                    gate_width = args.gate_width,
                    nhidden_caption = args.nhidden_caption,
                    nembedding = args.nembedding,
                    rnn_type=args.rnn_type,
                    rnn_droptout=args.caption_use_dropout, rnn_bias=args.caption_use_bias,
                    use_region_reg = args.region_bbox_reg,
                    use_kernel = args.use_kernel_function)
        params = list(self.net.parameters())
        for param in params:
            print param.size()
        print self.net
        # To group up the features
        vgg_features_fix, vgg_features_var, rpn_features, hdn_features, language_features = group_features(self.net)
        # Setting the state of the training model
        self.net.cuda()
        self.net.train()
        network.set_trainable(self.net, False)
        # loading model for inference
        print 'Resume training from: {}'.format(args.resume_model)
        if len(args.resume_model) == 0:
            raise Exception('[resume_model] not specified')
        network.load_net(args.resume_model, self.net)
        args.train_all = True
        optimizer_select = 2
        # NOTE(review): `optimizer` and `target_net` are built but never used
        # afterwards (the net is put in eval mode below) -- likely leftovers
        # from the training script this node was derived from.
        optimizer = network.get_optimizer(args.lr,optimizer_select, args,
                    vgg_features_var, rpn_features, hdn_features, language_features)
        target_net = self.net
        self.net.eval()
        print ('Model Loading time: ', time.time() - end)
        # Set topics
        self.bridge = CvBridge()
        self.dot = Digraph(comment='warehouse', format='svg')
        self.regions_dot = Digraph(comment='regions', format='svg')
        self.image_sub = message_filters.Subscriber('/turtlebot2i/camera/rgb/raw_image', Image)
        self.image_depth_sub = message_filters.Subscriber('/turtlebot2i/camera/depth/raw_image', Image)
        self.ts = message_filters.TimeSynchronizer([self.image_sub, self.image_depth_sub], queue_size=1)
        print ('calling callback')
        self.ts.registerCallback(self.callback)
        self.scenegraph_pub = rospy.Publisher('/turtlebot2i/scene_graph', SceneGraph, queue_size=10)
    def callback(self, image, depth_image):
        """Per-frame inference: build and render scene + region graphs.

        Args:
            image: sensor_msgs/Image RGB frame.
            depth_image: sensor_msgs/Image depth frame synchronized with `image`.
        """
        try:
            print 'inside callback '
            # Map the raw depth values into metres between the clipping planes.
            farClippingPlane = 3.5
            nearClippingPlane = 0.0099999
            cv_depth_image = self.bridge.imgmsg_to_cv2(depth_image,"passthrough")
            cv_depth_image = cv2.flip(cv_depth_image, 0)
            cv_depth_image = nearClippingPlane + (cv_depth_image * (farClippingPlane - nearClippingPlane))
            cv_image = self.bridge.imgmsg_to_cv2(image, "rgb8")
            # Domain-knowledge tables for the warehouse scene.
            predicates_frequency = {'behind' : 1, 'on': 1, 'has': 1000000, 'in_front_of' : 1, 'next_to': 2, 'beside': 2, 'with': 1, 'attach_to': 1, 'connected_to': 1, 'charges': 1, 'in_hands_of': 1}
            all_classes = {'slidingdoor':0, 'wall':0, 'shelf':0, 'robot':0, 'human':0, 'conveyorbelt':0, 'dockstation':0, 'product':0, 'floor':0}
            class_names = ['floor', 'wall', 'shelf', 'robot', 'human', 'conveyorbelt', 'dockstation', 'product', 'slidingdoor' ]
            allowed_self_relationship = {'slidingdoor':[], 'wall':['beside','attach_to'], 'shelf':['beside','next_to'], 'robot':[], 'human':['in_front_of','behind'], 'conveyorbelt':[], 'dockstation':[], 'product':['beside','next_to'], 'floor':[]}
            print("Describing.....")
            # After the first frame, reset both graphs before re-populating.
            if self.check == False:
                self.dot.clear()
                self.regions_dot.clear()
            im, im_info = self.train_set.get_image_info(cv_image)
            end = time.time()
            region_caption, region_list, region_pred_boxes, region_logprobs, class_pred_boxes, class_scores,\
            class_inds, subject_list, object_list, predicate_list, predicate_inds, predicate_scores = self.net.describe(im.unsqueeze(0), [im_info], top_N=[50])
            class_idx = []
            for class_ in all_classes.keys():
                class_idx.append(self.train_set.word2idx[class_])
            predicate_idx = []
            for predicate in predicates_frequency.keys():
                predicate_idx.append(self.train_set.word2idx[predicate])
            classes_name = []
            predicate_scores = predicate_scores.squeeze()[predicate_list]
            subject_scores = class_scores[subject_list].squeeze()
            object_scores = class_scores[object_list].squeeze()
            relationship_scores = predicate_scores * (subject_scores + object_scores)/2.0
            # Confidence gating: keep triplets with subject/object > 0.7 and
            # predicate > 0.5; keep detections above 0.7.
            keep_indexes = np.where((subject_scores > 0.7) & (object_scores > 0.7) & (predicate_scores > 0.5))[0]
            keep_classes = np.where(class_scores > 0.7)[0]
            class_name_score = dict()
            # Name each kept detection; non-floor classes get an instance
            # suffix like 'shelf#2' so duplicates stay distinct.
            for i in keep_classes:
                class_name = self.train_set._object_classes[class_inds[i]]
                score = class_scores[i]
                all_classes[class_name] += 1
                if class_name != 'floor':
                    classes_name.append(str(class_name +'#'+ str(all_classes[class_name])))
                    class_name_score[str(class_name +'#'+ str(all_classes[class_name]))] = score
                else:
                    classes_name.append(str(class_name))
                    class_name_score[str(class_name)] = score
            #_ = draw_bbox_label_msdn(cv_image, class_pred_boxes[keep_classes], class_inds[keep_classes], class_scores[keep_classes])
            classes_name = np.array(classes_name)
            subject_scores = subject_scores[keep_indexes]
            object_scores = object_scores[keep_indexes]
            subject_list = subject_list[keep_indexes]
            object_list = object_list[keep_indexes]
            predicate_list = predicate_list[keep_indexes]
            relationship_scores = relationship_scores[keep_indexes]
            predicate_scores = predicate_scores[keep_indexes]
            # subject_inds = class_inds[subject_list]
            # object_inds = class_inds[object_list]
            subjects_name = classes_name[subject_list]
            objects_name = classes_name[object_list]
            predicate_inds = predicate_inds.squeeze()[predicate_list]
            #print (class_inds[subject_list[keep_indexes]])
            # Deduplicate triplets: drop unwanted pairs, keep the best-scoring
            # object per (subject, predicate) run, and prefer the higher-scoring
            # direction when a reversed duplicate already exists.
            relationship_dict = dict()
            last_subject = ''
            last_predicate = ''
            temp_score_list = []
            object_ids = []
            for i in range (len(subjects_name)):
                predicate = self.train_set._predicate_classes[predicate_inds[i]]
                subject = subjects_name[i]
                _object = objects_name[i]
                if subject != _object:
                    if (subject == 'floor' and predicate != 'has') or (_object == 'floor' and predicate != 'on') or (subject[:-2] == 'wall' and predicate == 'in_front_of' and _object[:-2] == 'dockstation'):
                        print 'unwanted relationship', subject, '-> ' , predicate, ' -> ', _object
                    elif subject == 'floor' and predicate == 'has':
                        # 'floor has X' is always recorded (no best-of filtering).
                        if subject not in relationship_dict.keys():
                            relationship_dict[subject] = dict()
                            relationship_dict[subject][predicate] = dict()
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                        elif predicate not in relationship_dict[subject].keys():
                            relationship_dict[subject][predicate] = dict()
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                        elif _object not in relationship_dict[subject][predicate].keys():
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                        else:
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                    elif _object in relationship_dict.keys() and predicate in relationship_dict[_object].keys()\
                    and subject in relationship_dict[_object][predicate].keys() and predicate_scores[i] > relationship_dict[_object][predicate][subject]:
                        # A reversed duplicate exists with a lower score: replace
                        # it with this direction and clean up emptied sub-dicts.
                        if subject not in relationship_dict.keys():
                            relationship_dict[subject] = dict()
                            relationship_dict[subject][predicate] = dict()
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                        elif predicate not in relationship_dict[subject].keys():
                            relationship_dict[subject][predicate] = dict()
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                        elif _object not in relationship_dict[subject][predicate].keys():
                            relationship_dict[subject][predicate][_object] = predicate_scores[i]
                        del relationship_dict[_object][predicate][subject]
                        if len(relationship_dict[_object][predicate]) == 0:
                            del relationship_dict[_object][predicate]
                        if len(relationship_dict[_object]) == 0:
                            del relationship_dict[_object]
                    elif subject == last_subject:
                        if predicate == last_predicate:
                            # Same (subject, predicate) run continues: collect
                            # candidates and pick the best when the run ends.
                            temp_score_list.append(predicate_scores[i])
                            object_ids.append(i)
                        else:
                            if len(temp_score_list) > 1:
                                sorted_scores = np.array(temp_score_list).argsort()[::-1]
                                indx = np.array(object_ids)[sorted_scores][0]
                            else:
                                indx = object_ids[-1]
                            #print 'Saving relationship 1', subject, '-> ' , last_predicate, ' -> ', objects_name[indx]
                            relationship_dict[subject][last_predicate][objects_name[indx]] = predicate_scores[indx]
                            relationship_dict[subject][predicate] = dict()
                            temp_score_list = [predicate_scores[i]]
                            if subject[:-2] == _object[:-2] and predicate not in allowed_self_relationship[subject[:-2]]:
                                object_ids = []
                                last_predicate = ''
                                last_subject = ''
                            else:
                                object_ids = [i]
                                last_predicate = predicate
                    else:
                        # New subject: flush the pending run, then start a new one.
                        relationship_dict[subject] = dict()
                        relationship_dict[subject][predicate] = dict()
                        if last_subject != '':
                            if len(temp_score_list) > 1:
                                sorted_scores = np.array(temp_score_list).argsort()[::-1]
                                indx = np.array(object_ids)[sorted_scores][0]
                            else:
                                indx = object_ids[-1]
                            #print 'Saving relationship 2', last_subject, '-> ' , last_predicate, ' -> ', objects_name[indx]
                            relationship_dict[last_subject][last_predicate][objects_name[indx]] = predicate_scores[indx]
                        if subject[:-2] == _object[:-2] and predicate not in allowed_self_relationship[subject[:-2]]:
                            object_ids = []
                            last_predicate = ''
                            last_subject = ''
                            temp_score_list = []
                        else:
                            last_subject = subject
                            last_predicate = predicate
                            temp_score_list = [predicate_scores[i]]
                            object_ids = [i]
            # Flush the final pending (subject, predicate) run.
            if last_subject != '':
                if len(temp_score_list) > 1:
                    sorted_scores = np.array(temp_score_list).argsort()[::-1]
                    indx = np.array(object_ids)[sorted_scores][0]
                else:
                    indx = object_ids[-1]
                #print 'Saving relationship 3', last_subject, '-> ' , last_predicate, ' -> ', objects_name[indx]
                relationship_dict[last_subject][last_predicate][objects_name[indx]] = predicate_scores[indx]
            print ('Time taken to decribe: ', time.time() - end)
            # Render the scene graph: warehouse -> floor -> detected objects.
            self.dot.node_attr['shape']='record'
            robot_label = "turtlebot2i"
            #self.dot.node('robot', label=robot_label)
            self.dot.node('warehouse', label='warehouse')
            floor_label = "{floor|Score: 0.7}"
            if 'floor' in class_name_score.keys():
                floor_label = '%s|Score: %.2f'%( 'floor', class_name_score['floor'])
            self.dot.node('floor', label=floor_label)
            self.dot.edge('warehouse','floor')
            list_nodes = ['warehouse', 'floor']
            for subject in relationship_dict.keys():
                for predicate in relationship_dict[subject].keys():
                    for _object in relationship_dict[subject][predicate].keys():
                        if subject not in list_nodes:
                            node_label = '%s|Score: %.2f'%( subject, class_name_score[subject])
                            self.dot.node(subject, label=node_label)
                            list_nodes.append(subject)
                        if _object not in list_nodes:
                            node_label = '%s|Score: %.2f'%( _object, class_name_score[_object])
                            self.dot.node(_object, label=node_label)
                            list_nodes.append(_object)
                        self.dot.edge(subject, _object, label=predicate)
                        print 'Subject : ', subject, ' Predicate: ', predicate,' Object: ', _object, ' Score: ',relationship_dict[subject][predicate][_object]
            print 'END PRINTING Relationships...'
            # Region captions: walk captions in descending log-probability and
            # keep the best caption per detected class (or class pair).
            sorted_regions = region_logprobs.argsort()[::-1]
            regions_dict = dict()
            regions_prob_dict = dict()
            sorted_region_keys = []
            for i in sorted_regions:
                if region_logprobs[i] > -0.5:
                    region_idx = region_caption[i]
                    common = list( frozenset(region_idx) & frozenset(class_idx))
                    #print 'Common classes: ', common
                    if len(common) == 2:
                        class_1 = self.train_set.idx2word[common[0]]
                        class_2 = self.train_set.idx2word[common[1]]
                        if all_classes[class_1] != 0 and all_classes[class_2] != 0:
                            key = frozenset([class_1, class_2])
                            #print key
                            if key not in regions_prob_dict.keys():
                                regions_prob_dict[key] = region_logprobs[i]
                                regions_dict[key] = region_caption[i]
                                sorted_region_keys.append(key)
                            elif regions_prob_dict[key] < region_logprobs[i]:
                                regions_prob_dict[key] = region_logprobs[i]
                                regions_dict[key] = region_caption[i]
                                sorted_region_keys.append(key)
                    elif len(common) == 1:
                        class_1 = self.train_set.idx2word[common[0]]
                        if all_classes[class_1] != 0:
                            key = frozenset([class_1])
                            #print key
                            if key not in regions_prob_dict.keys():
                                regions_prob_dict[key] = region_logprobs[i]
                                regions_dict[key] = region_caption[i]
                                sorted_region_keys.append(key)
                            elif all_classes[class_1] > 1:
                                # Several instances of this class: try the
                                # per-instance keys 'class#j'.
                                j = 1
                                while j < all_classes[class_1]:
                                    key = frozenset([class_1+'#'+str(j)])
                                    #print key
                                    if key not in regions_prob_dict.keys():
                                        regions_prob_dict[key] = region_logprobs[i]
                                        regions_dict[key] = region_caption[i]
                                        sorted_region_keys.append(key)
                                    elif regions_prob_dict[key] < region_logprobs[i]:
                                        regions_prob_dict[key] = region_logprobs[i]
                                        regions_dict[key] = region_caption[i]
                                        sorted_region_keys.append(key)
                                    j+=1
            # Build the caption chain graph, skipping vocabulary sentinels.
            self.regions_dot.node_attr['shape']='record'
            captions_list = []
            for key in sorted_region_keys:
                region_idx = regions_dict[key]
                log_prob = regions_prob_dict[key]
                caption = ""
                space = ""
                for indx in region_idx:
                    word = self.train_set.idx2word[indx]
                    if word != "<unknown>" and word != "<start>" and word != "<end>":
                        caption += space + word
                        space = " "
                node_label = "%s|Log probability: %.6f"%(caption, log_prob)
                repetition_check = True
                if caption not in captions_list:
                    self.regions_dot.node(caption, label=node_label)
                    repetition_check = False
                    if len(captions_list) > 0:
                        self.regions_dot.edge(captions_list[-1], caption)
                if repetition_check == False:
                    captions_list.append(caption)
                #print caption, log_prob
            # view= opens the rendered graphs only on the first frame.
            self.dot.render('scene_graph.gv', view = self.check)
            self.regions_dot.render('region_graph.gv', view = self.check)
            #s = Source(self.dot, filename="scene_graph", format="png")
            #s1 = Source(self.regions_dot, filename="region_graph", format="png")
            # #if self.check == False:
            #     s.view()
            #     s1.view()
            if self.check == True:
                self.check = False
            print 'END PRINTING Regions...'
        except CvBridgeError as e:
            print(e)
# Entry point: start the ROS node and hand control to the subscriber callbacks.
if __name__ == '__main__':
    print 'start'
    rospy.init_node('msdn_py')
    detector = ros_msdn()
    rospy.spin()
|
{
"content_hash": "999e6a20e1fa4ef1550d49189d2344fb",
"timestamp": "",
"source": "github",
"line_count": 510,
"max_line_length": 246,
"avg_line_length": 50.476470588235294,
"alnum_prop": 0.5545196752515247,
"repo_name": "EricssonResearch/scott-eu",
"id": "eb170f2a0efc341ffd2932e5c15d66a88474d913",
"size": "25767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simulation-ros/src/turtlebot2i/turtlebot2i_msdn/src/ros_msdn.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "557472"
},
{
"name": "C#",
"bytes": "53898"
},
{
"name": "C++",
"bytes": "209194"
},
{
"name": "CMake",
"bytes": "68861"
},
{
"name": "CSS",
"bytes": "7068"
},
{
"name": "Cuda",
"bytes": "13073"
},
{
"name": "Dockerfile",
"bytes": "13106"
},
{
"name": "Groovy",
"bytes": "5714"
},
{
"name": "HTML",
"bytes": "48278"
},
{
"name": "Java",
"bytes": "1097268"
},
{
"name": "JavaScript",
"bytes": "134624"
},
{
"name": "Kotlin",
"bytes": "120642"
},
{
"name": "Lua",
"bytes": "257969"
},
{
"name": "MATLAB",
"bytes": "910"
},
{
"name": "Makefile",
"bytes": "32165"
},
{
"name": "Objective-C",
"bytes": "3527"
},
{
"name": "Prolog",
"bytes": "33238"
},
{
"name": "Python",
"bytes": "1758831"
},
{
"name": "Scala",
"bytes": "2494"
},
{
"name": "Shell",
"bytes": "9731"
}
],
"symlink_target": ""
}
|
class Region:
    """A named map region owned by a capital city.

    Attributes:
        capital: The city object this region belongs to (must expose
            ``get_name()``).
        name: Display name; defaults to "<capital name> Lands".
        name_de: German display name (class-level default only).
        tiles: The map tiles belonging to this region.
    """
    # Class-level defaults kept for backward compatibility with code that
    # reads them off the class itself.
    capital = None
    name = "No Man's Land"
    name_de = "Niemandland"
    tiles = []

    def __init__(self, capital, radius=3, name=None):
        """Create a region around *capital*.

        Args:
            capital: Owning city; used to derive the default name.
            radius: Currently unused.  # TODO: apply radius when generating tiles
            name: Optional explicit region name; falls back to the default.
        """
        self.capital = capital
        # Per-instance tile list: the class-level `tiles = []` would be
        # shared (and mutated) across every Region instance.
        self.tiles = []
        if name:
            self.name = name
        else:
            self.name = self.get_default_name()

    def get_default_name(self):
        """Return the default region name derived from the capital's name."""
        capital_name = self.capital.get_name()
        # Fixed: previously referenced the misspelled `capital_namme`,
        # which raised NameError.
        return capital_name + " Lands"
|
{
"content_hash": "5f38f1f47deda530f97e69b85e3c35b1",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 50,
"avg_line_length": 18.333333333333332,
"alnum_prop": 0.6467532467532467,
"repo_name": "herrschr/prey-game",
"id": "fa37f5e38741402b6a592ea930932d202e6b8ce3",
"size": "385",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pocketthrone/entities/region.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "109218"
},
{
"name": "Shell",
"bytes": "22"
}
],
"symlink_target": ""
}
|
import uuid
from Firefly import aliases, logging
from Firefly.automation.triggers import Triggers
from Firefly.const import COMMAND_NOTIFY, SERVICE_NOTIFICATION, TYPE_AUTOMATION, API_ALEXA_VIEW, API_FIREBASE_VIEW, API_INFO_REQUEST
from Firefly.helpers.action import Action
from Firefly.helpers.conditions import Conditions
from Firefly.helpers.events import Command, Event, Request
from Firefly.helpers.automation.automation_interface import AutomationInterface
# TODO(zpriddy): These should be in const file
# Keys indexing the sections of an automation's interface/metadata config.
LABEL_ACTIONS = 'actions'
LABEL_CONDITIONS = 'conditions'
LABEL_DELAYS = 'delays'
LABEL_DEVICES = 'devices'
LABEL_MESSAGES = 'messages'
LABEL_TRIGGERS = 'triggers'
LABEL_TRIGGER_ACTION = 'trigger_actions'
# Order in which build_interfaces() scans the metadata sections.
INTERFACE_LABELS = [LABEL_ACTIONS, LABEL_CONDITIONS, LABEL_DELAYS, LABEL_DEVICES, LABEL_MESSAGES, LABEL_TRIGGERS, LABEL_TRIGGER_ACTION]
from typing import Callable, Any
class Automation(object):
    """Base class wiring a Firefly automation package to its configured
    triggers, conditions, actions, messages and delays.

    Subclasses (or callers) supply an ``event_handler`` callback which is
    invoked as ``event_handler(event, trigger_index, **kwargs)`` whenever one
    of the configured trigger lists matches an incoming event and the
    matching condition set (if any) passes.
    """

    def __init__(self, firefly, package: str, event_handler: Callable, metadata: dict = None, interface: dict = None, **kwargs):
        """Set up the automation and resolve its ff_id/alias.

        Args:
            firefly: The core firefly object.
            package: Package path of this automation.
            event_handler: Callback invoked on matching trigger events.
            metadata: Package metadata (may describe the interface layout).
            interface: User-supplied interface config (triggers/actions/...).
            **kwargs: May carry ``ff_id`` and/or ``alias``.
        """
        # Fixed: defaults were mutable ``{}`` literals, which are shared
        # across every instance created without an explicit argument.
        metadata = {} if metadata is None else metadata
        interface = {} if interface is None else interface
        self.actions = {}
        self.command_map = {}
        self.conditions = {}
        self.delays = {}
        self.devices = {}
        self.event_handler = event_handler
        self.firefly = firefly
        self.interface = interface
        self.messages = {}
        self.metadata = metadata
        self.package = package
        self.triggers = {}
        self.trigger_actions = {}

        # TODO(zpriddy): This should be a shared function in a lib somewhere.
        # Alias and id functions
        ff_id = kwargs.get('ff_id')
        alias = kwargs.get('alias')
        # If alias given but no ID look at config files for ID.
        if not ff_id and alias:
            if aliases.get_device_id(alias):
                ff_id = aliases.get_device_id(alias)
        elif ff_id and not alias:
            if aliases.get_alias(ff_id):
                alias = aliases.get_alias(ff_id)
        # If no ff_id ID given -> generate random ID.
        if not ff_id:
            ff_id = str(uuid.uuid4())
        self.id = ff_id
        self.alias = alias if alias else ff_id

        self.new_interface = AutomationInterface(firefly, self.id, self.interface)
        self.new_interface.build_interface()
        #self.build_interfaces()

    def event(self, event: Event, **kwargs):
        """Dispatch an incoming event against every configured trigger list.

        The first trigger list that matches (and whose paired condition set,
        if any, passes) hands control to the registered event handler.
        """
        logging.info('[AUTOMATION] %s - Receiving event: %s' % (self.id, event))
        # Check each triggerList in triggers.
        for trigger_index, trigger in self.new_interface.triggers.items():
            if trigger.check_triggers(event):
                # Check if there are conditions with the same index, if so check them.
                if self.new_interface.conditions.get(trigger_index):
                    if not self.new_interface.conditions.get(trigger_index).check_conditions(self.firefly):
                        logging.info('[AUTOMATION] failed condition checks.')
                        continue
                # Call the event handler passing in the trigger_index and return.
                logging.info('[AUTOMATION] no conditions. executing event handler.')
                return self.event_handler(event, trigger_index, **kwargs)

    def request(self, request: Request) -> Any:
        """Function to request data from the ff_id.

        The returned data can be in any format. Common formats should be:
          str, int, dict

        Args:
          request (Request): Request object

        Returns:
          Requested Data, or None for unknown request types.
        """
        logging.debug('[AUTOMATION] %s: Got Request %s' % (self.id, request))
        if request.request == API_INFO_REQUEST:
            return self.get_api_info()
        if request.request == API_FIREBASE_VIEW:
            return self.get_firebase_views()
        if request.request == API_ALEXA_VIEW:
            return self.get_alexa_view()
        return None

    def get_api_info(self, **kwargs):
        """Placeholder API info view; automations expose no data by default."""
        return {}

    def get_firebase_views(self, **kwargs):
        """Placeholder firebase view; automations expose no data by default."""
        return {}

    def get_alexa_view(self, **kwargs):
        """Placeholder alexa view; automations expose no data by default."""
        logging.info('[AUTOMATION] no alexa view')
        return {}

    def export(self, **kwargs):
        """Export ff_id config with options current values to a dictionary.

        Returns:
          (dict): A dict of the ff_id config.
        """
        export_data = {
            'alias': self.alias,  # 'commands': self.command_map.keys(),
            'ff_id': self.id,
            'interface': self.new_interface.export(),
            'metadata': self.metadata,
            'package': self.package,
            'type': self.type
        }
        return export_data

    def command(self, command, **kwargs):
        """Function that is called to send a command to a ff_id.

        Commands can be used to reset times or other items if the automation
        needs it.

        Args:
          command (Command): The command to be sent in a Command object

        Returns:
          (bool): Command successful.
        """
        logging.debug('%s: Got Command: %s' % (self.id, command.command))
        if command.command in self.command_map:
            try:
                self.command_map[command.command](**command.args)
                return True
            # Fixed: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt.
            except Exception:
                return False
        return False

    def build_interfaces(self, **kwargs):
        """Build the interfaces (actions, conditions, delays, triggers) using
        the metadata and config information.
        """
        meta_interfaces = self.metadata.get('interface')
        if not meta_interfaces:
            return
        for label in INTERFACE_LABELS:
            interface_data = meta_interfaces.get(label)
            if not interface_data:
                continue
            if label == LABEL_ACTIONS:
                self.build_actions_interface(interface_data)
            if label == LABEL_TRIGGERS:
                self.build_triggers_interface(interface_data)
            if label == LABEL_CONDITIONS:
                self.build_conditions_interface(interface_data)
            if label == LABEL_DELAYS:
                self.build_delays_interface(interface_data)
            if label == LABEL_DEVICES:
                self.build_devices_interface(interface_data)
            if label == LABEL_MESSAGES:
                self.build_messages_interface(interface_data)
            if label == LABEL_TRIGGER_ACTION:
                self.build_trigger_actions_interface(interface_data)

    def build_actions_interface(self, interface_data: dict, **kwargs):
        """Populate self.actions from the config's 'actions' section."""
        for action_index in interface_data.keys():
            self.actions[action_index] = []
            # TODO(zpriddy): Do we want to keep the add_action function?
            if not self.interface.get(LABEL_ACTIONS):
                continue
            for action in self.interface.get(LABEL_ACTIONS).get(action_index):
                self.actions[action_index].append(Action(**action))

    def build_triggers_interface(self, interface_data: dict, **kwargs):
        """Populate self.triggers from the config's 'triggers' section."""
        for trigger_index in interface_data.keys():
            if not self.interface.get(LABEL_TRIGGERS):
                continue
            self.triggers[trigger_index] = Triggers(self.firefly, self.id)
            self.triggers[trigger_index].import_triggers(self.interface.get(LABEL_TRIGGERS).get(trigger_index))

    def build_trigger_actions_interface(self, interface_data: dict, **kwargs):
        """Populate self.trigger_actions from the 'trigger_actions' section."""
        for trigger_action_index in interface_data.keys():
            if not self.interface.get(LABEL_TRIGGER_ACTION):
                continue
            self.trigger_actions[trigger_action_index] = self.interface.get(LABEL_TRIGGER_ACTION).get(trigger_action_index)

    def build_conditions_interface(self, interface_data: dict, **kwargs):
        """Populate self.conditions from the config's 'conditions' section."""
        for condition_index in interface_data.keys():
            self.conditions[condition_index] = None
            if not self.interface.get(LABEL_CONDITIONS):
                continue
            if not self.interface.get(LABEL_CONDITIONS).get(condition_index):
                continue
            self.conditions[condition_index] = Conditions(**self.interface.get(LABEL_CONDITIONS).get(condition_index))

    def build_delays_interface(self, interface_data: dict, **kwargs):
        """Populate self.delays from the config's 'delays' section."""
        for delay_index in interface_data.keys():
            if not self.interface.get(LABEL_DELAYS):
                continue
            self.delays[delay_index] = self.interface.get(LABEL_DELAYS).get(delay_index)

    def build_devices_interface(self, interface_data: dict, **kwargs):
        """Populate self.devices from the config's 'devices' section."""
        for device_index in interface_data.keys():
            if not self.interface.get(LABEL_DEVICES):
                continue
            self.devices[device_index] = self.interface.get(LABEL_DEVICES).get(device_index)

    def build_messages_interface(self, interface_data: dict, **kwargs):
        """Populate self.messages from the config's 'messages' section."""
        for message_index in interface_data.keys():
            if not self.interface.get(LABEL_MESSAGES):
                continue
            self.messages[message_index] = self.interface.get(LABEL_MESSAGES).get(message_index)

    def export_interface(self, **kwargs):
        """Serialize all interface sections back into a plain dict."""
        interface = {}
        interface[LABEL_TRIGGERS] = {}
        for trigger_index, trigger in self.triggers.items():
            if trigger is None:
                continue
            interface[LABEL_TRIGGERS][trigger_index] = trigger.export()
        interface[LABEL_ACTIONS] = {}
        for action_index, action in self.actions.items():
            interface[LABEL_ACTIONS][action_index] = [a.export() for a in action]
        interface[LABEL_CONDITIONS] = {}
        for condition_index, condition in self.conditions.items():
            if condition is None:
                continue
            interface[LABEL_CONDITIONS][condition_index] = condition.export()
        interface[LABEL_MESSAGES] = {}
        for message_index, message in self.messages.items():
            if message is None:
                continue
            interface[LABEL_MESSAGES][message_index] = message
        interface[LABEL_DELAYS] = {}
        for delay_index, delay in self.delays.items():
            if delay is None:
                continue
            interface[LABEL_DELAYS][delay_index] = delay
        interface[LABEL_DEVICES] = {}
        for device_index, device in self.devices.items():
            if device is None:
                continue
            interface[LABEL_DEVICES][device_index] = device
        interface[LABEL_TRIGGER_ACTION] = {}
        for trigger_action_index, trigger_action in self.trigger_actions.items():
            if trigger_action is None:
                continue
            interface[LABEL_TRIGGER_ACTION][trigger_action_index] = trigger_action
        return interface

    def add_command(self, command: str, function: Callable) -> None:
        """Adds a command to the list of supported ff_id commands.

        Args:
          command (str): The string of the command
          function (Callable): The function to be executed.
        """
        self.command_map[command] = function

    def execute_actions(self, action_index: str, **kwargs) -> bool:
        """Run every action registered under *action_index*.

        Returns:
          (bool): True if the index had actions to run.
        """
        if not self.new_interface.actions.get(action_index):
            return False
        for action in self.new_interface.actions.get(action_index):
            action.execute_action(self.firefly)
        return True

    def send_messages(self, message_index: str, **kwargs) -> bool:
        """Send the notification message registered under *message_index*.

        Returns:
          (bool): True if a message was configured and dispatched.
        """
        if not self.new_interface.messages.get(message_index):
            return False
        notify = Command(SERVICE_NOTIFICATION, self.id, COMMAND_NOTIFY, message=self.new_interface.messages.get(message_index))
        self.firefly.send_command(notify)
        return True

    def event_handler(self, event: Event = None, trigger_index="", **kwargs):
        """Fallback handler; shadowed by the instance attribute set in
        __init__, so reaching this indicates a wiring error."""
        logging.error('EVENT HANDLER NOT CREATED')

    @property
    def type(self):
        return TYPE_AUTOMATION
|
{
"content_hash": "de49262202e9582c5f971867928e7c36",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 135,
"avg_line_length": 35.25816993464052,
"alnum_prop": 0.6762443229214942,
"repo_name": "Firefly-Automation/Firefly",
"id": "2c16b7ebe1649fb4529272899a0eebb21a17ef60",
"size": "10789",
"binary": false,
"copies": "1",
"ref": "refs/heads/firefly3",
"path": "Firefly/helpers/automation/automation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "639413"
},
{
"name": "Shell",
"bytes": "10447"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Retarget Shift.staff as a cascading FK to UserManagement.Attendent."""

    dependencies = [('SessionManagement', '0001_initial')]

    operations = [
        migrations.AlterField(
            model_name='shift',
            name='staff',
            field=models.ForeignKey(
                to='UserManagement.Attendent',
                on_delete=django.db.models.deletion.CASCADE,
            ),
        ),
    ]
|
{
"content_hash": "96826f933054f981de905903de7339fb",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 112,
"avg_line_length": 24.736842105263158,
"alnum_prop": 0.6425531914893617,
"repo_name": "SkillSmart/ConferenceManagementSystem",
"id": "921e0b8cae9571dc5befdaacdb1c903caa31ff83",
"size": "543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SessionManagement/migrations/0002_auto_20170302_2009.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1259"
},
{
"name": "C",
"bytes": "487034"
},
{
"name": "C++",
"bytes": "26115"
},
{
"name": "CSS",
"bytes": "53731"
},
{
"name": "HTML",
"bytes": "172113"
},
{
"name": "JavaScript",
"bytes": "126279"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "217460"
},
{
"name": "Tcl",
"bytes": "1237789"
}
],
"symlink_target": ""
}
|
import os, sys
import httplib, urllib, json, requests
# Replace or verify the region.
#
# You must use the same region in your REST API call as you used to obtain your subscription keys.
# For example, if you obtained your subscription keys from the westus region, replace
# "westcentralus" in the URI below with "westus".
#
# NOTE: Free trial subscription keys are generated in the westcentralus region, so if you are using
# a free trial subscription key, you should not need to change this region.
def returnarray():
    """Run Microsoft Cognitive Services OCR on a fixed prescription image
    and parse the response into three field lists.

    Returns a nested list ``[a0, a1, a2]`` where the groups are built from
    hard-coded OCR line indexes (name/gender, then two more field groups).
    """
    # NOTE(review): API key hard-coded in source -- move to env var/config
    # and rotate the key.
    subscription_key = 'bec9c58578e046ef8c57ecb2a8a1795d'

    headers = {
        # Request headers.
        'Content-Type': 'application/octet-stream',
        'Ocp-Apim-Subscription-Key': subscription_key,
    }

    params = urllib.urlencode({
        # Request parameters. The language setting "unk" means automatically detect the language.
        'language': 'unk',
        'detectOrientation ': 'true',
    })

    try:
        # DropboxAuth.setup()
        # Execute the REST API call and get the response.
        body = ""
        # NOTE(review): absolute user-specific path -- breaks on any other machine.
        filename = '/home/itsuncheng/Desktop/pharmacist/public/pic/test123.jpg'
        # with open(img_filename, 'rb') as f:
        #     img_data = f.read()
        f = open(filename, "rb")
        body = f.read()
        f.close()
        conn = httplib.HTTPSConnection('westcentralus.api.cognitive.microsoft.com')
        conn.request("POST", "/vision/v1.0/ocr?%s" % params, body, headers)
        response = conn.getresponse()
        data = response.read()
        parsed = json.loads(data)
        print ("Response:")
        # substring[k] = first word index to keep on the k-th line of interest.
        substring = [3, 0, 2, 5, 3, 0, 0, 3]
        index = 0
        a0 = []
        a1 = []
        a2 = []
        scanobject = []
        # Hard-coded OCR line numbers that hold the fields to extract; these
        # assume a fixed prescription layout in the OCR response.
        for x in [11, 13, 34, 7, 16, 18, 15, 14]:
            # Concatenate the selected words of this line into one string.
            result = ""
            for y in range(substring[index], len(parsed["regions"][0]["lines"][x]["words"])):
                # print(parsed["regions"][0]["lines"][x]["words"][y]["text"])
                # if index == 5:
                result += parsed["regions"][0]["lines"][x]["words"][y]["text"]
            if index <= 1:
                # Group a0: name plus gender flag (0 when the honorific reads
                # "小姐", i.e. Miss; otherwise 1).
                if index == 1:
                    if result.encode('utf-8') == "小姐":
                        a0.append(0)
                    else:
                        a0.append(1)
                else:
                    a0.append(result)
            elif index <= 3:
                # Group a1: fields taken verbatim.
                a1.append(result)
            else:
                # Group a2: remaining fields with field-specific trimming.
                if index == 5:
                    # Strip one leading and one trailing character.
                    a2.append(result[1:len(result)-1])
                elif index == 6:
                    # Split this field on its first comma into two entries.
                    for i in range(0, len(result) - 1):
                        if(result[i:i+1] == ","):
                            breakindex = i
                            break
                    a2.append(result[0:breakindex])
                    a2.append(result[breakindex + 1:len(result)])
                else:
                    a2.append(result)
            index += 1
        scanobject.append(a0)
        scanobject.append(a1)
        scanobject.append(a2)
        print(scanobject[0][0])
        print(scanobject[0][1])
        print(scanobject[1][0])
        print(scanobject[1][1])
        print(scanobject[2][0])
        print(scanobject[2][1])
        print(scanobject[2][2])
        print(scanobject[2][3])
        print(scanobject[2][4])
        conn.close()
    except Exception as e:
        print('Error:')
        print(e)
    # NOTE(review): if the request/parse fails before `scanobject` is bound,
    # this return raises NameError -- consider initializing it before `try`.
    return scanobject
# returnarray()
|
{
"content_hash": "0b248b0fa14a22d355d025f3a2c67474",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 99,
"avg_line_length": 30.821428571428573,
"alnum_prop": 0.5165121668597914,
"repo_name": "itsuncheng/pharmacist",
"id": "0accea7ae74c11df36d86c204d96b584f7a4cc46",
"size": "3510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "passarray.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25635"
},
{
"name": "HTML",
"bytes": "9026106"
},
{
"name": "Hack",
"bytes": "144066"
},
{
"name": "JavaScript",
"bytes": "138727"
},
{
"name": "PHP",
"bytes": "1755267"
},
{
"name": "Python",
"bytes": "11747"
}
],
"symlink_target": ""
}
|
import os
import pytest
from optimus.exceptions import ServerConfigurationError, InvalidHostname
from optimus.interfaces.runserver import server_interface
def test_server_interface_success(tmpdir, fixtures_settings, starter_basic_settings):
    """
    Interface should return cherrypy pre-configured and with correct application
    config to mount.
    """
    # Compute the sample project path inside the pytest temporary directory.
    project_path = os.path.join(tmpdir, "basic", "project")
    settings = starter_basic_settings(project_path)

    # The publish directory must exist before the server can be configured.
    publish_path = settings.PUBLISH_DIR
    os.makedirs(publish_path)

    server_env = server_interface(settings, "localhost:8001", index="foo.html")

    assert server_env["cherrypy"] is not None
    expected_conf = {
        "/": {
            "tools.staticdir.index": "foo.html",
            "tools.staticdir.on": True,
            "tools.staticdir.dir": publish_path,
        },
    }
    assert server_env["app_conf"] == expected_conf
def test_server_interface_no_builddir(
    tmpdir, fixtures_settings, starter_basic_settings
):
    """
    Interface should raise an exception when publish directory does not exists.
    """
    project_path = os.path.join(tmpdir, "basic", "project")
    settings = starter_basic_settings(project_path)

    # The publish directory was never created, so configuring the
    # server must fail.
    with pytest.raises(ServerConfigurationError):
        server_interface(settings, "localhost:8001")
def test_server_interface_invalid_hostname(
    tmpdir, fixtures_settings, starter_basic_settings
):
    """
    Interface should raise an exception when given hostname is invalid.
    """
    project_path = os.path.join(tmpdir, "basic", "project")
    settings = starter_basic_settings(project_path)

    # Port part is not numeric, which the interface must reject.
    with pytest.raises(InvalidHostname):
        server_interface(settings, "localhost:nope")
|
{
"content_hash": "24f0fa52641b1739a506fda61174c6cf",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 85,
"avg_line_length": 28.743589743589745,
"alnum_prop": 0.6935771632471008,
"repo_name": "sveetch/Optimus",
"id": "224c28b324fc4c004e6a7a4f8adc7a682444e176",
"size": "2266",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/1000_interfaces/05_runserver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14380"
},
{
"name": "HTML",
"bytes": "16553"
},
{
"name": "JavaScript",
"bytes": "101904"
},
{
"name": "Makefile",
"bytes": "1564"
},
{
"name": "Python",
"bytes": "245913"
},
{
"name": "Ruby",
"bytes": "855"
},
{
"name": "Smarty",
"bytes": "8827"
}
],
"symlink_target": ""
}
|
import mock
from django.core.urlresolvers import reverse
from django.test import tag
from django.utils import timezone
from taggit.models import Tag
from .models import Flag
from .tests import BaseAdminTestCase
from submissions.tests import SubmissionsViewsBaseTestCase
from usermgmt import models as usermodels
class BaseFlagTestCase(BaseAdminTestCase, SubmissionsViewsBaseTestCase):
    """Combine the admin and submissions fixtures for flag-related tests."""

    @classmethod
    def setUpTestData(cls):
        # Delegate entirely to both parent fixtures (users, mods, submissions).
        super(BaseFlagTestCase, cls).setUpTestData()
@tag('as_user')
class TestListAllFlagsViewAsUser(BaseFlagTestCase):
    """Plain users must not be able to list every flag."""

    def test_forbidden(self):
        self.client.login(username='user', password='user pass')
        resp = self.client.get(reverse('administration:list_all_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_social_mod')
class TestListAllFlagsViewAsSocialMod(BaseFlagTestCase):
    """Social moderators must not be able to list every flag."""

    def test_forbidden(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self.client.get(reverse('administration:list_all_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_content_mod')
class TestListAllFlagsViewAsContentMod(BaseFlagTestCase):
    """Content moderators must not be able to list every flag."""

    def test_forbidden(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self.client.get(reverse('administration:list_all_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_superuser')
class TestListAllFlagsViewAsSuperuser(BaseFlagTestCase):
    """Superusers can list every flag, active or resolved."""

    def test_lists_flags(self):
        Flag(flagged_by=self.user,
             flag_type=Flag.SOCIAL,
             object_model=self.submission1,
             subject='bad submission',
             body_raw='bad to the bone, really').save()
        Flag(flagged_by=self.user,
             flag_type=Flag.CONTENT,
             object_model=self.submission2,
             subject='kinda gross',
             body_raw='who even does that').save()
        self.client.login(username='superuser', password='superuser pass')
        resp = self.client.get(reverse('administration:list_all_flags'))
        self.assertContains(resp, 'bad submission')
        self.assertContains(resp, 'kinda gross')

    def test_lists_inactive_flags(self):
        Flag(flagged_by=self.user,
             flag_type=Flag.SOCIAL,
             object_model=self.submission1,
             subject='bad submission',
             body_raw='bad to the bone, really').save()
        # The second flag is already resolved; the ``all`` query
        # parameter asks the view to include it anyway.
        Flag(flagged_by=self.user,
             flag_type=Flag.CONTENT,
             object_model=self.submission2,
             resolved=timezone.now(),
             subject='kinda gross',
             body_raw='who even does that').save()
        self.client.login(username='superuser', password='superuser pass')
        resp = self.client.get(
            reverse('administration:list_all_flags'), {'all': 1})
        self.assertContains(resp, 'bad submission')
        self.assertContains(resp, 'kinda gross')
@tag('as_user')
class TestListSocialFlagsViewAsUser(BaseFlagTestCase):
    """Plain users must not see the social flag list."""

    def test_forbidden(self):
        self.client.login(username='user', password='user pass')
        resp = self.client.get(reverse('administration:list_social_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_social_mod')
class TestListSocialFlagsViewAsSocialMod(BaseFlagTestCase):
    """Social moderators see social flags only, never content flags."""

    def test_lists_social_flags(self):
        Flag(flagged_by=self.user,
             flag_type=Flag.SOCIAL,
             object_model=self.submission1,
             subject='bad submission',
             body_raw='bad to the bone, really').save()
        Flag(flagged_by=self.user,
             flag_type=Flag.CONTENT,
             object_model=self.submission2,
             subject='kinda gross',
             body_raw='who even does that').save()
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self.client.get(reverse('administration:list_social_flags'))
        self.assertContains(resp, 'bad submission')
        self.assertNotContains(resp, 'kinda gross')
@tag('as_content_mod')
class TestListSocialFlagsViewAsContentMod(BaseFlagTestCase):
    """Content moderators must not see the social flag list."""

    def test_forbidden(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self.client.get(reverse('administration:list_social_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_superuser')
class TestListSocialFlagsViewAsSuperuser(BaseFlagTestCase):
    """Superusers browsing the social list see only social flags."""

    def test_lists_content_flags(self):
        Flag(flagged_by=self.user,
             flag_type=Flag.SOCIAL,
             object_model=self.submission1,
             subject='bad submission',
             body_raw='bad to the bone, really').save()
        Flag(flagged_by=self.user,
             flag_type=Flag.CONTENT,
             object_model=self.submission2,
             subject='kinda gross',
             body_raw='who even does that').save()
        self.client.login(username='superuser', password='superuser pass')
        resp = self.client.get(reverse('administration:list_social_flags'))
        self.assertContains(resp, 'bad submission')
        self.assertNotContains(resp, 'kinda gross')
@tag('as_user')
class TestListContentFlagsViewAsUser(BaseFlagTestCase):
    """Plain users must not see the content flag list."""

    def test_forbidden(self):
        self.client.login(username='user', password='user pass')
        resp = self.client.get(reverse('administration:list_content_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_social_mod')
class TestListContentFlagsViewAsSocialMod(BaseFlagTestCase):
    """Social moderators must not see the content flag list."""

    def test_forbidden(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self.client.get(reverse('administration:list_content_flags'))
        self.assertEqual(403, resp.status_code)
@tag('as_content_mod')
class TestListContentFlagsViewAsContentMod(BaseFlagTestCase):
    """Content moderators see content flags only, never social flags."""

    def test_lists_content_flags(self):
        Flag(flagged_by=self.user,
             flag_type=Flag.SOCIAL,
             object_model=self.submission1,
             subject='bad submission',
             body_raw='bad to the bone, really').save()
        Flag(flagged_by=self.user,
             flag_type=Flag.CONTENT,
             object_model=self.submission2,
             subject='kinda gross',
             body_raw='who even does that').save()
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self.client.get(reverse('administration:list_content_flags'))
        self.assertNotContains(resp, 'bad submission')
        self.assertContains(resp, 'kinda gross')
@tag('as_superuser')
class TestListContentFlagsViewAsSuperuser(BaseFlagTestCase):
    """Superusers browsing the content list see only content flags."""

    def test_lists_content_flags(self):
        Flag(flagged_by=self.user,
             flag_type=Flag.SOCIAL,
             object_model=self.submission1,
             subject='bad submission',
             body_raw='bad to the bone, really').save()
        Flag(flagged_by=self.user,
             flag_type=Flag.CONTENT,
             object_model=self.submission2,
             subject='kinda gross',
             body_raw='who even does that').save()
        self.client.login(username='superuser', password='superuser pass')
        resp = self.client.get(reverse('administration:list_content_flags'))
        self.assertNotContains(resp, 'bad submission')
        self.assertContains(resp, 'kinda gross')
class TestCreateFlagView(BaseFlagTestCase):
    """Creating flags against the various flaggable content types."""

    def test_requires_content_type_and_id(self):
        self.client.login(username='user', password='user pass')
        resp = self.client.get(reverse('administration:create_flag'))
        self.assertEqual(403, resp.status_code)

    def test_restricted_content_types(self):
        # ``auth:user`` is not a flaggable content type.
        self.client.login(username='user', password='user pass')
        resp = self.client.get(reverse('administration:create_flag'),
                               {'content_type': 'auth:user', 'object_id': 1})
        self.assertEqual(403, resp.status_code)

    def test_cant_flag_own_object(self):
        # ``foo`` owns submission1 and may not flag it.
        self.client.login(username='foo', password='a good password')
        resp = self.client.get(
            reverse('administration:create_flag'),
            {'content_type': 'submissions:submission',
             'object_id': self.submission1.id})
        self.assertEqual(403, resp.status_code)

    def test_renders_form(self):
        self.client.login(username='user', password='user pass')
        resp = self.client.get(
            reverse('administration:create_flag'),
            {'content_type': 'submissions:submission',
             'object_id': self.submission1.id})
        self.assertContains(resp, 'Flag submission')
        self.assertContains(resp, 'Submission 1 by ~foo')

    def test_creates_flag_with_owner(self):
        self.client.login(username='user', password='user pass')
        payload = {
            'content_type': 'submissions:submission',
            'object_id': self.submission1.id,
            'flag_type': Flag.CONTENT,
            'subject': 'it is bad',
            'body_raw': 'i did not like it, *no sir*',
        }
        resp = self.client.post(
            reverse('administration:create_flag'), payload, follow=True)
        self.assertContains(resp, "User McUserface's flag")
        self.assertContains(resp, 'Submission 1 by ~foo')

    def test_creates_flag_with_user(self):
        self.client.login(username='user', password='user pass')
        payload = {
            'content_type': 'usermgmt:profile',
            'object_id': self.social_mod.profile.id,
            'flag_type': Flag.CONTENT,
            'subject': 'it is bad',
            'body_raw': 'i did not like it, *no sir*',
        }
        resp = self.client.post(
            reverse('administration:create_flag'), payload, follow=True)
        self.assertContains(resp, "User McUserface's flag")

    def test_creates_flag_without_owner(self):
        # A taggit Tag has no owner, so the flag gets none either.
        flagged_tag = Tag(name='Test tag')
        flagged_tag.save()
        self.client.login(username='user', password='user pass')
        payload = {
            'content_type': 'taggit:tag',
            'object_id': flagged_tag.id,
            'flag_type': Flag.CONTENT,
            'subject': 'it is bad',
            'body_raw': 'i did not like it, *no sir*',
        }
        resp = self.client.post(
            reverse('administration:create_flag'), payload, follow=True)
        self.assertContains(resp, "User McUserface's flag")

    def test_remove_immutable_assignment_99(self):
        self.client.login(username='user', password='user pass')
        payload = {
            'content_type': 'usermgmt:profile',
            'object_id': self.bar.id,
            'flag_type': Flag.CONTENT,
            'subject': 'it is bad',
            'body_raw': 'i did not like it, *no sir*',
        }
        resp = self.client.post(
            reverse('administration:create_flag'), payload, follow=True)
        self.assertContains(resp, "User McUserface's flag")
class ExistingFlagBaseTestCase(BaseFlagTestCase):
    """Fixture layer adding one active content and one active social flag."""

    @classmethod
    def setUpTestData(cls):
        super(ExistingFlagBaseTestCase, cls).setUpTestData()
        # Content flag raised by bar against user's first submission.
        cls.active_content_flag = Flag(
            flag_type=Flag.CONTENT,
            flagged_by=cls.bar,
            flagged_object_owner=cls.user,
            object_model=cls.submission1,
            subject='user + submission1 + content + active',
            body_raw='Test flag')
        cls.active_content_flag.save()
        # Social flag raised by bar against foo's profile.
        cls.active_social_flag = Flag(
            flag_type=Flag.SOCIAL,
            flagged_by=cls.bar,
            flagged_object_owner=cls.foo,
            object_model=cls.foo.profile,
            subject='foo + foo.profile + social + active',
            body_raw='Test flag')
        cls.active_social_flag.save()
class ModelTestCase(ExistingFlagBaseTestCase):
    """String representations of the Flag model."""

    def test_str(self):
        expected = ('user + submission1 + content + active '
                    '(against Submission 1 by ~foo (id:1))')
        self.assertEqual(self.active_content_flag.__str__(), expected)

    def test_unicode(self):
        expected = ('user + submission1 + content + active '
                    '(against Submission 1 by ~foo (id:1))')
        self.assertEqual(self.active_content_flag.__unicode__(), expected)
@tag('as_user')
class TestViewFlagViewAsUser(ExistingFlagBaseTestCase):
    """Plain users may only view flags they participate in."""

    def _view(self, flag):
        # GET the detail page for the given flag.
        url = reverse('administration:view_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.get(url)

    def test_forbidden_unless_participant(self):
        self.client.login(username='user', password='user pass')
        resp = self._view(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_renders_flag_if_participant(self):
        self.active_content_flag.participants.add(self.user)
        self.client.login(username='user', password='user pass')
        resp = self._view(self.active_content_flag)
        self.assertContains(resp, 'user + submission1 + content + active')
@tag('as_social_mod')
class TestViewFlagViewAsSocialMod(ExistingFlagBaseTestCase):
    """Social mods view social flags freely, content flags only as participant."""

    def _view(self, flag):
        # GET the detail page for the given flag.
        url = reverse('administration:view_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.get(url)

    def test_content_flag_forbidden_unless_participant(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._view(self.active_content_flag)
        self.assertEqual(403, resp.status_code)

    def test_renders_content_flag_if_participant(self):
        self.active_content_flag.participants.add(self.social_mod)
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._view(self.active_content_flag)
        self.assertContains(resp, 'user + submission1 + content + active')

    def test_renders_social_flag(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._view(self.active_social_flag)
        self.assertContains(resp, 'foo + foo.profile + social + active')
@tag('as_content_mod')
class TestViewFlagViewAsContentMod(ExistingFlagBaseTestCase):
    """Content mods view content flags freely, social flags only as participant."""

    def _view(self, flag):
        # GET the detail page for the given flag.
        url = reverse('administration:view_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.get(url)

    def test_social_flag_forbidden_unless_participant(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._view(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_renders_social_flag_if_participant(self):
        self.active_social_flag.participants.add(self.content_mod)
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._view(self.active_social_flag)
        self.assertContains(resp, 'foo + foo.profile + social + active')

    def test_renders_content_flag(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._view(self.active_content_flag)
        self.assertContains(resp, 'user + submission1 + content + active')
@tag('as_superuser')
class TestViewFlagViewAsSuperuser(ExistingFlagBaseTestCase):
    """Superusers can view any flag of either type."""

    def test_renders_flag(self):
        self.client.login(username='superuser', password='superuser pass')
        for flag, marker in (
                (self.active_content_flag,
                 'user + submission1 + content + active'),
                (self.active_social_flag,
                 'foo + foo.profile + social + active')):
            url = reverse('administration:view_flag',
                          kwargs={'flag_id': flag.id})
            self.assertContains(self.client.get(url), marker)
class TestListParticipatingFlagsView(ExistingFlagBaseTestCase):
    """Listing only the flags the current moderator participates in."""

    def test_renders_no_flags(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self.client.get(
            reverse('administration:list_participating_flags'))
        self.assertContains(resp, 'No flags to display')

    def test_renders_own_flags(self):
        self.active_social_flag.participants.add(self.content_mod)
        self.active_content_flag.participants.add(self.content_mod)
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self.client.get(
            reverse('administration:list_participating_flags'))
        self.assertContains(resp, 'foo + foo.profile + social + active')
        self.assertContains(resp, 'user + submission1 + content + active')
@tag('as_user')
class TestJoinFlagViewAsUser(ExistingFlagBaseTestCase):
    """Plain users may not join flags at all."""

    def test_forbidden(self):
        self.client.login(username='user', password='user pass')
        url = reverse('administration:join_flag',
                      kwargs={'flag_id': self.active_social_flag.id})
        resp = self.client.post(url, follow=True)
        self.assertContains(resp, 'not authorized to access this page')
@tag('as_social_mod')
class TestJoinFlagViewAsSocialMod(ExistingFlagBaseTestCase):
    """Joining flags as a social moderator."""

    def _join(self, flag):
        # POST to the join endpoint for the given flag, following redirects.
        url = reverse('administration:join_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.post(url, follow=True)

    def test_joining_content_flag_forbidden(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._join(self.active_content_flag)
        self.assertEqual(403, resp.status_code)

    def test_cant_join_resolved_flag(self):
        self.active_social_flag.resolved = timezone.now()
        self.active_social_flag.save()
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._join(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_warn_if_already_participant(self):
        self.active_social_flag.participants.add(self.social_mod)
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._join(self.active_social_flag)
        self.assertContains(resp,
                            'You are already a participant in this flag')

    def test_join_flag(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._join(self.active_social_flag)
        self.assertContains(resp,
                            'You are now a participant in this flag')
@tag('as_content_mod')
class TestJoinFlagViewAsContentMod(ExistingFlagBaseTestCase):
    """Joining flags as a content moderator."""

    def _join(self, flag):
        # POST to the join endpoint for the given flag, following redirects.
        url = reverse('administration:join_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.post(url, follow=True)

    def test_joining_social_flag_forbidden(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._join(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_cant_join_resolved_flag(self):
        self.active_content_flag.resolved = timezone.now()
        self.active_content_flag.save()
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._join(self.active_content_flag)
        self.assertEqual(403, resp.status_code)

    def test_warn_if_already_participant(self):
        self.active_content_flag.participants.add(self.content_mod)
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._join(self.active_content_flag)
        self.assertContains(resp,
                            'You are already a participant in this flag')

    def test_join_flag(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._join(self.active_content_flag)
        self.assertContains(resp,
                            'You are now a participant in this flag')
@tag('as_superuser')
class TestJoinFlagViewAsSuperuser(ExistingFlagBaseTestCase):
    """Joining flags as a superuser, including participant notification."""

    def _join(self, flag):
        # POST to the join endpoint for the given flag, following redirects.
        url = reverse('administration:join_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.post(url, follow=True)

    def test_cant_join_resolved_flag(self):
        self.active_social_flag.resolved = timezone.now()
        self.active_social_flag.save()
        self.client.login(username='superuser', password='superuser pass')
        resp = self._join(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_warn_if_already_participant(self):
        self.active_social_flag.participants.add(self.superuser)
        self.client.login(username='superuser', password='superuser pass')
        resp = self._join(self.active_social_flag)
        self.assertContains(resp,
                            'You are already a participant in this flag')

    @mock.patch.object(usermodels, 'Notification')
    def test_join_flag(self, mock_notification):
        # Pre-existing participant who should be notified of the join.
        self.active_social_flag.participants.add(self.user)
        self.client.login(username='superuser', password='superuser pass')
        resp = self._join(self.active_social_flag)
        self.assertContains(resp,
                            'You are now a participant in this flag')
        # BUG FIX: ``mock_notification.called_once`` is an auto-created
        # child Mock (always truthy), so the original assertion could
        # never fail. Assert on the real ``called`` attribute instead.
        self.assertTrue(mock_notification.called)
@tag('as_user')
class TestResolveFlagViewAsUser(ExistingFlagBaseTestCase):
    """Plain users may not resolve flags at all."""

    def test_forbidden(self):
        self.client.login(username='user', password='user pass')
        url = reverse('administration:resolve_flag',
                      kwargs={'flag_id': self.active_social_flag.id})
        resp = self.client.post(url, follow=True)
        self.assertContains(resp, 'not authorized to access this page')
@tag('as_social_mod')
class TestResolveFlagViewAsSocialMod(ExistingFlagBaseTestCase):
    """Resolving flags as a social moderator."""

    def _resolve(self, flag, data=None):
        # POST to the resolve endpoint for the given flag.
        url = reverse('administration:resolve_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.post(url, data, follow=True)

    def test_resolving_content_flag_forbidden(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._resolve(self.active_content_flag)
        self.assertEqual(403, resp.status_code)

    def test_resolving_resolved_flag_forbidden(self):
        self.active_social_flag.resolved = timezone.now()
        self.active_social_flag.save()
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._resolve(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_cant_resolve_flag_not_participating_in(self):
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._resolve(self.active_social_flag)
        self.assertContains(resp,
                            'You must be participating in this flag')

    def test_require_resolution(self):
        self.active_social_flag.participants.add(self.social_mod)
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._resolve(self.active_social_flag)
        self.assertContains(resp, 'You must provide a resolution')

    def test_flag_resolved(self):
        self.active_social_flag.participants.add(self.social_mod)
        self.client.login(username='social_mod', password='social_mod pass')
        resp = self._resolve(self.active_social_flag,
                             {'resolution': 'done'})
        self.assertContains(resp, 'Flag resolved')
@tag('as_content_mod')
class TestResolveFlagViewAsContentMod(ExistingFlagBaseTestCase):
    """Resolving flags as a content moderator."""

    def _resolve(self, flag, data=None):
        # POST to the resolve endpoint for the given flag.
        url = reverse('administration:resolve_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.post(url, data, follow=True)

    def test_resolving_social_flag_forbidden(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._resolve(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_resolving_resolved_flag_forbidden(self):
        self.active_content_flag.resolved = timezone.now()
        self.active_content_flag.save()
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._resolve(self.active_content_flag)
        self.assertEqual(403, resp.status_code)

    def test_cant_resolve_flag_not_participating_in(self):
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._resolve(self.active_content_flag)
        self.assertContains(resp,
                            'You must be participating in this flag')

    def test_require_resolution(self):
        self.active_content_flag.participants.add(self.content_mod)
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._resolve(self.active_content_flag)
        self.assertContains(resp, 'You must provide a resolution')

    def test_flag_resolved(self):
        self.active_content_flag.participants.add(self.content_mod)
        self.client.login(username='content_mod', password='content_mod pass')
        resp = self._resolve(self.active_content_flag,
                             {'resolution': 'done'})
        self.assertContains(resp, 'Flag resolved')
@tag('as_superuser')
class TestResolveFlagViewAsSuperuser(ExistingFlagBaseTestCase):
    """Resolving flags as a superuser, including participant notification."""

    def _resolve(self, flag, data=None):
        # POST to the resolve endpoint for the given flag.
        url = reverse('administration:resolve_flag',
                      kwargs={'flag_id': flag.id})
        return self.client.post(url, data, follow=True)

    def test_resolving_resolved_flag_forbidden(self):
        self.active_social_flag.resolved = timezone.now()
        self.active_social_flag.save()
        self.client.login(username='superuser', password='superuser pass')
        resp = self._resolve(self.active_social_flag)
        self.assertEqual(403, resp.status_code)

    def test_cant_resolve_flag_not_participating_in(self):
        self.client.login(username='superuser', password='superuser pass')
        resp = self._resolve(self.active_content_flag)
        self.assertContains(resp,
                            'You must be participating in this flag')

    def test_require_resolution(self):
        self.active_social_flag.participants.add(self.superuser)
        self.client.login(username='superuser', password='superuser pass')
        resp = self._resolve(self.active_social_flag)
        self.assertContains(resp, 'You must provide a resolution')

    @mock.patch.object(usermodels, 'Notification')
    def test_flag_resolved(self, mock_notification):
        self.active_social_flag.participants.add(self.superuser)
        # Second participant who should be notified of the resolution.
        self.active_social_flag.participants.add(self.social_mod)
        self.client.login(username='superuser', password='superuser pass')
        resp = self._resolve(self.active_social_flag,
                             {'resolution': 'done'})
        self.assertContains(resp, 'Flag resolved')
        # BUG FIX: ``mock_notification.called_once`` is an auto-created
        # child Mock (always truthy), so the original assertion could
        # never fail. Assert on the real ``called`` attribute instead.
        self.assertTrue(mock_notification.called)
|
{
"content_hash": "78073795e9f8e30f69eb2850702eefd4",
"timestamp": "",
"source": "github",
"line_count": 760,
"max_line_length": 79,
"avg_line_length": 41.05263157894737,
"alnum_prop": 0.6023397435897436,
"repo_name": "OpenFurry/honeycomb",
"id": "d97c7255248a303e7fd42f605a8f9879f1b64b4e",
"size": "31200",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "administration/test_flag.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10435"
},
{
"name": "HTML",
"bytes": "206456"
},
{
"name": "JavaScript",
"bytes": "1248"
},
{
"name": "Makefile",
"bytes": "5175"
},
{
"name": "Python",
"bytes": "530905"
},
{
"name": "Shell",
"bytes": "712"
}
],
"symlink_target": ""
}
|
import json
import logging
import ochopod
import pykka
import time
import uuid
from flask import Flask, request
from kazoo.exceptions import ConnectionClosedError, NodeExistsError
from kazoo.client import KazooClient, KazooState
from kazoo.recipe.lock import LockTimeout
from ochopod.core.fsm import shutdown, spin_lock, Aborted, FSM
from pykka import ThreadingFuture, Timeout
from threading import Event
#: Our ochopod logger
logger = logging.getLogger('ochopod')
#: Root zookeeper node path (under which we store the pod data for each cluster). This path will prefix any node
#: we read or write (including the lock).
ROOT = '/ochopod/clusters'
#: We use the same tick for all our state-machines (namely one second). This quantity can be scaled up or
#: down depending on the actor (it is used as the delay component of the state-machine return tuples below)
SAMPLING = 1.0
class ZK(FSM):
    """
    Base layer dealing with zookeeper and in charge of writing the pod ephemeral node upon connection. The
    reset() state will by default loop back to initial() and properly de-allocate the kazoo driver. Once connected
    the machine will spin() until we raise something.
    Please note we support an explicit reset request which will trip the machine. This is used from the CLI to
    force a pod to completely disconnect/reconnect/reconfigure.
    """
    def __init__(self, brokers, scope, tag, breadcrumbs, hints):
        """
        :param brokers: list of zookeeper hosts joined into the kazoo connection string
        :param scope: cluster scope, combined with *tag* to form the zookeeper prefix
        :param tag: cluster tag
        :param breadcrumbs: arbitrary dict persisted in the pod's ephemeral node
        :param hints: shared dict; 'state' is maintained here ('follower' until promoted elsewhere)
        """
        super(ZK, self).__init__()
        self.breadcrumbs = breadcrumbs
        self.connected = 0
        self.brokers = brokers
        self.force_reset = 0
        self.hints = hints
        self.hints['state'] = 'follower'
        # - random uuid identifying this pod's ephemeral node under <prefix>/pods
        self.id = uuid.uuid4()
        self.prefix = '%s/%s.%s' % (ROOT, scope, tag)
        self.scope = scope
        # - sequence counter assigned on first registration and then pinned (see wait_for_cnx())
        self.seq = None
        self.tag = tag
    def feedback(self, state):
        """Kazoo listener callback: relay the connection state change to the actor."""
        #
        # - forward the state change to the actor via a message
        # - the specialized() hook will process this safely
        #
        self.actor_ref.tell(
            {
                'request': 'state change',
                'state': state
            })
    def reset(self, data):
        """Stop/close the kazoo client if any, then loop back to initial() (or finish if terminating)."""
        self.connected = 0
        self.force_reset = 0
        self.hints['state'] = 'follower'
        logger.warning('%s : actor reset (%s)' % (self.path, data.cause))
        if hasattr(data, 'zk'):
            #
            # - gracefully shut our client down
            #
            data.zk.stop()
            logger.debug('%s : zk client stopped, releasing resources' % self.path)
            data.zk.close()
        if self.terminate:
            super(ZK, self).reset(data)
        return 'initial', data, 0
    def initial(self, data):
        """Allocate a fresh kazoo client against all brokers and start it."""
        #
        # - setup a new kazoo client
        #
        cnx_string = ','.join(self.brokers)
        logger.debug('%s : connecting @ %s' % (self.path, cnx_string))
        data.zk = KazooClient(hosts=cnx_string, timeout=5.0, read_only=0, randomize_hosts=1)
        data.zk.add_listener(self.feedback)
        data.zk.start()
        data.n = 0
        return 'wait_for_cnx', data, 0
    def wait_for_cnx(self, data):
        """Spin until the driver reports CONNECTED, then write our ephemeral pod node and move to spin()."""
        if self.force_reset or self.terminate:
            raise Aborted('resetting')
        #
        # - loop back if we haven't received a CONNECTED event from the driver
        #
        if not self.connected:
            return 'wait_for_cnx', data, SAMPLING
        #
        # - the /pods node holds all our ephemeral per-container data (one container == one child node)
        # - the /hash node stores the last recorded md5 hash (local pods + dependencies), which we use to
        # flag any change amongst the pods or their dependencies
        #
        data.zk.ensure_path('%s/pods' % self.prefix)
        data.zk.ensure_path('%s/hash' % self.prefix)
        try:
            #
            # - register ourselves by creating an ephemeral
            # - this is where we can store arbitrary information (e.g our breadcrumbs)
            # - we ask for a sequence counter as well which we then keep (e.g in case of connection loss or reset
            # we guarantee the pod won't get assigned a new index)
            # - this is *critical* for some use-cases (e.g Kafka where the broker index must remain the same)
            #
            path = data.zk.create('%s/pods/%s.' % (self.prefix, self.id), ephemeral=True, sequence=True)
            tokens = path.split('.')
            if self.seq is None:
                self.seq = int(tokens[-1])
            self.breadcrumbs['seq'] = self.seq
            js = json.dumps(self.breadcrumbs)
            # NOTE(review): kazoo set() expects bytes on python 3 while js is a str here;
            # this looks like python 2 era code -- confirm the target runtime.
            data.zk.set(path, js)
        except NodeExistsError:
            #
            # - if the node is already there we just recovered from a zookeeper connection loss
            # and /snapshot has not been phased out yet .. this is not an issue, simply pause a bit
            # to re-attempt later
            #
            logger.debug('%s : pod %s is already there (probably a zk reconnect)' % (self.path, self.id))
            return 'wait_for_cnx', data, 5.0 * SAMPLING
        logger.debug('%s : registered as %s (#%d)' % (self.path, self.id, self.seq))
        data.connected_at = time.time()
        return 'spin', data, 0
    def spin(self, data):
        """Abstract steady state; subclasses implement the actual connected behavior."""
        raise NotImplementedError
    def specialized(self, msg):
        """Handle actor messages: kazoo state changes and explicit reset requests."""
        assert 'request' in msg, 'bogus message received ?'
        req = msg['request']
        if req == 'state change':
            #
            # - we got a zk state change
            # - we only use the switch to CONNECTED to go from wait_for_cnx() to spin()
            # - ZK disconnects (LOST or SUSPENDED) are simply flagged when exceptions are raised
            #
            state = msg['state']
            current = 'connected' if self.connected else 'disconnected'
            logger.debug('%s : zk state change -> "%s" (%s)' % (self.path, str(state), current))
            if self.connected and state != KazooState.CONNECTED:
                logger.warning('%s : lost connection (%s) / forcing a reset' % (self.path, str(state)))
                self.force_reset = 1
                self.connected = 0
            elif state == KazooState.CONNECTED:
                self.connected = 1
        elif req == 'reset':
            #
            # - we got a request to explicitly force a reset
            # - this is typically invoked from the CLI
            #
            self.force_reset = 1
        else:
            super(ZK, self).specialized(msg)
class Coordinator(ZK):
    """
    Leader lock implementation logic, based on :class:`ZK`. The spin() state will attempt to grab a lock (we
    simply use the Kazoo recipe). If we obtain the lock we boot the controller actor (e.g the clustering model)
    and then stay there by spin-locking on its latch. If the controller goes down for any reason (typically a
    zookeeper error or a shutdown request) we'll reset (and disconnect from zookeeper).
    """
    def __init__(self, brokers, scope, tag, port, breadcrumbs, model, hints):
        # - model is the controller actor class started once we win the leader lock
        # - port is forwarded to the controller when it is booted
        super(Coordinator, self).__init__(brokers, scope, tag, breadcrumbs, hints)
        self.model = model
        self.path = 'coordinator'
        self.port = port
    def reset(self, data):
        # Tear down the leader-only state before delegating to the base ZK reset.
        if hasattr(data, 'controller'):
            #
            # - don't forget to nuke our controller before resetting
            #
            shutdown(data.controller)
        if hasattr(data, 'lock'):
            #
            # - make sure to remove the lock attribute
            # - it's useless to release the lock as we'll release the client altogether
            #
            delattr(data, 'lock')
        return super(Coordinator, self).reset(data)
    def spin(self, data):
        """Try to acquire the leader lock; loop in this state until we get it."""
        #
        # - if the termination trigger is set, abort immediately
        #
        if self.force_reset or self.terminate:
            raise Aborted('resetting')
        #
        # - attempt to fetch the lock
        # - allocate it if not already done
        # - it is *important* to just allocate one lock as there is a leak in kazoo
        #
        if not hasattr(data, 'lock'):
            data.lock = data.zk.Lock('%s/coordinator' % self.prefix)
        try:
            #
            # - attempt to lock within a 5 seconds timeout to avoid stalling in some cases
            #
            if data.lock.acquire(timeout=5.0 * SAMPLING):
                return 'start_controller', data, 0
        except LockTimeout:
            pass
        return 'spin', data, 0
    def start_controller(self, data):
        """We won the lock: boot the controller actor and transition to the lock state."""
        #
        # - if the termination trigger is set, abort immediately
        # - this is important as it is possible to somehow get the lock after a suspend (acquire() returns
        # true in that case which is misleading)
        #
        if self.force_reset or self.terminate:
            raise Aborted('resetting')
        #
        # - we have the lock (e.g we are the leader)
        # - start the controller actor
        #
        data.latch = ThreadingFuture()
        logger.debug('%s : lock acquired @ %s, now leading' % (self.path, self.prefix))
        data.controller = self.model.start(data.zk, self.id, self.hints, self.scope, self.tag, self.port, data.latch)
        return 'lock', data, 0
    def lock(self, data):
        """Remain leader: spin on the controller latch until it dies or a reset is requested."""
        #
        # - if the termination trigger is set, abort immediately
        #
        if self.force_reset or self.terminate:
            raise Aborted('resetting')
        #
        # - spin-lock on the controller latch
        # - any catastrophic plug failure will be trapped that way
        #
        try:
            # NOTE(review): the Event() result is discarded — this looks like dead code;
            # confirm it has no side effect before removing.
            Event()
            out = data.latch.get(SAMPLING)
            if isinstance(out, Exception):
                raise out
        except Timeout:
            pass
        return 'lock', data, 0
|
{
"content_hash": "7503bc86ff7e9141bd119ec4803a2f65",
"timestamp": "",
"source": "github",
"line_count": 291,
"max_line_length": 117,
"avg_line_length": 33.597938144329895,
"alnum_prop": 0.5839214482970236,
"repo_name": "titilambert/ochopod",
"id": "314759555e77ee63a8ab0bdd072500ab1ae9ad79",
"size": "10382",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ochopod/core/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "136775"
}
],
"symlink_target": ""
}
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Generate and process one synthetic series: 128 daily points, MovingMedian trend,
# 30-step cycle, AR(12) component, no transform, zero noise, 100 exogenous variables.
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 12);
|
{
"content_hash": "523b78a4e88485872501e29841a0fe8e",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 165,
"avg_line_length": 37.857142857142854,
"alnum_prop": 0.7056603773584905,
"repo_name": "antoinecarme/pyaf",
"id": "22b47c5d6b0ca1af9d4ae88d101084a6d27e28cd",
"size": "265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_None/trend_MovingMedian/cycle_30/ar_12/test_artificial_128_None_MovingMedian_30_12_100.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
import os
import sys
import os.path as op
from pathlib import Path
from glob import glob
import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
from scipy.io import loadmat, savemat
from scipy import stats, signal
# from scipy.stats import distributions as dist
# from scipy import fftpack
# import seaborn as sns?
try:
from showit import image
except ImportError:
pass
from .utils import find_index, find_range
from .freq import dB
|
{
"content_hash": "a27a9325b1313d93ccdb3fc02dabec67",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 47,
"avg_line_length": 18.76923076923077,
"alnum_prop": 0.7807377049180327,
"repo_name": "mmagnuski/mypy",
"id": "17ab5c4b2fc01a93d2400c54a1322ca1ad894510",
"size": "590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sarna/init.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103501"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.project.databases import views
# URL routes for the "databases" dashboard panel: index listing, launch-instance
# form, and per-instance detail view.
# NOTE(review): django.conf.urls.patterns() is deprecated since Django 1.8 and
# removed in 1.10 — switch to a plain list of url() entries when upgrading.
urlpatterns = patterns(
    '',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^launch$', views.LaunchInstanceView.as_view(), name='launch'),
    url(r'^(?P<instance_id>[^/]+)/$', views.DetailView.as_view(),
        name='detail'),
)
|
{
"content_hash": "2dd795082177cd67c98ac25162beda60",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 72,
"avg_line_length": 31.615384615384617,
"alnum_prop": 0.6642335766423357,
"repo_name": "spandanb/horizon",
"id": "d5d51ce6766137eee534fdfb2f531da428380179",
"size": "1021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack_dashboard/dashboards/project/databases/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from test_assets.public import TestAsset
from ..cloud_management import CloudManager, CloudProvider
from .test_profitbricks import TestProfitbricksAdapter
class TestCloudManager(TestProfitbricksAdapter):
    """Tests for CloudManager construction and its settings validation."""

    def setUp(self):
        target_settings = TestAsset.MIGRATION_PLAN_MOCK['target_cloud']
        self.adapter = CloudManager(target_settings)

    def test_invalid_cloud_settings(self):
        """An empty settings dict must be rejected."""
        with self.assertRaises(CloudManager.InvalidCloudSettingsException):
            CloudManager({})

    def test_unsupported_cloud_provider(self):
        """An unknown provider id must raise UnsupportedProviderException."""
        with self.assertRaises(CloudProvider.UnsupportedProviderException):
            CloudManager({'provider': 'NOT_SUPPORTED'})
|
{
"content_hash": "85ba2dfa9cb27b6a22f70374255a32e2",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 82,
"avg_line_length": 35.611111111111114,
"alnum_prop": 0.7519500780031201,
"repo_name": "jdepoix/goto_cloud",
"id": "eef59e75615d66735d4f617a613d7e66784495ff",
"size": "641",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "goto_cloud/cloud_management/tests/test_cloud_management.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "354421"
},
{
"name": "Shell",
"bytes": "619"
}
],
"symlink_target": ""
}
|
import sqlite3
from blaze.sources.descriptors.byteprovider import ByteProvider
from blaze.byteproto import CONTIGUOUS, CHUNKED, STREAM, ACCESS_READ
#from blaze.datadescriptor import SqlDataDescriptor
from blaze.layouts.categorical import Simple
class SqliteSource(ByteProvider):
    """Byte provider backed by a sqlite3 connection (on-disk or in-memory)."""
    read_capabilities = STREAM
    write_capabilities = STREAM
    access_capabilities = ACCESS_READ
    def __init__(self, data=None, dshape=None, params=None):
        # NOTE(review): params is used with both `in` and attribute access, so it
        # presumably is a dict-like object with attribute lookup — confirm.
        #assert (data is not None) or (dshape is not None) or \
        #(params.get('storage'))
        if 'storage' in params and params.storage:
            self.conn = sqlite3.connect(params.storage)
        else:
            # no storage path given: fall back to a transient in-memory database
            self.conn = sqlite3.connect(':memory:')
    def register_custom_types(self, name, ty, con, decon):
        """Register a Python type adapter/converter pair with sqlite3."""
        sqlite3.register_adapter(ty, con)
        sqlite3.register_converter(name, decon)
    def read_desc(self, query):
        # NOTE(review): the SqlDataDescriptor import is commented out at the top of
        # this file — unless it is defined elsewhere, this raises NameError at runtime.
        return SqlDataDescriptor('sqlite_dd', None, self.conn)
    def repr_data(self):
        """Data is produced lazily, so there is nothing concrete to show."""
        return '<Deferred>'
    # Return the layout of the data
    def default_layout(self):
        return Simple()
        #return Simple(self.conn.read_schema())
|
{
"content_hash": "4697e2ee1924b6dc6c3e5d8f7efdc99b",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 68,
"avg_line_length": 31.486486486486488,
"alnum_prop": 0.6746781115879829,
"repo_name": "cornsea/blaze",
"id": "bd329f379cb87f71f8ee140dd589bd188efe6a83",
"size": "1165",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "blaze/sources/sql.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [],
"symlink_target": ""
}
|
from rest_framework import serializers
from polls.models import Questionary, Question, Choice, Answer
class QuestionarySerializer(serializers.ModelSerializer):
    """ModelSerializer exposing the Questionary model."""
    class Meta:
        model = Questionary
class QuestionSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing the Question model."""
    class Meta:
        model = Question
class ChoiceSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing the Choice model."""
    class Meta:
        model = Choice
class AnswerSerializer(serializers.ModelSerializer):
    """ModelSerializer exposing the Answer model."""
    class Meta:
        model = Answer
|
{
"content_hash": "2d06f2354118cd05dc713f9c4ad52131",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 62,
"avg_line_length": 22.363636363636363,
"alnum_prop": 0.7479674796747967,
"repo_name": "ewokcillo/django_workshop",
"id": "8971be22fe4cb7765964a533dad4cf7c8c1c9909",
"size": "507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "polling/polls/serializers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9326"
}
],
"symlink_target": ""
}
|
import datetime as dt
from flask.ext.login import UserMixin
from fpage.database import db, CRUDMixin
from fpage.extensions import bcrypt
class User(UserMixin, CRUDMixin, db.Model):
    """Application user account; Flask-Login compatible via UserMixin."""
    __tablename__ = 'users'
    username = db.Column(db.String(80), unique=True, nullable=False)
    email = db.Column(db.String(80), unique=True, nullable=False)
    password = db.Column(db.String, nullable=False) # The hashed password
    created_at = db.Column(db.DateTime(), nullable=False)
    first_name = db.Column(db.String(30), nullable=True)
    last_name = db.Column(db.String(30), nullable=True)
    active = db.Column(db.Boolean())
    is_admin = db.Column(db.Boolean())
    # count of unread items (notifications/messages — semantics defined by callers)
    unread_count = db.Column(db.Integer, nullable=True)
    def __init__(self, username=None, email=None, password=None,
                 first_name=None, last_name=None,
                 active=True, is_admin=False):
        """Create a user; the plaintext password (if given) is hashed immediately."""
        self.username = username
        self.email = email
        if password:
            self.set_password(password)
        self.active = active
        self.is_admin = is_admin
        # creation timestamp is recorded in UTC
        self.created_at = dt.datetime.utcnow()
        self.first_name = first_name
        self.last_name = last_name
    def set_password(self, password):
        """Hash *password* with bcrypt and store the digest."""
        self.password = bcrypt.generate_password_hash(password)
    def check_password(self, password):
        """Return True when *password* matches the stored bcrypt hash."""
        return bcrypt.check_password_hash(self.password, password)
    @property
    def iso_time(self):
        """Creation timestamp as an ISO-8601 string."""
        return self.created_at.isoformat()
    @property
    def full_name(self):
        # NOTE(review): yields the string "None None" when first/last name are unset.
        return "{0} {1}".format(self.first_name, self.last_name)
    def __repr__(self):
        return '<User "{username}">'.format(username=self.username)
|
{
"content_hash": "cb4e575b66389966f0305005ce76aee8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 74,
"avg_line_length": 35,
"alnum_prop": 0.6517857142857143,
"repo_name": "Nikola-K/fpage",
"id": "f327d38e097d11e6f8e419cf5b579c77400d089a",
"size": "1704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fpage/user/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "207326"
},
{
"name": "JavaScript",
"bytes": "250318"
},
{
"name": "Python",
"bytes": "47445"
},
{
"name": "Shell",
"bytes": "110"
}
],
"symlink_target": ""
}
|
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings module unless the caller already
    # set DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "istarrt.settings")
    from django.core.management import execute_from_command_line
    # Dispatch the management command named on the command line (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
|
{
"content_hash": "4ca7ec4872092f10d7f1153d070777c3",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 71,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.7105263157894737,
"repo_name": "torablien/istarrt",
"id": "edf215e4ae70e755cfb04cd0ce4546b20594290d",
"size": "250",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "132234"
},
{
"name": "HTML",
"bytes": "26476"
},
{
"name": "JavaScript",
"bytes": "19779"
},
{
"name": "Python",
"bytes": "10021"
}
],
"symlink_target": ""
}
|
import os
import re
import sys
import imp
import argparse
#------------------------------------------------------------------------------#
from .core import _version_numbers
from .config import config_types, load_mount_source_config, _existing_repo_names
from .components import shared_component_names
#------------------------------------------------------------------------------#
class ArgumentSetup:
    """Bag of boolean flags telling the parser which option groups to install.

    Everything defaults to False; command front-ends flip on what they need
    before constructing the argument parser.
    """
    def __init__(self):
        # positional-selector groups (@repo, :tag, ^hash, file paths)
        for flag in (
            "with_repo_names", "with_repo_paths", "with_tag_labels",
            "with_file_paths", "with_obj_hashes", "existing_repos",
        ):
            setattr(self, flag, False)
        # kept equal to with_repo_names at construction time (both False);
        # callers may override either one independently afterwards
        self.at_least_one_repo = self.with_repo_names
        # option groups unrelated to the positional selectors
        for flag in (
            "with_config_type", "with_repo_options", "with_mount_source",
            "with_mount_point", "with_shared_components",
            "with_print_repo_name",
        ):
            setattr(self, flag, False)
#------------------------------------------------------------------------------#
class __RelfsArgumentParser(argparse.ArgumentParser):
    """ArgumentParser specialization for the relfs command-line tools.

    The installed option groups are driven by an ArgumentSetup instance.
    Positional arguments use sigil prefixes: @repo, :tag-label, ^obj-hash,
    plain file paths — process_parsed_options() sorts them into the proper
    option attributes after parsing.
    """
    #--------------------------------------------------------------------------#
    def _port_number_value(self, arg):
        """Validate a TCP port number argument (1 .. 65535)."""
        msg_fmt = "'%s' is not a valid port number"
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # `except ValueError` would be tighter.
        try:
            port_num = int(arg)
        except:
            raise argparse.ArgumentTypeError(msg_fmt % str(arg))
        if port_num <= 0 or port_num >= 2**16:
            raise argparse.ArgumentTypeError(msg_fmt % str(arg))
        return port_num
    #--------------------------------------------------------------------------#
    def _identifier_value(self, arg, name):
        """Validate that *arg* is a word-like identifier; *name* is used in the error."""
        msg_fmt = "'%s' is not a valid " + name + " identifier"
        ident = re.compile(r"^[^\d\W]\w*\Z", re.UNICODE)
        if ident.match(arg) is None:
            self.error(msg_fmt % arg)
        return arg
    #--------------------------------------------------------------------------#
    def _repo_name_value(self, arg):
        """Validate a repository name argument."""
        return self._identifier_value(arg, 'repository')
    #--------------------------------------------------------------------------#
    def _tag_name_value(self, arg):
        """Validate a tag label argument."""
        return self._identifier_value(arg, 'tag')
    #--------------------------------------------------------------------------#
    def _obj_hash_value(self, arg):
        """Validate an object hash argument (hex string, at least 6 digits)."""
        msg_fmt = "'%s' is not a valid hash value"
        ident = re.compile(r"^[0-9A-Fa-f]{6}[0-9A-Fa-f]*\Z", re.UNICODE)
        if ident.match(arg) is None:
            self.error(msg_fmt % arg)
        return arg
    #--------------------------------------------------------------------------#
    def _default_mount_point(self):
        """Default mount point: $HOME/RelFs."""
        return os.path.join(
            os.environ.get("HOME", os.path.expanduser("~")),
            "RelFs")
    #--------------------------------------------------------------------------#
    def _valid_mount_source(self, arg):
        """Validate a mount-source directory (must contain .relfs/config); returns the real path."""
        if not os.path.isdir(arg):
            msg = "'%s' is not a directory path" % (arg)
            raise argparse.ArgumentTypeError(msg)
        dir_path = os.path.realpath(arg)
        if not os.path.isfile(os.path.join(dir_path, ".relfs", "config")):
            msg = "'%s' is not a relfs mountable directory" % (arg)
            raise argparse.ArgumentTypeError(msg)
        return dir_path
    #--------------------------------------------------------------------------#
    def _valid_mount_point(self, arg):
        """Validate a mount-point directory (must be empty except .relfs); returns the real path."""
        if not os.path.isdir(arg):
            msg = "'%s' is not a directory path" % (arg)
            raise argparse.ArgumentTypeError(msg)
        dir_path = os.path.realpath(arg)
        for entry in os.listdir(dir_path):
            if entry != '.relfs':
                msg = "directory '%s' is not empty" % (arg)
                raise argparse.ArgumentTypeError(msg)
        return dir_path
    #--------------------------------------------------------------------------#
    # NOTE(review): the ArgumentSetup() default is a single shared instance
    # (mutable default argument) — callers should pass their own setup.
    def __init__(self, arg_setup = ArgumentSetup(), **kw):
        """Install the option groups selected by *arg_setup*; **kw goes to ArgumentParser."""
        argparse.ArgumentParser.__init__(self, **kw)
        self.arg_setup = arg_setup
        self.add_argument(
            "--version",
            action="version",
            version="%(prog)s relfs-"+".".join([str(x) for x in _version_numbers])
        )
        self.add_argument(
            "--verbose", "-v",
            dest="verbosity",
            default=0,
            action="count"
        )
        if arg_setup.with_repo_names:
            # three flavors of --repository: restricted to existing repos,
            # name+path pairs, or free-form names
            if arg_setup.existing_repos:
                self.add_argument(
                    "--repository", "-r",
                    type=self._repo_name_value,
                    nargs=1,
                    dest="repositories",
                    choices=_existing_repo_names(),
                    default=list(),
                    action="append"
                )
            elif arg_setup.with_repo_paths:
                self.add_argument(
                    "--repository", "-r",
                    nargs=2,
                    dest="repositories",
                    metavar=('REPO-NAME', 'REPO-PATH'),
                    default=list(),
                    action="append"
                )
            else:
                self.add_argument(
                    "--repository", "-r",
                    type=self._repo_name_value,
                    nargs=1,
                    dest="repositories",
                    metavar='REPO-NAME',
                    default=list(),
                    action="append"
                )
        if arg_setup.with_tag_labels:
            self.add_argument(
                "--tag", "-t",
                type=self._tag_name_value,
                nargs='?',
                dest="tag_labels",
                metavar='TAG-LABEL',
                default=list(),
                action="append"
            )
        if arg_setup.with_obj_hashes:
            self.add_argument(
                "--obj", "-o",
                type=self._obj_hash_value,
                nargs='?',
                dest="obj_hashes",
                metavar='OBJ-HASH',
                default=list(),
                action="append"
            )
        if arg_setup.with_file_paths:
            self.add_argument(
                "--file", "-f",
                metavar='FILE-PATH',
                nargs='?',
                dest="file_paths",
                default=list(),
                action="append"
            )
        if arg_setup.with_config_type:
            # --config-type NAME plus one --NAME shortcut flag per config type
            ct_group = self.add_mutually_exclusive_group()
            ct_group.add_argument(
                "--config-type", "-C",
                nargs='?',
                dest="config_type",
                choices=config_types(),
                default="user",
                action="store"
            )
            for conf_typ in config_types():
                ct_group.add_argument(
                    "--%s" % conf_typ,
                    dest="config_type",
                    action="store_const",
                    const=conf_typ
                )
        if arg_setup.with_repo_options:
            self.add_argument(
                "--compress", "-c",
                type=bool,
                metavar='BOOL',
                nargs='?',
                dest="compress",
                default=True,
                action="store"
            )
        if arg_setup.with_mount_source:
            self.add_argument(
                "-M", "--mount-source",
                dest="mount_source",
                type=self._valid_mount_source,
                default=None,
                action="store",
                help="""Specifies the relational filesystem mount-source path"""
            )
        if arg_setup.with_mount_point:
            self.add_argument(
                "-m", "--mount-point",
                dest="mount_point",
                type=self._valid_mount_point,
                default=self._default_mount_point(),
                action="store",
                help="""Specifies the relational filesystem mount-point path"""
            )
        if arg_setup.with_repo_names or\
            arg_setup.with_tag_labels or\
            arg_setup.with_obj_hashes or\
            arg_setup.with_file_paths or\
            arg_setup.with_mount_source:
            # build the metavar/help text for the mixed positional argument list
            mvar_list = list()
            help_list = list()
            if arg_setup.with_repo_names:
                if arg_setup.existing_repos:
                    mvar_list += [
                        '@'+x.encode('ascii', 'ignore')
                        for x in _existing_repo_names()
                    ]
                    help_list.append('repository name')
                elif arg_setup.with_repo_paths:
                    mvar_list.append('@repo dir-path')
                    help_list.append('repository name and repository path')
                else:
                    mvar_list.append('@repo')
                    help_list.append('repository name')
            if arg_setup.with_tag_labels:
                mvar_list.append(':tag-label')
                help_list.append('tag label')
            if arg_setup.with_obj_hashes:
                mvar_list.append('^obj-hash')
                help_list.append('obj hash')
            if arg_setup.with_file_paths:
                mvar_list.append('file-path')
                help_list.append('file path')
            self.add_argument(
                "arguments",
                metavar="|".join(mvar_list),
                nargs='*',
                type=str,
                help=" or ".join(help_list)
            )
        if arg_setup.with_shared_components:
            self.add_argument(
                "--shared-components", "-S",
                dest="shared_component_names",
                choices=shared_component_names(),
                default=[],
                action="append"
            )
        if arg_setup.with_print_repo_name:
            self.add_argument(
                "--print-repo", "-pr",
                dest="do_print_repo_name",
                default=None,
                action="store_true"
            )
            self.add_argument(
                "--dont-print-repo", "-PR",
                dest="do_print_repo_name",
                action="store_false"
            )
        # --print-bash-complete is only offered when argparse2bco is installed
        try:
            imp.find_module('argparse2bco')
            self.add_argument(
                "--print-bash-complete",
                action="store_true",
                default=False
            )
        except ImportError: pass
    #--------------------------------------------------------------------------#
    def _normalize_list(self, lst):
        """Flatten nested lists and drop duplicates (order is not preserved)."""
        result = set()
        for item in lst:
            if type(item) is list:
                for subitem in self._normalize_list(item):
                    result.add(subitem)
            else:
                result.add(item)
        return list(result)
    #--------------------------------------------------------------------------#
    def process_parsed_options(self, options):
        """Post-process parsed options: sort sigil-prefixed positionals into their
        option attributes, validate them, and resolve file/repository paths.
        """
        options.arg_setup = self.arg_setup
        try:
            if options.print_bash_complete:
                self.print_bash_complete(
                    '_complete_' + re.sub('[^0-9a-zA-Z]+', '_', self.prog),
                    sys.argv[0]
                )
                self.exit()
        except AttributeError: pass
        if self.arg_setup.with_repo_paths:
            # pair each '@repo' positional with the path that must follow it
            # NOTE(review): xrange implies this module targets Python 2
            repos = list()
            for i in xrange(0, len(options.arguments)):
                try: this_arg = options.arguments[i]
                except IndexError: break
                try: next_arg = options.arguments[i+1]
                except IndexError: next_arg = ""
                if this_arg[0] == '@':
                    if not next_arg or next_arg[0] in ['@', ':', '^']:
                        self.error("a path is required after '%s'" % this_arg)
                    else:
                        repos.append([this_arg[1:], next_arg])
                        del(options.arguments[i+1])
        else:
            repos = [x[1:] for x in options.arguments if x[0] == '@']
        # split the remaining positionals by their sigil prefix
        tags = [x[1:] for x in options.arguments if x[0] == ':']
        hashes = [x[1:] for x in options.arguments if x[0] == '^']
        other = [x for x in options.arguments if x[0] not in ['@',':','^']]
        files = [x for x in other if os.path.isfile(x)]
        if self.arg_setup.with_mount_source:
            sources = [x for x in other if self._valid_mount_source(x)]
            sources = list(dict.fromkeys(sources))
            if options.mount_source:
                if len(sources) > 0:
                    self.error("too many mount source directories specified")
            else:
                if len(sources) < 1:
                    self.error("a mount source directory is required")
                elif len(sources) > 1:
                    self.error("too many mount source directories specified")
                else:
                    options.mount_source = sources[0]
            # NOTE(review): config load failures are only printed, not fatal
            try:
                mnt_src_config = load_mount_source_config(options)
                repos += mnt_src_config.repositories
            except Exception as bla: print(bla)
        if self.arg_setup.with_repo_names:
            options.repositories += repos
            if self.arg_setup.at_least_one_repo and\
                len(options.repositories) == 0:
                self.error("at least one repository name must be specified")
            options.repositories = self._normalize_list(options.repositories)
            if self.arg_setup.with_repo_paths:
                options.repositories = {
                    self._repo_name_value(repo): path
                    for repo, path in options.repositories
                }
            else:
                options.repositories = [
                    self._repo_name_value(repo)
                    for repo in options.repositories
                ]
        elif len(repos) > 0:
            self.error(
                "unexpected repository name '%s' in argument list" % repos[0]
            )
        if self.arg_setup.with_tag_labels:
            options.tag_labels += [self._tag_name_value(t) for t in tags]
        elif len(tags) > 0:
            self.error(
                "unexpected tag '%s' in argument list" % tags[0]
            )
        if self.arg_setup.with_file_paths:
            options.file_paths += files
        elif len(files) > 0:
            self.error(
                "unexpected file path '%s' in argument list" % files[0]
            )
        if self.arg_setup.with_obj_hashes:
            options.obj_hashes += hashes
        elif len(hashes) > 0:
            self.error(
                "unexpected object hash '%s' in argument list" % hashes[0]
            )
        if self.arg_setup.with_file_paths:
            options.file_paths = \
                [os.path.realpath(p) for p in options.file_paths]
        if self.arg_setup.with_repo_names:
            if self.arg_setup.with_repo_paths:
                options.repositories = {
                    repo_name: os.path.realpath(repo_path)
                    for repo_name, repo_path in options.repositories.items()
                }
        options.__dict__.pop("arguments", None)
        Options = options.__class__
        if self.arg_setup.with_print_repo_name:
            # attach a bound helper deciding whether repo names should be printed
            def __opts_print_repo_names(options, hint = None):
                if hint is None:
                    hint = len(options.repositories) > 1
                if options.do_print_repo_name is not None:
                    return options.do_print_repo_name
                else:
                    return hint
            setattr(
                options,
                "print_repo_names",
                __opts_print_repo_names.__get__(options, Options)
            )
        return options
    #--------------------------------------------------------------------------#
    #--------------------------------------------------------------------------#
    def parse_args(self):
        """Parse sys.argv and run the relfs-specific post-processing."""
        return self.process_parsed_options(
            argparse.ArgumentParser.parse_args(self)
        )
    #--------------------------------------------------------------------------#
    def print_bash_complete(self, function_name, command):
        """Emit a bash completion script for this parser (requires argparse2bco)."""
        import argparse2bco
        argparse2bco.print_bash_complete_script(
            self,
            function_name,
            command
        )
#------------------------------------------------------------------------------#
def make_argument_parser(command, description, arg_setup = None):
    """Build the relfs command-line argument parser.

    :param command: program name shown in the usage/help output
    :param description: help text describing the command
    :param arg_setup: optional ArgumentSetup selecting which option groups to
        install; when omitted a fresh default instance is created.
    :returns: the configured argument parser

    Fix: the previous signature used ``arg_setup = ArgumentSetup()`` — a
    mutable default argument shared by every call, so a mutation made by one
    caller leaked into all later calls. A None sentinel with a per-call
    instance preserves the default behavior without the shared state.
    """
    if arg_setup is None:
        arg_setup = ArgumentSetup()
    argparser = __RelfsArgumentParser(
        arg_setup = arg_setup,
        prog=command,
        description=description,
        epilog="""
        Copyright (c) Matúš Chochlík.
        Permission is granted to copy, distribute and/or modify this document
        under the terms of the Boost Software License, Version 1.0.
        (See a copy at http://www.boost.org/LICENSE_1_0.txt)
        """
    )
    return argparser
#------------------------------------------------------------------------------#
|
{
"content_hash": "464b22193b75deb6743c294d86ad8e6f",
"timestamp": "",
"source": "github",
"line_count": 469,
"max_line_length": 82,
"avg_line_length": 36.57995735607676,
"alnum_prop": 0.4371648402891117,
"repo_name": "matus-chochlik/various",
"id": "9231a8c4678f8545ff109c36ea387ce5fcde8ae3",
"size": "17255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "relfs/relfs/arguments.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "44686"
},
{
"name": "C++",
"bytes": "76296"
},
{
"name": "Makefile",
"bytes": "8751"
},
{
"name": "Python",
"bytes": "295676"
},
{
"name": "QML",
"bytes": "19387"
},
{
"name": "QMake",
"bytes": "3981"
},
{
"name": "Roff",
"bytes": "13504"
},
{
"name": "Shell",
"bytes": "88324"
},
{
"name": "TeX",
"bytes": "199528"
},
{
"name": "Vim script",
"bytes": "1417"
},
{
"name": "XSLT",
"bytes": "2225"
}
],
"symlink_target": ""
}
|
import pyglet, math
from pyglet.window import key
import bullet, physicalobject, resources
class Player(physicalobject.PhysicalObject):
    """Physical object that responds to user input"""
    def __init__(self, *args, **kwargs):
        super(Player, self).__init__(img=resources.player_image, *args, **kwargs)
        # Create a child sprite to show when the ship is thrusting
        self.engine_sprite = pyglet.sprite.Sprite(img=resources.engine_image, *args, **kwargs)
        self.engine_sprite.visible = False
        # Set some easy-to-tweak constants
        self.thrust = 300.0
        self.rotate_speed = 200.0
        self.bullet_speed = 700.0
        # Player should not collide with own bullets
        self.reacts_to_bullets = False
        # Tell the game handler about any event handlers
        self.key_handler = key.KeyStateHandler()
        self.event_handlers = [self, self.key_handler]
    def update(self, dt):
        """Advance the ship by *dt* seconds: physics, rotation, thrust, engine sprite."""
        # Do all the normal physics stuff
        super(Player, self).update(dt)
        if self.key_handler[key.LEFT]:
            self.rotation -= self.rotate_speed * dt
        if self.key_handler[key.RIGHT]:
            self.rotation += self.rotate_speed * dt
        if self.key_handler[key.UP]:
            # Note: pyglet's rotation attributes are in "negative degrees"
            angle_radians = -math.radians(self.rotation)
            force_x = math.cos(angle_radians) * self.thrust * dt
            force_y = math.sin(angle_radians) * self.thrust * dt
            self.velocity_x += force_x
            self.velocity_y += force_y
            # If thrusting, update the engine sprite
            self.engine_sprite.rotation = self.rotation
            self.engine_sprite.x = self.x
            self.engine_sprite.y = self.y
            self.engine_sprite.visible = True
        else:
            # Otherwise, hide it
            self.engine_sprite.visible = False
    def on_key_press(self, symbol, modifiers):
        """Fire a bullet on SPACE; other keys are handled via the key state handler."""
        if symbol == key.SPACE:
            self.fire()
    def fire(self):
        """Spawn a bullet just ahead of the ship, inheriting the ship's velocity."""
        # Note: pyglet's rotation attributes are in "negative degrees"
        angle_radians = -math.radians(self.rotation)
        # Create a new bullet just in front of the player
        ship_radius = self.image.width / 2
        bullet_x = self.x + math.cos(angle_radians) * ship_radius
        bullet_y = self.y + math.sin(angle_radians) * ship_radius
        new_bullet = bullet.Bullet(bullet_x, bullet_y, batch=self.batch)
        # Give it some speed
        bullet_vx = self.velocity_x + math.cos(angle_radians) * self.bullet_speed
        bullet_vy = self.velocity_y + math.sin(angle_radians) * self.bullet_speed
        new_bullet.velocity_x, new_bullet.velocity_y = bullet_vx, bullet_vy
        # Add it to the list of objects to be added to the game_objects list
        self.new_objects.append(new_bullet)
        # Play the bullet sound
        resources.bullet_sound.play()
    def delete(self):
        """Remove this object and its child engine sprite from all batches."""
        # We have a child sprite which must be deleted when this object
        # is deleted from batches, etc.
        self.engine_sprite.delete()
        super(Player, self).delete()
|
{
"content_hash": "e638313580ab9e77214c103b2dbc745a",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 94,
"avg_line_length": 37.9277108433735,
"alnum_prop": 0.6216645489199492,
"repo_name": "Akagi201/learning-python",
"id": "3252b512e6726193d6380a2fd2268c318ce0fa63",
"size": "3148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyglet/mygame/game/player.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "125"
},
{
"name": "CSS",
"bytes": "82315"
},
{
"name": "HTML",
"bytes": "16738"
},
{
"name": "JavaScript",
"bytes": "253132"
},
{
"name": "Jupyter Notebook",
"bytes": "3666"
},
{
"name": "Less",
"bytes": "2022"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Procfile",
"bytes": "21"
},
{
"name": "Python",
"bytes": "336950"
},
{
"name": "Rich Text Format",
"bytes": "49342"
},
{
"name": "Shell",
"bytes": "4498"
}
],
"symlink_target": ""
}
|
''' Shared helpers for tests '''
import os
import uuid
def setup_tmp_dir():
    """Create a unique /tmp workspace with empty source and destination dirs.

    Returns the tuple ``(tmp_dir, source_dir, destination_dir)``.
    """
    root = '/tmp/tmp-%s' % uuid.uuid4().hex
    subdirs = [os.path.join(root, name) for name in ('source', 'destination')]
    for subdir in subdirs:
        os.makedirs(subdir)
    return root, subdirs[0], subdirs[1]
def touch(path, time=None):
    """Ensure *path* exists and stamp its access/modification times.

    ``time`` is forwarded to :func:`os.utime`; ``None`` means "now".
    """
    with open(path, 'a'):
        os.utime(path, time)
|
{
"content_hash": "c595ac2c229968dbe787a440d86449e7",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 59,
"avg_line_length": 29.833333333333332,
"alnum_prop": 0.6424581005586593,
"repo_name": "praveenram/sync_folders",
"id": "6f392ce64e00d4071b26b8ce7bc3ec2f04f4070d",
"size": "537",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/base_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23105"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import pprint
import avro.schema
from avro.schema import Schema
import sys
import re
import logging
import six
from six.moves import urllib
from six.moves import range
from typing import Any, List, Set, Union, Text
from .sourceline import SourceLine, lineno_re, bullets, indent
_logger = logging.getLogger("salad")
class ValidationException(Exception):
    """Raised when a datum does not conform to the expected schema."""
    pass
class ClassValidationException(ValidationException):
    """Validation failure specific to record/class-level schema checks."""
    pass
def validate(expected_schema,  # type: Schema
             datum,  # type: Any
             identifiers=None,  # type: List[Text]
             strict=False,  # type: bool
             foreign_properties=None  # type: Set[Text]
             ):
    # type: (...) -> bool
    """Return True when *datum* conforms to *expected_schema*.

    Thin non-raising wrapper around validate_ex(): validation problems are
    reported as a False return value instead of a ValidationException.

    Fix: the previous defaults were ``identifiers=[]`` and
    ``foreign_properties=set()`` — mutable default arguments shared across
    calls. None sentinels avoid the pitfall; validate_ex() already
    substitutes fresh empty containers for falsy values, so behavior for
    existing callers is unchanged.
    """
    return validate_ex(
        expected_schema, datum, identifiers, strict=strict,
        foreign_properties=foreign_properties, raise_ex=False)
# Avro numeric bounds: "int" is a 32-bit signed integer ...
INT_MIN_VALUE = -(1 << 31)
INT_MAX_VALUE = (1 << 31) - 1
# ... and "long" is a 64-bit signed integer.
LONG_MIN_VALUE = -(1 << 63)
LONG_MAX_VALUE = (1 << 63) - 1
def friendly(v):  # type: (Any) -> Any
    """Render an avro schema object as a short human-readable description."""
    if isinstance(v, avro.schema.NamedSchema):
        return v.name
    if isinstance(v, avro.schema.ArraySchema):
        return "array of <%s>" % friendly(v.items)
    if isinstance(v, avro.schema.PrimitiveSchema):
        return v.type
    if isinstance(v, avro.schema.UnionSchema):
        # describe each alternative of the union
        return " or ".join(friendly(alt) for alt in v.schemas)
    # anything else is shown as-is
    return v
def vpformat(datum):  # type: (Any) -> str
    """Pretty-print *datum*, truncating the output to 160 characters.

    Truncated output is marked with a trailing "[...]".
    """
    text = pprint.pformat(datum)
    return text if len(text) <= 160 else text[0:160] + "[...]"
def validate_ex(expected_schema,  # type: Schema
                datum,  # type: Any
                identifiers=None,  # type: List[Text]
                strict=False,  # type: bool
                foreign_properties=None,  # type: Set[Text]
                raise_ex=True,  # type: bool
                strict_foreign_properties=False,  # type: bool
                logger=_logger  # type: logging.Logger
                ):
    # type: (...) -> bool
    """Determine if a python datum is an instance of a schema.

    Dispatches first on the primitive type name (``expected_schema.type``)
    and then on the schema's class (enum / array / union / record).

    When ``raise_ex`` is True, failure raises ValidationException (or
    ClassValidationException for a record whose ``class`` field matched);
    when False, failure is reported by returning False.  ``identifiers``
    and ``foreign_properties`` name extra fields tolerated on records;
    ``strict`` / ``strict_foreign_properties`` turn unknown / extension
    fields into errors instead of log warnings.
    """
    # Falsy values (None, empty containers) are replaced by fresh empties,
    # so callers may pass None for both optional collections.
    if not identifiers:
        identifiers = []
    if not foreign_properties:
        foreign_properties = set()
    schema_type = expected_schema.type
    # --- primitive types, dispatched on the avro type name ---
    if schema_type == 'null':
        if datum is None:
            return True
        else:
            if raise_ex:
                raise ValidationException(u"the value is not null")
            else:
                return False
    elif schema_type == 'boolean':
        if isinstance(datum, bool):
            return True
        else:
            if raise_ex:
                raise ValidationException(u"the value is not boolean")
            else:
                return False
    elif schema_type == 'string':
        if isinstance(datum, six.string_types):
            return True
        elif isinstance(datum, bytes):
            # Decode only to prove the bytes are valid UTF-8; the rebound
            # local is discarded, the caller's datum is untouched.  A bad
            # encoding surfaces as UnicodeDecodeError, not ValidationException.
            datum = datum.decode(u"utf-8")
            return True
        else:
            if raise_ex:
                raise ValidationException(u"the value is not string")
            else:
                return False
    elif schema_type == 'bytes':
        # NOTE(review): under Python 3 this accepts str, not bytes — the
        # check looks Python-2 oriented; confirm intended behavior on py3.
        if isinstance(datum, str):
            return True
        else:
            if raise_ex:
                raise ValidationException(
                    u"the value `%s` is not bytes" % vpformat(datum))
            else:
                return False
    elif schema_type == 'int':
        # NOTE: bool is a subclass of int, so True/False pass this check.
        if (isinstance(datum, six.integer_types)
                and INT_MIN_VALUE <= datum <= INT_MAX_VALUE):
            return True
        else:
            if raise_ex:
                raise ValidationException(u"`%s` is not int" % vpformat(datum))
            else:
                return False
    elif schema_type == 'long':
        if ((isinstance(datum, six.integer_types))
                and LONG_MIN_VALUE <= datum <= LONG_MAX_VALUE):
            return True
        else:
            if raise_ex:
                raise ValidationException(
                    u"the value `%s` is not long" % vpformat(datum))
            else:
                return False
    elif schema_type in ['float', 'double']:
        # Integers are accepted where a float/double is expected.
        if (isinstance(datum, six.integer_types)
                or isinstance(datum, float)):
            return True
        else:
            if raise_ex:
                raise ValidationException(
                    u"the value `%s` is not float or double" % vpformat(datum))
            else:
                return False
    # --- complex schemas, dispatched on the schema object's class ---
    elif isinstance(expected_schema, avro.schema.EnumSchema):
        # The special enum named "Any" accepts every non-null value.
        if expected_schema.name == "Any":
            if datum is not None:
                return True
            else:
                if raise_ex:
                    raise ValidationException(u"'Any' type must be non-null")
                else:
                    return False
        if not isinstance(datum, six.string_types):
            if raise_ex:
                raise ValidationException(
                    u"value is a %s but expected a string" % (type(datum).__name__))
            else:
                return False
        if datum in expected_schema.symbols:
            return True
        else:
            if raise_ex:
                raise ValidationException(u"the value %s is not a valid %s, expected %s%s" % (vpformat(datum), expected_schema.name,
                                                                                             "one of " if len(
                                                                                                 expected_schema.symbols) > 1 else "",
                                                                                             "'" + "', '".join(expected_schema.symbols) + "'"))
            else:
                return False
    elif isinstance(expected_schema, avro.schema.ArraySchema):
        if isinstance(datum, list):
            # Validate every element; SourceLine attaches document position
            # information to the error raised for a bad item.
            for i, d in enumerate(datum):
                try:
                    sl = SourceLine(datum, i, ValidationException)
                    if not validate_ex(expected_schema.items, d, identifiers,
                                       strict=strict,
                                       foreign_properties=foreign_properties,
                                       raise_ex=raise_ex,
                                       strict_foreign_properties=strict_foreign_properties,
                                       logger=logger):
                        return False
                except ValidationException as v:
                    if raise_ex:
                        raise sl.makeError(
                            six.text_type("item is invalid because\n%s" % (indent(str(v)))))
                    else:
                        return False
            return True
        else:
            if raise_ex:
                raise ValidationException(u"the value %s is not a list, expected list of %s" % (
                    vpformat(datum), friendly(expected_schema.items)))
            else:
                return False
    elif isinstance(expected_schema, avro.schema.UnionSchema):
        # First pass: cheap non-raising probe of every alternative.
        for s in expected_schema.schemas:
            if validate_ex(s, datum, identifiers, strict=strict, raise_ex=False,
                           strict_foreign_properties=strict_foreign_properties,
                           logger=logger):
                return True
        if not raise_ex:
            return False
        # Second pass (raise_ex only): re-validate just the plausible
        # alternatives with raise_ex=True to collect readable error text.
        errors = []  # type: List[Text]
        checked = []
        for s in expected_schema.schemas:
            if isinstance(datum, list) and not isinstance(s, avro.schema.ArraySchema):
                continue
            elif isinstance(datum, dict) and not isinstance(s, avro.schema.RecordSchema):
                continue
            elif isinstance(datum, (bool, six.integer_types, float, six.string_types)) and isinstance(s, (avro.schema.ArraySchema, avro.schema.RecordSchema)):
                continue
            elif datum is not None and s.type == "null":
                continue
            checked.append(s)
            try:
                validate_ex(s, datum, identifiers, strict=strict,
                            foreign_properties=foreign_properties,
                            raise_ex=True,
                            strict_foreign_properties=strict_foreign_properties,
                            logger=logger)
            except ClassValidationException as e:
                # A record whose `class` matched is the intended branch;
                # propagate its specific error instead of the union summary.
                raise
            except ValidationException as e:
                errors.append(six.text_type(e))
        if bool(errors):
            raise ValidationException(bullets(["tried %s but\n%s" % (friendly(
                checked[i]), indent(errors[i])) for i in range(0, len(errors))], "- "))
        else:
            raise ValidationException("value is a %s, expected %s" % (
                type(datum).__name__, friendly(expected_schema)))
    elif isinstance(expected_schema, avro.schema.RecordSchema):
        if not isinstance(datum, dict):
            if raise_ex:
                raise ValidationException(u"is not a dict")
            else:
                return False
        # If the schema has a `class` field, the datum's class value must
        # equal the schema name; remember the match for error typing below.
        classmatch = None
        for f in expected_schema.fields:
            if f.name in ("class",):
                d = datum.get(f.name)
                if not d:
                    if raise_ex:
                        raise ValidationException(
                            u"Missing '%s' field" % (f.name))
                    else:
                        return False
                if expected_schema.name != d:
                    if raise_ex:
                        raise ValidationException(
                            u"Expected class '%s' but this is '%s'" % (expected_schema.name, d))
                    else:
                        return False
                classmatch = d
                break
        # Validate every declared field (falling back to the schema default,
        # or None, when absent from the datum).
        errors = []
        for f in expected_schema.fields:
            if f.name in ("class",):
                continue
            if f.name in datum:
                fieldval = datum[f.name]
            else:
                try:
                    fieldval = f.default
                except KeyError:
                    fieldval = None
            try:
                sl = SourceLine(datum, f.name, six.text_type)
                if not validate_ex(f.type, fieldval, identifiers, strict=strict,
                                   foreign_properties=foreign_properties,
                                   raise_ex=raise_ex,
                                   strict_foreign_properties=strict_foreign_properties,
                                   logger=logger):
                    return False
            except ValidationException as v:
                if f.name not in datum:
                    errors.append(u"missing required field `%s`" % f.name)
                else:
                    errors.append(sl.makeError(u"the `%s` field is not valid because\n%s" % (
                        f.name, indent(str(v)))))
        # Flag datum keys not declared by the schema; keys starting with
        # "@"/"$" and known identifiers/foreign properties are tolerated.
        for d in datum:
            found = False
            for f in expected_schema.fields:
                if d == f.name:
                    found = True
            if not found:
                sl = SourceLine(datum, d, six.text_type)
                if d not in identifiers and d not in foreign_properties and d[0] not in ("@", "$"):
                    # NOTE(review): this compound condition re-tests the two
                    # memberships the enclosing `if` already ruled out, so it
                    # reduces to `strict and strict_foreign_properties and not
                    # raise_ex` — confirm whether that is the intent.
                    if (d not in identifiers and strict) and (
                            d not in foreign_properties and strict_foreign_properties) and not raise_ex:
                        return False
                    split = urllib.parse.urlsplit(d)
                    if split.scheme:
                        err = sl.makeError(u"unrecognized extension field `%s`%s."
                                           " Did you include "
                                           "a $schemas section?" % (
                                               d, " and strict_foreign_properties is True" if strict_foreign_properties else ""))
                        if strict_foreign_properties:
                            errors.append(err)
                        else:
                            # NOTE(review): Logger.warn is deprecated in
                            # favor of Logger.warning.
                            logger.warn(err)
                    else:
                        err = sl.makeError(u"invalid field `%s`, expected one of: %s" % (
                            d, ", ".join("'%s'" % fn.name for fn in expected_schema.fields)))
                        if strict:
                            errors.append(err)
                        else:
                            logger.warn(err)
        if bool(errors):
            if raise_ex:
                # Use the class-specific exception when the `class` field
                # matched, so union validation can short-circuit on it.
                if classmatch:
                    raise ClassValidationException(bullets(errors, "* "))
                else:
                    raise ValidationException(bullets(errors, "* "))
            else:
                return False
        else:
            return True
    # Fallthrough: schema type not handled above.
    if raise_ex:
        raise ValidationException(u"Unrecognized schema_type %s" % schema_type)
    else:
        return False
|
{
"content_hash": "4fb0706637753e35703525b5e135793a",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 158,
"avg_line_length": 39.02089552238806,
"alnum_prop": 0.477203182374541,
"repo_name": "mr-c/common-workflow-language",
"id": "2fab415960f31b3b936a0e0942cdfbb1e1bf31f9",
"size": "13072",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "v1.0/salad/schema_salad/validate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "95268"
},
{
"name": "Python",
"bytes": "1552"
},
{
"name": "Shell",
"bytes": "1230"
}
],
"symlink_target": ""
}
|
from PySide import QtCore, QtGui
from tools.modified.androguard.session import Session
from tools.modified.androguard.core import androconf
from tools.modified.androguard.gui.fileloading import FileLoadingThread
from tools.modified.androguard.gui.treewindow import TreeWindow
from tools.modified.androguard.gui.sourcewindow import SourceWindow
from tools.modified.androguard.gui.stringswindow import StringsWindow
from tools.modified.androguard.gui.helpers import class2func
import os
class MainWindow(QtGui.QMainWindow):
    '''Main window:
    self.central: QTabWidget in center area
    self.dock: QDockWidget in left area
    self.tree: TreeWindow(QTreeWidget) in self.dock
    '''

    def __init__(self, parent=None, input_file=None):
        super(MainWindow, self).__init__(parent)
        self.session = None
        self.setupSession()
        self.setupFileMenu()
        self.setupViewMenu()
        self.setupHelpMenu()
        self.setupCentral()
        self.setupEmptyTree()
        self.setupDock()
        self.setWindowTitle("Androguard GUI")
        self.showStatus("Androguard GUI")
        # Identity comparison with None (PEP 8) instead of `!= None`.
        if input_file is not None:
            self.openFile(input_file)

    def showStatus(self, msg):
        '''Helper function called by any window to display a message
           in status bar.
        '''
        androconf.debug(msg)
        self.statusBar().showMessage(msg)

    def about(self):
        '''User clicked About menu. Display a Message box.'''
        QtGui.QMessageBox.about(self, "About Androguard GUI",
                                "<p><b>Androguard GUI</b> is basically a GUI for Androguard :)." \
                                "<br>Have fun !</p>")

    def setupSession(self):
        '''Create the analysis Session and the background file-loading
        thread, wiring its completion signal to loadedFile.'''
        self.session = Session()
        self.fileLoadingThread = FileLoadingThread(self.session)
        self.connect(self.fileLoadingThread,
                     QtCore.SIGNAL("loadedFile(bool)"),
                     self.loadedFile)

    def loadedFile(self, success):
        '''Slot called when the loading thread finishes; refresh the
        class tree and clear the tabs on success.'''
        if not success:
            self.showStatus("Analysis of %s failed :(" %
                            str(self.fileLoadingThread.file_path))
            return
        self.updateDockWithTree()
        self.cleanCentral()
        self.showStatus("Analysis of %s done!" %
                        str(self.fileLoadingThread.file_path))

    def openFile(self, path=None):
        '''User clicked Open menu. Display a Dialog to ask which file to open.'''
        self.session.reset()
        if not path:
            path = QtGui.QFileDialog.getOpenFileName(self, "Open File",
                                                     '', "Android Files (*.apk *.jar *.dex *.odex *.dey);;Androguard Session (*.ag)")
            path = str(path[0])
        if path:
            self.setupTree()
            self.showStatus("Analyzing %s..." % str(path))
            self.fileLoadingThread.load(path)

    def addFile(self, path=None):
        '''User clicked Open menu. Display a Dialog to ask which APK to open.'''
        if not self.session.isOpen():
            return
        if not path:
            path = QtGui.QFileDialog.getOpenFileName(self, "Add File",
                                                     '', "Android Files (*.apk *.jar *.dex *.odex *.dey)")
            path = str(path[0])
        if path:
            self.showStatus("Analyzing %s..." % str(path))
            self.fileLoadingThread.load(path)

    def saveFile(self, path=None):
        '''User clicked Save menu. Display a Dialog to ask where to save.'''
        if not path:
            path = QtGui.QFileDialog.getSaveFileName(self, "Save File",
                                                     '', "Androguard Session (*.ag)")
            path = str(path[0])
        if path:
            self.showStatus("Saving %s..." % str(path))
            self.saveSession(path)

    def saveSession(self, path):
        '''Save androguard session.'''
        try:
            self.session.save(path)
        # `except E as e` replaces the Python-2-only `except E, e` syntax;
        # the `as` form works on Python 2.6+ and Python 3.
        except RuntimeError as e:
            androconf.error(str(e))
            # http://stackoverflow.com/questions/2134706/hitting-maximum-recursion-depth-using-pythons-pickle-cpickle
            androconf.error("Try increasing sys.recursionlimit")
            os.remove(path)
            androconf.warning("Session not saved")

    def quit(self):
        '''Clicked in File menu to exit or CTRL+Q to close main window'''
        QtGui.qApp.quit()

    def closeEvent(self, event):
        '''Clicked [x] to close main window'''
        event.accept()

    def setupEmptyTree(self):
        '''Setup empty Tree at startup. '''
        if hasattr(self, "tree"):
            del self.tree
        self.tree = QtGui.QTreeWidget(self)
        self.tree.header().close()

    def setupDock(self):
        '''Setup empty Dock at startup. '''
        self.dock = QtGui.QDockWidget("Classes", self)
        self.dock.setWidget(self.tree)
        self.dock.setFeatures(QtGui.QDockWidget.NoDockWidgetFeatures)
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.dock)

    def setupTree(self):
        '''Replace the dock's widget with a fresh session-backed TreeWindow.'''
        androconf.debug("Setup Tree")
        self.tree = TreeWindow(win=self, session=self.session)
        self.tree.setWindowTitle("Tree model")
        self.dock.setWidget(self.tree)

    def setupCentral(self):
        '''Setup empty window supporting tabs at startup. '''
        self.central = QtGui.QTabWidget()
        self.central.setTabsClosable(True)
        self.central.tabCloseRequested.connect(self.tabCloseRequestedHandler)
        self.central.currentChanged.connect(self.currentTabChanged)
        self.setCentralWidget(self.central)

    def tabCloseRequestedHandler(self, index):
        '''Remove the tab at *index* when its close button is clicked.'''
        self.central.removeTab(index)

    def currentTabChanged(self, index):
        '''Slot for QTabWidget.currentChanged; index is -1 when no tab left.'''
        # Typo fixed in the debug message ("curentTabChanged").
        androconf.debug("currentTabChanged -> %d" % index)
        if index == -1:
            return  # all tab closed

    def cleanCentral(self):
        # TOFIX: Removes all the pages, but does not delete them.
        self.central.clear()

    def setupFileMenu(self):
        '''Build the File menu (open/add/save/exit) with shortcuts.'''
        fileMenu = QtGui.QMenu("&File", self)
        self.menuBar().addMenu(fileMenu)
        fileMenu.addAction("&Open...", self.openFile, "Ctrl+O")
        fileMenu.addAction("&Add...", self.addFile, "Ctrl+A")
        fileMenu.addAction("&Save...", self.saveFile, "Ctrl+S")
        fileMenu.addAction("E&xit", self.quit, "Ctrl+Q")

    def setupViewMenu(self):
        '''Build the View menu (strings window).'''
        viewMenu = QtGui.QMenu("&View", self)
        self.menuBar().addMenu(viewMenu)
        viewMenu.addAction("&Strings...", self.openStringsWindow)

    def setupHelpMenu(self):
        '''Build the Help menu (about boxes).'''
        helpMenu = QtGui.QMenu("&Help", self)
        self.menuBar().addMenu(helpMenu)
        helpMenu.addAction("&About", self.about)
        helpMenu.addAction("About &Qt", QtGui.qApp.aboutQt)

    def updateDockWithTree(self, empty=False):
        '''Update the classes tree. Called when
        - a new APK has been imported
        - a classe has been renamed (displayed in the tree)
        '''
        self.setupTree()
        self.tree.fill()

    def openStringsWindow(self):
        '''Open (and focus) a tab listing the strings of the session.'''
        stringswin = StringsWindow(win=self, session=self.session)
        self.central.addTab(stringswin, stringswin.title)
        self.central.setTabToolTip(self.central.indexOf(stringswin), stringswin.title)
        self.central.setCurrentWidget(stringswin)

    def openBytecodeWindow(self, current_class, method=None):
        pass  # self.central.setCurrentWidget(sourcewin)

    def openSourceWindow(self, current_class, method=None):
        '''Main function to open a .java source window
        It checks if it already opened and open that tab,
        otherwise, initialize a new window.
        '''
        androconf.debug("openSourceWindow for %s" % current_class)
        sourcewin = self.getMeSourceWindowIfExists(current_class)
        if not sourcewin:
            current_filename = self.session.get_filename_by_class(current_class)
            current_digest = self.session.get_digest_by_class(current_class)
            sourcewin = SourceWindow(win=self,
                                     current_class=current_class,
                                     current_title=current_class.current_title,
                                     current_filename=current_filename,
                                     current_digest=current_digest,
                                     session=self.session)
            sourcewin.reload_java_sources()
            self.central.addTab(sourcewin, sourcewin.title)
            self.central.setTabToolTip(self.central.indexOf(sourcewin), current_class.get_name())
        if method:
            sourcewin.browse_to_method(method)
        self.central.setCurrentWidget(sourcewin)

    def getMeSourceWindowIfExists(self, current_class):
        '''Helper for openSourceWindow'''
        for idx in range(self.central.count()):
            if current_class.get_name() == self.central.tabToolTip(idx):
                androconf.debug("Tab %s already opened at: %d" % (current_class.get_name(), idx))
                return self.central.widget(idx)
        return None

    def doesClassExist(self, path):
        # NOTE(review): self.d is never assigned anywhere in this class —
        # presumably set elsewhere at runtime; confirm before relying on this.
        arg = class2func(path)
        try:
            getattr(self.d, arg)
        except AttributeError:
            return False
        return True
|
{
"content_hash": "92aaf6035667d642f71d2a705f947219",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 117,
"avg_line_length": 35.988188976377955,
"alnum_prop": 0.6138278087736572,
"repo_name": "CreatorB/hackerdroid",
"id": "1bf75ed234f883ec87a41a885015da39ae162321",
"size": "9141",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "AndroBugs_Framework/tools/modified/androguard/gui/mainwindow.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27553317"
},
{
"name": "Shell",
"bytes": "546"
}
],
"symlink_target": ""
}
|
from lxml.html.clean import Cleaner
# Whitelist of tags allowed to survive sanitization; everything else is removed.
TAGS = ['br', 'p', 'hr', 'b', 'strong', 'em', 'i', 'a',
        'blockquote', 'ul', 'li']
# Whitelist of attributes allowed to remain on the surviving tags.
ATTRS = ['href', 'target']
# Shared lxml Cleaner configured for strict whitelisting: only TAGS/ATTRS
# survive, inline styles and comments are stripped, page structure is kept.
cleaner = Cleaner(style=False, links=True, add_nofollow=False,
                  page_structure=False, safe_attrs_only=True,
                  remove_unknown_tags=False, comments=False,
                  safe_attrs=ATTRS, allow_tags=TAGS)
def clean_html(html):
    """Sanitize *html* with the module-level whitelisting Cleaner.

    Returns the placeholder '<p></p>' for None or empty input so callers
    always receive non-empty markup.
    """
    # Truthiness covers both None and the empty string, replacing the
    # redundant `html is None or not len(html)` two-step check.
    if not html:
        return '<p></p>'
    return cleaner.clean_html(html)
|
{
"content_hash": "971bce283db8bc5bb07b7ced969f3b90",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 62,
"avg_line_length": 32.3125,
"alnum_prop": 0.5783365570599613,
"repo_name": "pudo/storyweb",
"id": "6d658e37ffe597ae94f65e687abf321c4aceb92e",
"size": "517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "storyweb/analysis/html.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "172038"
},
{
"name": "JavaScript",
"bytes": "14446"
},
{
"name": "Python",
"bytes": "68707"
}
],
"symlink_target": ""
}
|
from helper import *
import collect
import draw
import pygame
import update
def start(game, inputs_record):
    """Run the pygame main loop until the window is closed.

    game          -- initial game state, advanced each frame by update.update
    inputs_record -- input state, folded over pygame events by collect.collect
    """
    pygame.init()
    screen = pygame.display.set_mode(
        (draw.screen_w, draw.screen_h)
    )
    pygame.display.set_caption("ice-hall")
    clock = pygame.time.Clock()
    # Milliseconds taken by the previous frame; starts at 1 — presumably to
    # avoid a zero-length first timestep (TODO confirm).
    millis = 1
    # Renamed from `exit`, which shadowed the builtin of the same name.
    quit_requested = False
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit_requested = True
            else:
                inputs_record = collect.collect(inputs_record, event)
        # Leave before updating/drawing, matching the original ordering.
        if quit_requested:
            break
        game = update.update(game, inputs_record, millis/1000)
        draw.draw(screen, game)
        pygame.display.flip()
        millis = clock.tick(60)  # cap at 60 FPS; returns ms since last tick
    pygame.quit()
start(update.new_game, collect.new_inputs)
|
{
"content_hash": "52ec5fa5bc150db41f5de36a0f64e871",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 71,
"avg_line_length": 22.029411764705884,
"alnum_prop": 0.6261682242990654,
"repo_name": "ivokosir/icy-hall",
"id": "1ea9e716fb61af886a75b1f10335f858a2f1453a",
"size": "773",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9725"
}
],
"symlink_target": ""
}
|
import pytest
def test_idea_installed(host):
    """The IntelliJ IDEA launcher must be resolvable on the PATH."""
    result = host.run('which idea')
    assert result.rc == 0
@pytest.mark.parametrize('file_path,expected_text', [
    ('disabled_plugins.txt', 'org.jetbrains.plugins.gradle'),
    ('options/jdk.table.xml', '/usr/lib64/jvm/java-11-openjdk'),
    ('options/jdk.table.xml', '/usr/lib64/jvm/java-1.8.0-openjdk'),
    ('options/project.default.xml', '/test/maven/home'),
    ('codestyles/GoogleStyle.xml', 'code_scheme name="GoogleStyle"'),
    ('options/code.style.schemes',
     'name="PREFERRED_PROJECT_CODE_STYLE" value="GoogleStyle"'),
    ('options/code.style.schemes.xml',
     'name="CURRENT_SCHEME_NAME" value="GoogleStyle"'),
    ('inspection/GantSign.xml', 'value="GantSign"'),
    ('options/editor.codeinsight.xml',
     'component name="DaemonCodeAnalyzerSettings" profile="GantSign"'),
    ('options/project.default.xml',
     'option name="PROJECT_PROFILE" value="GantSign"')
])
def test_config_files(host, file_path, expected_text):
    """Each IDEA config file must contain its expected marker text."""
    # Locate the versioned JetBrains config directory under the test user.
    pattern = '\\.config/JetBrains/(IdeaIC|IntelliJIdea)[0-9]+\\.[0-9]$'
    config_home = host.check_output('find %s | grep --color=never -E %s',
                                    '/home/test_usr', pattern)
    config_file = host.file('%s/%s' % (config_home, file_path))
    assert config_file.contains(expected_text)
@pytest.mark.parametrize('plugin_dir_name', [
    'google-java-format',
    'MavenRunHelper'
])
def test_plugins_installed(host, plugin_dir_name):
    """Each plugin directory must exist with the expected owner and mode."""
    # Locate the versioned JetBrains plugins directory under the test user.
    pattern = '\\.local/share/JetBrains/(IdeaIC|IntelliJIdea)[0-9]+\\.[0-9]$'
    plugins_home = host.check_output('find %s | grep --color=never -E %s',
                                     '/home/test_usr', pattern)
    plugin = host.file('%s/%s' % (plugins_home, plugin_dir_name))
    assert plugin.exists
    assert plugin.is_directory
    assert plugin.user == 'test_usr'
    assert plugin.group == 'users'
    assert plugin.mode == 0o755
def test_jar_plugin_installed(host):
    """The save-actions jar must be installed with the expected owner/mode."""
    # Locate the versioned JetBrains plugins directory, then the jar inside it.
    dir_pattern = '\\.local/share/JetBrains/(IdeaIC|IntelliJIdea)[0-9]+\\.[0-9]$'
    plugins_home = host.check_output('find %s | grep --color=never -E %s',
                                     '/home/test_usr', dir_pattern)
    jar_path = host.check_output('find %s | grep --color=never -E %s',
                                 plugins_home, 'save-actions.*\\.jar')
    jar = host.file(jar_path)
    assert jar.exists
    assert jar.is_file
    assert jar.user == 'test_usr'
    assert jar.group == 'users'
    assert jar.mode == 0o664
|
{
"content_hash": "46b37dcee3bf67c8d99b2797ae2144c5",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 75,
"avg_line_length": 39.85507246376812,
"alnum_prop": 0.5912727272727273,
"repo_name": "gantsign/ansible-role-intellij",
"id": "379a845cb5865248e4b20113ca0ed4ee14c49504",
"size": "2750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "molecule/opensuse/tests/test_role.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "1385"
},
{
"name": "Python",
"bytes": "63065"
},
{
"name": "Shell",
"bytes": "26082"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.