repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
nint8835/NintbotForDiscordV2 | NintbotForDiscord/FeatureManager.py | Python | mit | 593 | 0.001686 | from .Feature import Feature
from .BasePlugin import BasePlugin
from . imp | ort Bot
class FeatureManager(object):
def __init__(self, bot: "Bot.Bot"):
self.bot = bot
self.features = {}
def register_feature(self, owner: BasePlugin, name: str, description: str = "A feature.") -> Feature:
if name in self.features:
return self.features[name]
feature = Feature(owner, name, self.bot, description)
self.features[name] | = feature
return feature
def get_feature(self, name: str) -> Feature:
return self.features[name]
|
micahhausler/django-tour | run_tests.py | Python | mit | 1,141 | 0.002629 | """
Provides the ability to run test on a sta | ndalone Django app.
"""
import sys
from optparse import OptionParser
import django
from django.conf import settings
from settings import configure_settings
# Configure the default settings and setup django
configure_settings()
if django.VERSION[1] >= 7:
django.setup()
# Django nose must be imported here since it depends on the settings being configured
from django_nose import NoseTestSuiteRunner
def run_tests(*test_args, **kwargs):
if 'south' in settings.INSTALLED_APPS:
fro | m south.management.commands import patch_for_test_db_setup
patch_for_test_db_setup()
if not test_args:
test_args = ['tour']
kwargs.setdefault('interactive', False)
test_runner = NoseTestSuiteRunner(**kwargs)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--verbosity', dest='verbosity', action='store', default=1, type=int)
parser.add_options(NoseTestSuiteRunner.options)
(options, args) = parser.parse_args()
run_tests(*args, **options.__dict__)
|
dennisss/sympy | sympy/ntheory/tests/test_ntheory.py | Python | bsd-3-clause | 31,774 | 0.001322 | from collections import defaultdict
from sympy import Sieve, binomial_coefficients, binomial_coefficients_list, \
multinomial_coefficients, Mul, S, Pow, sieve, Symbol, summation, Dummy, \
factorial as fac, pi, GoldenRatio as phi, sqrt
from sympy.core.numbers import Integer, igcd, Rational
from sympy.core.compatibility import long
from sympy.ntheory import isprime, n_order, is_primitive_root, \
is_quad_residue, legendre_symbol, jacobi_symbol, npartitions, totient, \
factorint, primefactors, divisors, randprime, nextprime, prevprime, \
primerange, primepi, prime, pollard_rho, perfect_power, multiplicity, \
trailing, divisor_count, primorial, pollard_pm1, \
sqrt_mod, primitive_root, quadratic_residues, is_nthpow_residue, \
nthroot_mod, sqrt_mod_iter, mobius
from sympy.ntheory.residue_ntheory import _primitive_root_prime_iter
from sympy.ntheory.factor_ import smoothness, smoothness_p
from sympy.ntheory.generate import | cycle_length
from sympy.ntheory.primetest import _mr_safe_helper, mr
from sympy.ntheory.bbp_pi import pi_hex_digits
from sympy.ntheory.modular import crt, crt1, crt2, solve_congruence
from sympy.ntheory.continued_fraction import \
(continued_fraction_periodic as cf_p,
continued_fraction_iterator as cf_i,
continued_fraction_convergents as cf_c,
continued_fraction_reduce as cf_r)
from sympy.ntheory.egyptian_fraction import egyptian_fraction
from fract | ions import Fraction
from sympy.core.add import Add
from sympy.polys.domains import ZZ
from sympy.utilities.pytest import raises
from sympy.utilities.iterables import capture
from sympy.utilities.randtest import random_complex_number
from sympy.ntheory.multinomial import multinomial_coefficients_iterator
def test_trailing():
assert trailing(0) == 0
assert trailing(1) == 0
assert trailing(-1) == 0
assert trailing(2) == 1
assert trailing(7) == 0
assert trailing(-7) == 0
for i in range(100):
assert trailing((1 << i)) == i
assert trailing((1 << i) * 31337) == i
assert trailing((1 << 1000001)) == 1000001
assert trailing((1 << 273956)*7**37) == 273956
def test_multiplicity():
for b in range(2, 20):
for i in range(100):
assert multiplicity(b, b**i) == i
assert multiplicity(b, (b**i) * 23) == i
assert multiplicity(b, (b**i) * 1000249) == i
# Should be fast
assert multiplicity(10, 10**10023) == 10023
# Should exit quickly
assert multiplicity(10**10, 10**10) == 1
# Should raise errors for bad input
raises(ValueError, lambda: multiplicity(1, 1))
raises(ValueError, lambda: multiplicity(1, 2))
raises(ValueError, lambda: multiplicity(1.3, 2))
# handles Rationals
assert multiplicity(10, Rational(30, 7)) == 0
assert multiplicity(Rational(2, 7), Rational(4, 7)) == 1
assert multiplicity(Rational(1, 7), Rational(3, 49)) == 2
assert multiplicity(Rational(2, 7), Rational(7, 2)) == -1
assert multiplicity(3, Rational(1, 9)) == -2
def test_perfect_power():
assert perfect_power(0) is False
assert perfect_power(1) is False
assert perfect_power(2) is False
assert perfect_power(3) is False
assert perfect_power(4) == (2, 2)
assert perfect_power(14) is False
assert perfect_power(25) == (5, 2)
assert perfect_power(22) is False
assert perfect_power(22, [2]) is False
assert perfect_power(137**(3*5*13)) == (137, 3*5*13)
assert perfect_power(137**(3*5*13) + 1) is False
assert perfect_power(137**(3*5*13) - 1) is False
assert perfect_power(103005006004**7) == (103005006004, 7)
assert perfect_power(103005006004**7 + 1) is False
assert perfect_power(103005006004**7 - 1) is False
assert perfect_power(103005006004**12) == (103005006004, 12)
assert perfect_power(103005006004**12 + 1) is False
assert perfect_power(103005006004**12 - 1) is False
assert perfect_power(2**10007) == (2, 10007)
assert perfect_power(2**10007 + 1) is False
assert perfect_power(2**10007 - 1) is False
assert perfect_power((9**99 + 1)**60) == (9**99 + 1, 60)
assert perfect_power((9**99 + 1)**60 + 1) is False
assert perfect_power((9**99 + 1)**60 - 1) is False
assert perfect_power((10**40000)**2, big=False) == (10**40000, 2)
assert perfect_power(10**100000) == (10, 100000)
assert perfect_power(10**100001) == (10, 100001)
assert perfect_power(13**4, [3, 5]) is False
assert perfect_power(3**4, [3, 10], factor=0) is False
assert perfect_power(3**3*5**3) == (15, 3)
assert perfect_power(2**3*5**5) is False
assert perfect_power(2*13**4) is False
assert perfect_power(2**5*3**3) is False
def test_isprime():
s = Sieve()
s.extend(100000)
ps = set(s.primerange(2, 100001))
for n in range(100001):
# if (n in ps) != isprime(n): print n
assert (n in ps) == isprime(n)
assert isprime(179424673)
# Some Mersenne primes
assert isprime(2**61 - 1)
assert isprime(2**89 - 1)
assert isprime(2**607 - 1)
assert not isprime(2**601 - 1)
#Arnault's number
assert isprime(int('''
803837457453639491257079614341942108138837688287558145837488917522297\
427376533365218650233616396004545791504202360320876656996676098728404\
396540823292873879185086916685732826776177102938969773947016708230428\
687109997439976544144845341155872450633409279022275296229414984230688\
1685404326457534018329786111298960644845216191652872597534901'''))
# pseudoprime that passes the base set [2, 3, 7, 61, 24251]
assert not isprime(9188353522314541)
assert _mr_safe_helper(
"if n < 170584961: return mr(n, [350, 3958281543])") == \
' # [350, 3958281543] stot = 1 clear [2, 3, 5, 7, 29, 67, 679067]'
assert _mr_safe_helper(
"if n < 3474749660383: return mr(n, [2, 3, 5, 7, 11, 13])") == \
' # [2, 3, 5, 7, 11, 13] stot = 7 clear == bases'
def test_prime():
assert prime(1) == 2
assert prime(2) == 3
assert prime(5) == 11
assert prime(11) == 31
assert prime(57) == 269
assert prime(296) == 1949
assert prime(559) == 4051
assert prime(3000) == 27449
assert prime(4096) == 38873
assert prime(9096) == 94321
assert prime(25023) == 287341
raises(ValueError, lambda: prime(0))
def test_primepi():
assert primepi(1) == 0
assert primepi(2) == 1
assert primepi(5) == 3
assert primepi(11) == 5
assert primepi(57) == 16
assert primepi(296) == 62
assert primepi(559) == 102
assert primepi(3000) == 430
assert primepi(4096) == 564
assert primepi(9096) == 1128
assert primepi(25023) == 2763
def test_generate():
assert nextprime(-4) == 2
assert nextprime(2) == 3
assert nextprime(5) == 7
assert nextprime(12) == 13
assert nextprime(90) == 97
assert nextprime(10**40) == (10**40 + 121)
assert prevprime(3) == 2
assert prevprime(7) == 5
assert prevprime(13) == 11
assert prevprime(97) == 89
assert prevprime(10**40) == (10**40 - 17)
assert list(primerange(2, 7)) == [2, 3, 5]
assert list(primerange(2, 10)) == [2, 3, 5, 7]
assert list(primerange(1050, 1100)) == [1051, 1061,
1063, 1069, 1087, 1091, 1093, 1097]
s = Sieve()
for i in range(30, 2350, 376):
for j in range(2, 5096, 1139):
A = list(s.primerange(i, i + j))
B = list(primerange(i, i + j))
assert A == B
s = Sieve()
assert s[10] == 29
assert nextprime(2, 2) == 5
raises(ValueError, lambda: totient(0))
raises(ValueError, lambda: primorial(0))
assert mr(1, [2]) is False
func = lambda i: (i**2 + 1) % 51
assert next(cycle_length(func, 4)) == (6, 2)
assert list(cycle_length(func, 4, values=True)) == \
[17, 35, 2, 5, 26, 14, 44, 50, 2, 5, 26, 14]
assert next(cycle_length(func, 4, nmax=5)) == (5, None)
assert list(cycle_length(func, 4, nmax=5, values=True)) == \
[17, 35, 2, 5, 26]
def test_randprime():
import random
random.seed(1234)
assert randprime(2, 3) == 2
assert randprime(1, 3) == 2
assert randprime(3, 5) == 3
raises(ValueError, lambda: ran |
juergenhamel/cuon | cuon_server/LoadBalancer/txlb/__init__.py | Python | gpl-3.0 | 199 | 0 | name = 'txL | oadBalancer'
shortName = 'txLB'
projectURL = 'https://launchpad.net/txloadbalancer'
summary = 'txLoadBalancer - A Twisted-bas | ed TCP load balancer.'
description = summary
version = '1.1.0'
|
uptimejp/postgres-toolkit | postgres_toolkit/TcpdumpWrapper.py | Python | gpl-2.0 | 13,163 | 0.000304 | #!/usr/bin/env python
# coding: UTF-8
# TcpdumpWrapper
#
# Copyright(c) 2015-2018 Uptime Technologies, LLC.
from datetime import datetime, timedelta, date, time
import hashlib
import os
import re
import subprocess
import sys
import log
class TcpdumpPacket:
def __init__(self, ts, src, dst, bytes, debug=None):
self.ts = self.string2timestamp(ts)
self.src = src
self.dst = dst
self.debug = debug
self.bytes = bytes
self.messages = []
log.debug("+ " + ts + " " + src + " " + dst)
self.end()
self.session_id = self.get_session_id()
def string2timestamp(self, ts):
t = datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f')
log.debug("ts = " + str(t))
return t
def get_session_id(self):
src_dst = [self.iph_src + ":" + str(self.tcph_src),
self.iph_dst + ":" + str(self.tcph_dst)]
ss = ""
for s in sorted(src_dst):
ss = ss + s + " "
return hashlib.md5(ss).hexdigest()[0:12]
def get_timestamp(self) | :
log.debug("get_timestamp: %s" % str(self.ts))
return self.ts
def get_messages(self):
return self.messages
def parse_ip_header(self, data):
self.iph_version = (data[0] >> 4) & 0b1111
self.iph | _header_len = data[0] & 0b1111
self.iph_tos = data[1]
self.iph_dgram_len = (data[2] << 8) + data[3]
self.iph_id = (data[4] << 8) + data[5]
self.iph_dst = "%d.%d.%d.%d" % (data[12], data[13], data[14], data[15])
self.iph_src = "%d.%d.%d.%d" % (data[16], data[17], data[18], data[19])
if self.debug is True:
print("version : %d" % self.iph_version)
print("hd len : %d (%d)" % (self.iph_header_len,
self.iph_header_len * 4))
print("tos : %d" % self.iph_tos)
print("dgram len: %d" % self.iph_dgram_len)
print("data len: %d" % (self.iph_dgram_len -
self.iph_header_len*4))
print("id : %d" % self.iph_id)
print("dst : %s" % (self.iph_dst))
print("src : %s" % (self.iph_src))
return self.iph_header_len * 4
def parse_tcp_header(self, data):
self.tcph_src = (data[0] << 8) + data[1]
self.tcph_dst = (data[2] << 8) + data[3]
self.tcph_seq = ((data[4] << 24) + (data[5] << 16) +
(data[6] << 8) + data[7])
self.tcph_offset = (data[12] >> 4) & 0b1111
if self.debug is True:
print("src port : %d" % (self.tcph_src))
print("dst port : %d" % (self.tcph_dst))
print("seq : %d" % (self.tcph_seq))
print("offset : %d (%d)" % (self.tcph_offset,
self.tcph_offset * 4))
return self.tcph_offset * 4
def end(self):
cur = 0
iph_len = self.parse_ip_header(self.bytes[cur:])
cur = cur + iph_len
tcph_len = self.parse_tcp_header(self.bytes[cur:])
cur = cur + tcph_len
self.payload = self.bytes[cur:]
s = ""
for d in self.payload:
s = s + "%02x " % (d)
log.debug("payload: " + s)
if len(self.payload) >= 5:
pos = 0
cont = True
while cont:
if len(self.payload[pos:]) < 5:
cont = False
break
ch = self.read_char(self.payload[pos:])
# if not(ch >= 48 and ch <= 122):
# break
pos = pos + 1
i = self.read_int32(self.payload[pos:])
pos = pos + 4
log.debug("sess: " + self.get_session_id() + ": " +
str(self.ts) + ": %c[%x] len=%d" % (ch, ch, i))
# client to server
if ch == ord('S'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('Q'):
s = self.read_string(self.payload[pos:], i - 4)
log.debug(s)
self.messages.append([chr(ch), s])
elif ch == ord('P'):
s = self.read_string(self.payload[pos:], i - 4)
s1 = s.split('\0')
log.debug("> " + s1[0] + "," + s1[1])
self.messages.append([chr(ch), s1[0], s1[1]])
elif ch == ord('E'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
elif ch == ord('B'):
s = self.read_string(self.payload[pos:], i - 4)
s1 = s.split('\0')
log.debug("> " + s1[0] + "," + s1[1])
self.messages.append([chr(ch), s1[0], s1[1]])
elif ch == ord('X'):
self.messages.append([chr(ch), None])
cont = False
# server to client
elif ch == ord('T'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('D'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('C'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('1'):
self.messages.append([chr(ch), None])
elif ch == ord('2'):
self.messages.append([chr(ch), None])
elif ch == ord('n'):
self.messages.append([chr(ch), None])
elif ch == ord('Z'):
self.messages.append([chr(ch), None])
cont = False
pos = pos + (i - 4)
def parse(self):
self.pos = 12
while len(self.payload) > self.pos + 5:
c = self.read_char()
log.debug("%02x(%c)" % (c, c))
l = self.read_int32()
log.debug(l)
self.pos = self.pos + l
def read_char(self, data):
ch = data[0]
return ch
def read_int32(self, data):
i = (data[0] << 24) + (data[1] << 16) + (data[2] << 8) + (data[3])
return i
def read_string(self, data, size):
s = ""
i = 0
while i < size:
s = s + "%c" % data[i]
i = i + 1
return s
class TcpdumpWrapper:
pkt = None
tcpdump = None
process = None
line = None
def __init__(self, host=None, port=None, interface=None, inputfile=None,
debug=None):
if debug is True:
log.setLevel(log.DEBUG)
self.host = host
self.port = port
self.iface = interface
self.inputfile = inputfile
self.debug = debug
if self.port is None:
self.port = "5432"
if self.iface is None:
self.iface = "any"
self.tcpdump = "tcpdump -tttt"
if self.inputfile is not None:
self.tcpdump = self.tcpdump + " -r " + self.inputfile
self.tcpdump = (self.tcpdump + " -l -i " + self.iface +
" -s 0 -X -p tcp port " + str(self.port))
if self.host is not None:
self.tcpdump = self.tcpdump + " and host " + self.host
log.info(self.tcpdump)
self.process = subprocess.Popen([self.tcpdump],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
bufsize=0)
# header
self.p1 = re.compile('^([\d-]+) ([\d:\.]+) IP (.*) > (.*): Flags')
# da |
septicmk/MEHI | test/test_utils.py | Python | bsd-3-clause | 1,542 | 0.001297 | ############################# | ###
# Author : septicmk
# Date : 2015/07/2 | 4 19:41:26
# FileName : test_utils.py
################################
import shutil
import tempfile
import unittest
from numpy import vstack
from pyspark import SparkContext
class PySparkTestCase(unittest.TestCase):
def setUp(self):
class_name = self.__class__.__name__
self.sc = SparkContext('local', class_name)
self.sc._jvm.System.setProperty("spark.ui.showConsoleProgress", "false")
log4j = self.sc._jvm.org.apache.log4j
log4j.LogManager.getRootLogger().setLevel(log4j.Level.FATAL)
def tearDown(self):
self.sc.stop()
# To avoid Akka rebinding to the same port, since it doesn't unbind
# immediately on shutdown
self.sc._jvm.System.clearProperty("spark.driver.port")
class PySparkTestCaseWithOutputDir(PySparkTestCase):
def setUp(self):
super(PySparkTestCaseWithOutputDir, self).setUp()
self.outputdir = tempfile.mkdtemp()
def tearDown(self):
super(PySparkTestCaseWithOutputDir, self).tearDown()
shutil.rmtree(self.outputdir)
class LocalTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
class LocalTestCaseWithOutputDir(LocalTestCase):
def setUp(self):
super(LocalTestCaseWithOutputDir, self).setUp()
self.outputdir = tempfile.mktemp()
def tearDown(self):
super(LocalTestCaseWithOutputDir, self).tearDown()
shutil.rmtree(self.outputdir)
|
tdsmith/numpy | numpy/distutils/fcompiler/gnu.py | Python | bsd-3-clause | 15,266 | 0.001834 | from __future__ import division, absolute_import, print_function
import re
import os
import sys
import warnings
import platform
import tempfile
from subprocess import Popen, PIPE, STDOUT
from numpy.distutils.fcompiler import FCompiler
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import msvc_runtime_library
from numpy.distutils.compat import get_exception
compilers = ['GnuFCompiler', 'Gnu95FCompiler']
TARGET_R = re.compile("Target: ([a-zA-Z0-9_\-]*)")
# XXX: handle cross compilation
def is_win64():
return sys.platform == "win32" and platform.architecture()[0] == "64bit"
def is_win32():
return sys.platform == "win32" and platform.architecture()[0] == "32bit"
if is_win64():
#_EXTRAFLAGS = ["-fno-leading-underscore"]
_EXTRAFLAGS = []
else:
_EXTRAFLAGS = []
class GnuFCompiler(FCompiler):
compiler_type = 'gnu'
compiler_aliases = ('g77',)
description = 'GNU Fortran 77 compiler'
def gnu_version_match(self, version_string):
"""Handle the different versions of GNU fortran compilers"""
# Strip warning(s) that may be emitted by gfortran
while version_string.startswith('gfortran: warning'):
version_string = version_string[version_string.find('\n')+1:]
# Gfortran versions from after 2010 will output a simple string
# (usually "x.y", "x.y.z" or "x.y.z-q") for ``-dumpversion``; older
# gfortrans may still return long version strings (``-dumpversion`` was
# an alias for ``--version``)
if len(version_string) <= 20:
# Try to find a valid version string
m = re.search(r'([0-9.]+)', version_string)
if m:
# g77 provides a longer version string that starts with GNU
# Fortran
if version_string.startswith('GNU Fortran'):
return ('g77', m.group(1))
# gfortran only outputs a version string such as #.#.#, so check
# if the match is at the start of the string
elif m.start() == 0:
return ('gfortran', m.group(1))
else:
# Output probably from --version, try harder:
m = re.search(r'GNU Fortran\s+95.*?([0-9-.]+)', version_string)
if m:
return ('gfortran', m.group(1))
m = re.search(r'GNU Fortran.*?\-?([0-9-.]+)', version_string)
if m:
v = m.group(1)
if v.st | artswith('0') or v.startswith('2') or v.startswith('3'):
# the '0' is for early g77's
return ('g | 77', v)
else:
# at some point in the 4.x series, the ' 95' was dropped
# from the version string
return ('gfortran', v)
# If still nothing, raise an error to make the problem easy to find.
err = 'A valid Fortran version was not found in this string:\n'
raise ValueError(err + version_string)
def version_match(self, version_string):
v = self.gnu_version_match(version_string)
if not v or v[0] != 'g77':
return None
return v[1]
possible_executables = ['g77', 'f77']
executables = {
'version_cmd' : [None, "-dumpversion"],
'compiler_f77' : [None, "-g", "-Wall", "-fno-second-underscore"],
'compiler_f90' : None, # Use --fcompiler=gnu95 for f90 codes
'compiler_fix' : None,
'linker_so' : [None, "-g", "-Wall"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-g", "-Wall"]
}
module_dir_switch = None
module_include_switch = None
# Cygwin: f771: warning: -fPIC ignored for target (all code is
# position independent)
if os.name != 'nt' and sys.platform != 'cygwin':
pic_flags = ['-fPIC']
# use -mno-cygwin for g77 when Python is not Cygwin-Python
if sys.platform == 'win32':
for key in ['version_cmd', 'compiler_f77', 'linker_so', 'linker_exe']:
executables[key].append('-mno-cygwin')
g2c = 'g2c'
suggested_f90_compiler = 'gnu95'
def get_flags_linker_so(self):
opt = self.linker_so[1:]
if sys.platform == 'darwin':
target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
# If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
# and leave it alone. But, distutils will complain if the
# environment's value is different from the one in the Python
# Makefile used to build Python. We let disutils handle this
# error checking.
if not target:
# If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
# we try to get it first from the Python Makefile and then we
# fall back to setting it to 10.3 to maximize the set of
# versions we can work with. This is a reasonable default
# even when using the official Python dist and those derived
# from it.
import distutils.sysconfig as sc
g = {}
filename = sc.get_makefile_filename()
sc.parse_makefile(filename, g)
target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
if target == '10.3':
s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
warnings.warn(s)
opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
else:
opt.append("-shared -Wl,-gc-sections -Wl,-s")
if sys.platform.startswith('sunos'):
# SunOS often has dynamically loaded symbols defined in the
# static library libg2c.a The linker doesn't like this. To
# ignore the problem, use the -mimpure-text flag. It isn't
# the safest thing, but seems to work. 'man gcc' says:
# ".. Instead of using -mimpure-text, you should compile all
# source code with -fpic or -fPIC."
opt.append('-mimpure-text')
return opt
def get_libgcc_dir(self):
status, output = exec_command(self.compiler_f77 +
['-print-libgcc-file-name'],
use_tee=0)
if not status:
return os.path.dirname(output)
return None
def get_library_dirs(self):
opt = []
if sys.platform[:5] != 'linux':
d = self.get_libgcc_dir()
if d:
# if windows and not cygwin, libg2c lies in a different folder
if sys.platform == 'win32' and not d.startswith('/usr/lib'):
d = os.path.normpath(d)
path = os.path.join(d, "lib%s.a" % self.g2c)
if not os.path.exists(path):
root = os.path.join(d, *((os.pardir,)*4))
d2 = os.path.abspath(os.path.join(root, 'lib'))
path = os.path.join(d2, "lib%s.a" % self.g2c)
if os.path.exists(path):
opt.append(d2)
opt.append(d)
return opt
def get_libraries(self):
opt = []
d = self.get_libgcc_dir()
if d is not None:
g2c = self.g2c + '-pic'
f = self.static_lib_format % (g2c, self.static_lib_extension)
if not os.path.isfile(os.path.join(d, f)):
g2c = self.g2c
else:
g2c = self.g2c
if g2c is not None:
opt.append(g2c)
c_compiler = self.c_compiler
if sys.platform == 'win32' and c_compiler and \
c_compiler.compiler_type == 'msvc':
# the following code is not needed (read: breaks) when using MinGW
# in case want to link F77 compiled code with MSVC
opt.append('gcc')
runtime_lib = msvc_runtime_library()
if runtime_lib:
opt.append(runtime_lib)
|
FePhyFoFum/PyPHLAWD | src/cluster_tree_wc.py | Python | gpl-2.0 | 2,115 | 0.009456 | import os
import sys
import random
from clint.textui import colored
import subprocess
from conf import DI
from conf import treemake
from conf import py
import emoticons
from logger import Logger
import tree_reader
def run_tree(infile,outfile):
cmd = "FastTree -nt -gtr "+infile+" 2>fasttree.out > "+outfile
os.system(cmd)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("python "+sys.argv[0]+" startdir logfile")
sys.exit(0)
root = sys.argv[1]
logfile = sys.argv[2]
log = Logger(logfile)
# get the random directory so you can run multiple things in the same directory
rantempdir = "TEMPDIR_"+str(random.randint(0,100000))+"/"
print(colored.blue("CREATED"),rantempdir)
os.mkdir(rantempdir)
log.wac("CREATED "+rantempdir)
count = 0
for root, dirs, files in os.walk(root,topdown=False):
if "clusters" not in root:
log.whac(root)
if len(dirs) == 1:
print(colored.yellow("CLUSTERING SINGLE"),root,colored.yellow(emoticons.get_ran_emot("meh")))
log.wac("CLUSTERING SINGLE "+root)
tablename = [x for x in files if ".table" in x][0]
cmd = py+" "+DI+"cluster_single_wc.py "+root+" "+logfile
os.system(cmd)
else:
print(colored.blue("CLUSTERING INTERNAL"),root,colored.blue(emoticons.get_ran_emot("meh")))
log.wac("CLUS | TERING INTERNAL "+root)
tablename = [x for x in files if ".table" in x][0]
if root[-1] != "/":
root = root+"/"
cmd = py+" "+DI+"cluster_internal_wc.py "+root+ " "+root+tablename+" "+logfile+" "+rantempdir
rc = subprocess.call(cmd,shell=True)
if rc != 0:
print(colored.red("PROBLEM WITH CLUSTERING INTERNAL"),colored.red(emoticons.get_ran_emot("sad")))
| sys.exit(1)
cmd = py+" "+DI+"annotate_clusters.py "+sys.argv[1]
os.system(cmd)
cmd = py+" "+DI+"post_process_cluster_info.py "+sys.argv[1]
os.system(cmd)
|
SF-Zhou/LeetCode.Solutions | solutions/container_with_most_water_2.py | Python | mit | 480 | 0 | class Solution(object):
def maxArea(self, height):
"""
:type height: List[int]
:rtype: int
"""
res = 0
left, right = 0, len(height) - 1
while left < right:
if height[left] <= height[right]:
res = max(res, height[left] | * (right - left))
left += 1
else:
res = max(res, height[right] * (right - left))
right -= 1
return | res
|
Graylog2/graylog-ansible-role | molecule/ci/tests/conftest.py | Python | apache-2.0 | 650 | 0.001538 | """PyTest Fixtures."""
from __future__ import absolute_import
import os
import pytest
def pytest_runtest_setup(item): |
"""Run tests only when under molecule with testinfra installed."""
try:
import testinfra
except ImportError:
pytest.skip("Test requires testinfra", allow_module_level=True)
if "MOLECULE_INVENTORY_FILE" in os.environ:
pytest.testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ["MOLECULE_INVENTORY | _FILE"]
).get_hosts("all")
else:
pytest.skip(
"Test should run only from inside molecule.", allow_module_level=True
)
|
tensorflow/datasets | tensorflow_datasets/question_answering/xquad_test.py | Python | apache-2.0 | 1,233 | 0.004055 | # coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for xquad dataset module."""
from tensorflow_datasets import testing
from tensorflow_datasets.question_answering import xquad
class XquadTest(testing.DatasetBuilderTestCase):
DATASET_CLASS = xquad.Xquad
BUILDER_CONFIG_NAMES_TO_TEST = ["ar"]
DL_EXTRACT_RESULT = {
"translate-train": "translate-train.json",
"tran | slate-dev": "translate-dev.json",
"tra | nslate-test": "translate-test.json",
"test": "test.json",
}
SPLITS = {
"translate-train": 3,
"translate-dev": 2,
"translate-test": 3,
"test": 1,
}
if __name__ == "__main__":
testing.test_main()
|
jstacoder/modern-business-flask | app/data.py | Python | apache-2.0 | 6,480 | 0.01821 | from marshmallow import Schema, fields, pprint,utils
from faker import Factory
import pickle
import os
tmt = utils.to_marshallable_type
factory = Factory()
faker = factory.create()
f = faker
def dump_fields(meta):
for table in meta.tables.values():
print '{}'.format(table.name)
print '------------------------'
print '{}'.format('\n'.join(map(str,table.c)))
print
print
def dump_table(name,metadata):
return tmt(metadata.tables[name],metadata.tables[name].columns.keys())
class PageSchema(Schema):
id = fields.Integer()
name = fields.String()
title = fields.String()
keywords = fields.String()
slug = fields.String()
template_file = fields.String()
add_right_sidebar = fields.Boolean()
add_left_sidebar = fields.Boolean()
add_to_nav = fields.Boolean()
body_content = fields.String()
date_added = fields.DateTime()
date_modified = fields.DateTime()
def __init__(self,model,*args,**kwargs):
self._model = model
super(PageSchema,self).__init__(*args,**kwargs)
def make_object(self,data):
return self._model(**data)
# coding: utf-8
def save_pages(model):
schema = PageSchema(model,many=True)
pages = model.query().all()
results = schema.dump(pages)
with open('.pages','w') as f:
f.write(pickle.dumps(results._asdict().get('data')))
def load_page(fname):
return pickle.loads(open(fname,'r').read())
def load_page_files():
return sorted(
filter(
lambda x: x if (
lambda y: '.page' in y
)(x) else None,
os.listdir(
os.path.abspath(
os.path.dirname(
os.path.dirname(
__file__
)
)
)
)
),
key=lambda x: int(x.replace('.page-','')),
#cmp=lambda x,y: int(x)-int(y)
)
def call_method(obj,m):
return o.__class__.__dict__[m].__call__(o) if m in o.__class__.__dict__ else None
def extract_page(filename):
return pickle.loads(open(filename,'r').read())
def load_pages():
return map(extract_page,load_page_files())
def load_globals():
return pickle.loads(open('.globals','r').read())
def save_globals(g):
with open('.globals','w') as f:
f.write(pickle.dumps(g))
def resave_pages(model):
schema = PageSchema(model,many=True)
results = schema.load(pickle.loads(open('.pages','r').read()))._asdict().get('data')
for itm in results:
# #for attr in dir(itm):
# # if not attr.startswith('_'):
# # if not callable(getattr(itm,attr)) and (type(getattr(itm,attr)) == unicode or type(getattr(itm,attr)) == str):
# # setattr(itm,attr,str(getattr(itm,attr)))
itm.save()
# #print type(itm)
def make_emails(num,user,model):
for itm in [make_email(f.email(),model) for x in range(num)]:
user.email_addresses.append(itm)
itm.save()
def make_email(txt,model):
email = model()
email.text = txt
email.save()
return email
def post_data(blog_id):
return {
'content':f.text(),
'title':f.word(),
| 'blog_id':blog_id,
}
def make_post(model,blog_id):
post = model(**post_data(blog_id))
post.save()
return post
def make_posts(model,blogs):
rtn = []
for b in blogs:
for i in range(5):
rtn.append(make_post(model,b.id))
return rtn
categorys = [
{'name':'python','description':f.text()},
{'name':'flask','d | escription':f.text()},
{'name':'another','description':f.text()},
]
def make_categorys(model):
rtn = []
for c in categorys:
tmp = model()
tmp.name = c['name']
tmp.description = c['description']
tmp.save()
rtn.append(tmp)
return rtn
def users():
return {
'username' : f.user_name(),
'first_name' : f.first_name_male(),
'last_name' : f.last_name(),
'password':f.text(),
}
def make_user(model):
user = model(**users())
user.save()
return user
def make_users(user_model,email_model,blog_model):
rtn = []
for i in range(5):
user = user_model(**users())
make_emails(5,user,email_model)
user.save()
blogs = []
for x in range(5):
blogs.append(make_blog(user.id,blog_model))
for b in blogs:
user.blogs.append(b)
user.save()
rtn.append(user)
return rtn
comments = {}
def make_comment(data,model):
pass
blog = lambda aid: dict(author_id=aid,name=f.name())
def make_blog(aid,model):
tmp = model()
data = blog(aid)
tmp.name = data['name']
tmp.author_id = data['author_id']
tmp.save()
return tmp
pages = {
'contact':'contact',
'about':'about',
'services':'services',
'blog-home-1':'blog-home-1',
'blog-home-2':'blog-home-2',
'blog-post':'blog-post',
'fill-width':'full-width-page',
'fill-width-page':'full-width-page',
'faq':'faq',
'pricing':'pricing',
'404':'404',
'sidebar':'sidebar',
'portfolio-1-col':'portfolio-1-col',
'portfolio-2-col':'portfolio-2-col',
'portfolio-3-col':'portfolio-3-col',
'portfolio-4-col':'portfolio-4-col',
'portfolio-item':'portfolio-item',
}
def make_page(slug, model):
    """Create and save one page fixture for ``slug``.

    Blog pages get a left sidebar and a navbar entry; the 'sidebar' page
    gets a left sidebar only.
    """
    new_page = model()
    new_page.title = slug.title()
    new_page.slug = slug
    new_page.template_file = '%s.html' % slug
    if slug == 'sidebar':
        new_page.add_left_sidebar = True
    if 'blog' in slug:
        new_page.add_left_sidebar = True
        new_page.add_to_navbar = True
    new_page.save()
    return new_page
if __name__ == "__main__":
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine,MetaData
meta = MetaData()
base = automap_base()
e = create_engine('sqlite:///new_test.db')
e2 = create_engine('sqlite:///newest_test.db')
base.prepare(e,reflect=True)
for table in base.metadata.tables:
base.metadata.tables[table].tometadata(meta)
print meta.tables.keys()
print base.classes.keys()
print dir(base.classes['users'])
e2.echo=True
meta.create_all(bind=e2)
|
babbel/floto | examples/child_workflow/decider.py | Python | mit | 1,797 | 0.006678 | import floto
import logging
from floto.specs.task import ActivityTask, ChildWorkflow
from floto.specs import DeciderSpec
import floto.decider

logger = logging.getLogger(__name__)

# Retry failed tasks up to three times, immediately.
# NOTE(review): assumes floto.specs exposes the retry_strategy submodule on
# package import -- confirm against the floto package __init__.
rs = floto.specs.retry_strategy.InstantRetry(retries=3)
domain = 'floto_test'

# Define tasks and decider of the parent workflow.  (Fixed: dump artifacts
# had corrupted "domain" and the decider.run() call below.)
copy_files = ActivityTask(domain=domain, name='copyFiles', version='1', retry_strategy=rs)
child_workflow = ChildWorkflow(domain=domain,
                               workflow_type_name='s3_files_example',
                               workflow_type_version='1',
                               requires=[copy_files.id_],
                               task_list='file_length_task_list',
                               retry_strategy=rs)

decider_spec = DeciderSpec(domain=domain,
                           task_list='copy_files_task_list',
                           default_activity_task_list='s3_files_worker',
                           activity_tasks=[copy_files, child_workflow],
                           terminate_decider_after_completion=True)

decider = floto.decider.Decider(decider_spec=decider_spec)
decider.run(separate_process=True)

# Define tasks and decider of the child workflow
file_length = ActivityTask(domain=domain, name='fileLength', version='1', retry_strategy=rs)

child_workflow_spec = DeciderSpec(domain=domain,
                                  task_list='file_length_task_list',
                                  default_activity_task_list='s3_files_worker',
                                  activity_tasks=[file_length],
                                  terminate_decider_after_completion=True)

child_decider = floto.decider.Decider(decider_spec=child_workflow_spec)
child_decider.run(separate_process=True)

# Wait until workflow has completed
decider._separate_process.join()
tedye/leetcode | Python/leetcode.124.binary-tree-maximum-path-sum.py | Python | mit | 704 | 0.017045 | please enter code here...
# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None
class Solution(object):
    """LeetCode 124: maximum path sum in a binary tree.

    A path is any node sequence connected through parent-child links; it
    must contain at least one node and need not pass through the root.
    (Fixed: a stray ``|`` artifact had corrupted the empty-root guard.)
    """
    def __init__(self):
        # Best path sum seen so far; seeded below any 32-bit node sum.
        self.res = -0x7fffffff

    def maxPathSum(self, root):
        """
        :type root: TreeNode
        :rtype: int
        """
        if not root: return 0
        self.helper(root)
        return self.res

    def helper(self, root):
        # Post-order: returns the best downward path ending at ``root``,
        # clamped at 0 so negative branches can be dropped entirely.
        l = 0
        r = 0
        if root.left: l = self.helper(root.left)
        if root.right: r = self.helper(root.right)
        # Best path "through" this node joins both children.
        self.res = max(l + r + root.val, self.res)
        return max(max(l, r) + root.val, 0)
mplucinski/tex-gettext | tex_math.py | Python | bsd-2-clause | 15,267 | 0.006747 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import subprocess
import sys
import unittest
COMMAND_PREFIX = 'gettextmath'
def generate_command_call(name, prefix, *args):
    """Render a TeX command call: ``\\<prefix><name>{arg1}{arg2}...``."""
    joined = '}{'.join(args)
    return '\\{0}{1}{{{2}}}'.format(prefix, name, joined)
class Parser:
    """Shunting-yard parser for C-style boolean/arithmetic expressions.

    ``parse()`` tokenizes ``source`` and returns the tokens in reverse
    Polish (RPN) order, suitable for ``Generator``.  Identifier overrides
    (see :meth:`override_identifier`) are applied to the output queue.

    (Fixed: dump artifacts had corrupted ``isinstance`` in
    OperatorTernaryStart.consume and the ``function`` attribute of
    OperatorTernaryMiddle.)
    """

    class Token:
        # True for tokens that pop operands off the evaluation stack when
        # the RPN queue is consumed by Generator.
        function = False

        def process(self, stack, output):
            # Default shunting-yard behaviour: operands go to the output.
            output.append(self)

        def consume(self, stack):
            stack.append(self)

        def __repr__(self):
            return str(self)

    class Number(Token):
        def __init__(self, number):
            self.number = int(number)

        def generate(self):
            return str(self.number)

        def __eq__(self, other):
            return isinstance(other, Parser.Number) and self.number == other.number

        def __str__(self):
            return 'Number({})'.format(self.number)

    class Identifier(Token):
        def __init__(self, identifier):
            self.identifier = identifier

        def generate(self):
            return self.identifier

        def __eq__(self, other):
            return isinstance(other, Parser.Identifier) and self.identifier == other.identifier

        def __str__(self):
            return 'Identifier("{}")'.format(self.identifier)

    class Operator(Token):
        function = True

        def __init__(self, operation):
            self.operation = operation

        def process(self, stack, output):
            # Pop tighter-binding operators (lower priority number) to the
            # output before pushing this one.
            while len(stack) > 0 and stack[len(stack)-1].priority < self.priority:
                output.append(stack.pop())
            stack.append(self)

        def __eq__(self, other):
            return type(self) == type(other) and self.operation == other.operation

        def __str__(self):
            return 'Operator("{}")'.format(self.operation)

    class BinaryOperator(Operator):
        def consume(self, stack):
            self.arg2 = stack.pop()
            self.arg1 = stack.pop()
            stack.append(self)

        def generate(self):
            return generate_command_call(self.command, COMMAND_PREFIX, self.arg1.generate(), self.arg2.generate())

    class OperatorEqual(BinaryOperator):
        priority = 7
        command = 'equal'

    class OperatorNotEqual(BinaryOperator):
        priority = 7
        command = 'notequal'

    class OperatorGreaterEqual(BinaryOperator):
        priority = 6
        command = 'greaterequal'

    class OperatorLesserEqual(BinaryOperator):
        priority = 6
        command = 'lesserequal'

    class OperatorGreaterThan(BinaryOperator):
        priority = 6
        command = 'greaterthan'

    class OperatorLesserThan(BinaryOperator):
        priority = 6
        command = 'lesserthan'

    class OperatorAnd(BinaryOperator):
        priority = 11
        command = 'and'

    class OperatorOr(BinaryOperator):
        priority = 12
        command = 'or'

    class OperatorModulo(BinaryOperator):
        priority = 3
        command = 'modulo'

    class OperatorTernaryStart(Operator):
        priority = 100
        function = False

        def consume(self, stack):
            self.arg_truefalse = stack.pop()
            self.arg_condition = stack.pop()
            if not isinstance(self.arg_truefalse, Parser.OperatorTernaryMiddle):
                raise Exception('Operator "?" must have matching ":", but "{}" found'.format(self.arg_truefalse))
            stack.append(self)

        def generate(self):
            return generate_command_call('ifthenelse', COMMAND_PREFIX, self.arg_condition.generate(), self.arg_truefalse.true.generate(), self.arg_truefalse.false.generate())

    class OperatorTernaryMiddle(Operator):
        priority = 100
        function = False

        def consume(self, stack):
            self.false = stack.pop()
            self.true = stack.pop()
            stack.append(self)

    class OpenParenthesis(Token):
        priority = 100

        def process(self, stack, output):
            stack.append(self)

        def __str__(self):
            return 'OpenParenthesis'

    class CloseParenthesis(Token):
        priority = 100

        def process(self, stack, output):
            # Flush operators back to the matching open parenthesis.
            while len(stack) > 0 and not isinstance(stack[len(stack)-1], Parser.OpenParenthesis):
                x = stack.pop()
                output.append(x)
            open = stack.pop()
            if not isinstance(open, Parser.OpenParenthesis):
                raise Exception('Could not find matching left parenthesis')
            if len(stack) > 0 and stack[len(stack)-1].function:
                output.append(stack.pop())

        def __str__(self):
            return 'CloseParenthesis'

    # Longest-match-first token table: (compiled regex, token class).
    # A class of None means "skip" (whitespace).
    tokens = [
        # boolean operations
        (re.compile(r'^(==)'), OperatorEqual),
        (re.compile(r'^(!=)'), OperatorNotEqual),
        (re.compile(r'^(>=)'), OperatorGreaterEqual),
        (re.compile(r'^(<=)'), OperatorLesserEqual),
        (re.compile(r'^(>)'), OperatorGreaterThan),
        (re.compile(r'^(<)'), OperatorLesserThan),
        (re.compile(r'^(&&)'), OperatorAnd),
        (re.compile(r'^(\|\|)'), OperatorOr),
        (re.compile(r'^(\?)'), OperatorTernaryStart),
        (re.compile(r'^(:)'), OperatorTernaryMiddle),
        # arithmentic operations
        (re.compile(r'^(%)'), OperatorModulo),
        # parenthesis
        (re.compile(r'^\('), OpenParenthesis),
        (re.compile(r'^\)'), CloseParenthesis),
        # others
        (re.compile(r'^([0-9]+)'), Number),
        (re.compile(r'^([_A-Za-z][_A-Za-z0-9]*)'), Identifier),
        (re.compile(r'^\s+'), None),
    ]

    def __init__(self, source):
        self.source = source
        self.overriden_identifiers = {}

    def override_identifier(self, old_identifier, new_identifier):
        """Rewrite ``old_identifier`` to ``new_identifier`` in parse() output."""
        self.overriden_identifiers[old_identifier] = new_identifier

    def parse(self):
        """Tokenize ``self.source`` and return the token queue in RPN order.

        Raises ``Exception`` on unknown tokens or unbalanced parentheses.
        """
        source = self.source
        output = []
        stack = []
        while len(source) > 0:
            for i in self.tokens:
                m = i[0].match(source)
                if m:
                    break
            if not m:
                raise Exception('No token matches "{}<...>"'.format(source[:10]))
            source = source[len(m.group(0)):]
            token = i[1]
            if not token:
                continue
            args = m.groups()
            token = token(*args)
            # process() returns nothing; it routes the token to stack/output.
            token.process(stack, output)
        while len(stack) > 0:
            output.append(stack.pop())
        # Apply identifier overrides to the finished queue.
        o = []
        for i in output:
            if isinstance(i, Parser.Identifier):
                o.append(Parser.Identifier(self.overriden_identifiers.get(i.identifier, i.identifier)))
            else:
                o.append(i)
        output = o
        return output
class Generator:
    """Evaluate a Parser RPN token queue into a TeX expression string."""

    def __init__(self, queue):
        self.queue = queue

    def generate(self):
        """Consume the queue; the single surviving token renders the result."""
        pending = []
        for token in self.queue:
            token.consume(pending)
        if len(pending) != 1:
            raise Exception('RPN processing problem, stack size is not 1 ({})'.format(repr(pending)))
        return pending[0].generate()
def generate_command(name, source, new_command=True):
    """Emit a ``\\newcommand`` (or ``\\renewcommand``) named ``name`` whose
    single argument ``#1`` replaces the identifier ``n`` in ``source``."""
    parser = Parser(source)
    parser.override_identifier('n', '#1')
    body = Generator(parser.parse()).generate()
    keyword = '\\newcommand' if new_command else '\\renewcommand'
    return '{0}{{{1}}}[1]{{{2}}}'.format(keyword, name, body)
class TestMath(unittest.TestCase):
def test_parser(self):
exprs = [(
'0',
[Parser.Number(0),]
),(
'1',
[Parser.Number(1),]
),(
'01',
[Parser.Number(1),]
),(
'0 1',
[Parser.Number(0), Parser.Number(1)]
),(
'0 == 1',
[Parser.Number(0), Parser.Number(1), Parser.OperatorEqual('==')]
),(
'0%2 == 1',
[
Parser.Number(0),
Parser.Number(2),
Parser.OperatorModulo('%'),
Parser.Number(1),
Parser.OperatorEqual('==')
]
),(
'0 == 1%2',
[
Parser.Number(0),
Parser.Nu |
davidvon/pipa-pay-server | admin/sms/sdk.py | Python | apache-2.0 | 2,956 | 0.001691 | # -*- coding: UTF-8 -*-
import hashlib
import base64
import datetime
import urllib2
import json
class TemplateSMS:
    """Minimal REST client for a CCP/Yuntongxun-style SMS gateway.

    NOTE: targets Python 2 (urllib2, base64.encodestring), matching the
    rest of this module.  (Fixed: dump artifacts had corrupted the URL
    line in send_template_sms.)
    """

    # Class-level defaults; overwritten per instance by the setters below.
    account_sid = ''
    account_token = ''
    app_id = ''
    server_ip = ''
    server_port = ''
    soft_version = ''
    timestamp = ''

    def __init__(self, ip, port, version):
        # REST server address and the API version segment of the URL.
        self.server_ip = ip
        self.server_port = port
        self.soft_version = version

    def set_account(self, account_sid, token):
        """Set the account SID and auth token used to sign every request."""
        self.account_sid = account_sid
        self.account_token = token

    def set_app_id(self, app_id):
        """Set the application id sent in the SMS payload."""
        self.app_id = app_id

    def _request_sig(self):
        """Refresh ``self.timestamp`` and return the request signature.

        The gateway expects MD5(sid + token + timestamp) as uppercase hex.
        """
        self.timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        sig = hashlib.md5()
        sig.update(self.account_sid + self.account_token + self.timestamp)
        return sig.hexdigest().upper()

    def send_template_sms(self, to, random, valid_min, temp_id):
        """POST a template SMS to ``to`` with datas [random, valid_min].

        Returns the decoded JSON response, or ``{'172001': 'network error'}``
        when the request fails (legacy error shape kept for callers).
        """
        sig = self._request_sig()
        url = ("https://" + self.server_ip + ":" + self.server_port + "/" +
               self.soft_version + "/Accounts/" + self.account_sid +
               "/SMS/TemplateSMS?sig=" + sig)
        src = self.account_sid + ":" + self.timestamp
        req = urllib2.Request(url)
        b = '["%s","%s"]' % (random, valid_min)
        body = '''{"to": "%s", "datas": %s, "templateId": "%s", "appId": "%s"}''' % (to, b, temp_id, self.app_id)
        req.add_data(body)
        # Authorization is base64("sid:timestamp").
        auth = base64.encodestring(src).strip()
        req.add_header("Authorization", auth)
        req.add_header("Accept", 'application/json;')
        req.add_header("Content-Type", "application/json;charset=utf-8;")
        req.add_header("Host", "127.0.0.1")
        req.add_header("content-length", len(body))
        try:
            res = urllib2.urlopen(req)
            data = res.read()
            res.close()
            return json.loads(data)
        except Exception:
            return {'172001': 'network error'}

    def query_account_info(self):
        """GET the account info.

        Returns the decoded JSON response, or ``{"statusCode": '172001'}``
        on failure.
        """
        sig = self._request_sig()
        url = ("https://" + self.server_ip + ":" + self.server_port + "/" +
               self.soft_version + "/Accounts/" + self.account_sid +
               "/AccountInfo?sig=" + sig)
        src = self.account_sid + ":" + self.timestamp
        auth = base64.encodestring(src).strip()
        req = urllib2.Request(url)
        req.add_header("Accept", "application/json")
        # Fixed malformed header value ("application/jsoncharset=utf-8").
        req.add_header("Content-Type", "application/json;charset=utf-8")
        req.add_header("Authorization", auth)
        try:
            res = urllib2.urlopen(req)
            data = res.read()
            res.close()
            return json.loads(data)
        except Exception:
            return {"statusCode": '172001'}
cea-sec/ivre | ivre/tools/db2view.py | Python | gpl-3.0 | 3,806 | 0.000788 | #! /usr/bin/env python
# This file is part of IVRE.
# Copyright 2011 - 2021 Pierre LALET <pierre@droids-corp.org>
#
# IVRE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# IVRE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IVRE. If not, see <http://www.gnu.org/licenses/>.
"""Create views from nmap and passive databases."""
import argparse
from typing import Generator, List
from ivre.activecli import displayfunction_json
from ivre.db import db, DB
from ivre.types import Record
from ivre.view import from_passive, from_nmap, to_view
def main() -> None:
    """CLI entry point: build views from the nmap and/or passive databases.

    (Fixed: dump artifacts had corrupted ``view_category`` in the passive
    branch and the ``else:`` of the output selection.)
    """
    parser = argparse.ArgumentParser(description=__doc__, parents=[DB().argparser])
    # Start from the empty filter of each configured backend.
    if db.nmap is None:
        fltnmap = None
    else:
        fltnmap = db.nmap.flt_empty
    if db.passive is None:
        fltpass = None
    else:
        fltpass = db.passive.flt_empty
    _from: List[Generator[Record, None, None]] = []
    parser.add_argument(
        "--view-category",
        metavar="CATEGORY",
        help="Choose a different category than the default",
    )
    parser.add_argument(
        "--test",
        "-t",
        action="store_true",
        help="Give results in standard output instead of "
        "inserting them in database.",
    )
    parser.add_argument(
        "--verbose",
        "-v",
        action="store_true",
        help="For test output, print out formatted results.",
    )
    parser.add_argument(
        "--no-merge",
        action="store_true",
        help="Do **not** " "merge with existing results for same host and " "source.",
    )
    subparsers = parser.add_subparsers(
        dest="view_source",
        help=(
            "Accepted values are 'nmap' and 'passive'. " "None or 'all' will do both"
        ),
    )
    if db.nmap is not None:
        subparsers.add_parser("nmap", parents=[db.nmap.argparser])
    if db.passive is not None:
        subparsers.add_parser("passive", parents=[db.passive.argparser])
    subparsers.add_parser("all")
    args = parser.parse_args()
    view_category = args.view_category
    if not args.view_source:
        args.view_source = "all"
    # Build one record generator per selected source.
    if args.view_source == "all":
        _from = []
        if db.nmap is not None:
            fltnmap = DB().parse_args(args, flt=fltnmap)
            _from.append(from_nmap(fltnmap, category=view_category))
        if db.passive is not None:
            fltpass = DB().parse_args(args, flt=fltpass)
            _from.append(from_passive(fltpass, category=view_category))
    elif args.view_source == "nmap":
        if db.nmap is None:
            parser.error('Cannot use "nmap" (no Nmap database exists)')
        fltnmap = db.nmap.parse_args(args, fltnmap)
        _from = [from_nmap(fltnmap, category=view_category)]
    elif args.view_source == "passive":
        if db.passive is None:
            parser.error('Cannot use "passive" (no Passive database exists)')
        fltpass = db.passive.parse_args(args, fltpass)
        _from = [from_passive(fltpass, category=view_category)]
    # Select the output sink: stdout (test), plain store, or store-or-merge.
    if args.test:

        def output(host: Record) -> None:
            return displayfunction_json([host], db.view)

    elif args.no_merge:
        output = db.view.store_host
    else:
        output = db.view.store_or_merge_host
    # Output results
    itr = to_view(_from)
    if not itr:
        return
    for elt in itr:
        output(elt)
MiLk/ansible-modules-core | system/service.py | Python | gpl-3.0 | 52,336 | 0.003573 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: service
author: Michael DeHaan
version_added: "0.1"
short_description: Manage services.
description:
- Controls services on remote hosts.
options:
name:
required: true
description:
- Name of the service.
state:
required: false
choices: [ started, stopped, restarted, reloaded ]
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
service. C(reloaded) will always reload. B(At least one of state
and enabled are required.)
sleep:
required: false
version_added: "1.3"
description:
- If the service is being C(restarted) then sleep this many seconds
between the stop and start command. This helps to workaround badly
behaving init scripts that exit immediately after signaling a process
to stop.
pattern:
required: false
version_added: "0.7"
description:
- If the service does not respond to the status command, name a
substring to look for as would be found in the output of the I(ps)
command as a stand-in for a status result. If the string is found,
the service will be assumed to be running.
enabled:
required: false
choices: [ "yes", "no" ]
description:
- Whether the service should start on boot. B(At least one of state and
enabled are required.)
runlevel:
required: false
default: 'default'
description:
- "For OpenRC init scripts (ex: Gentoo) only. The runlevel that this service belongs to."
arguments:
description:
- Additional arguments provided on the command line
aliases: [ 'args' ]
'''
EXAMPLES = '''
# Example action to start service httpd, if not running
- service: name=httpd state=started
# Example action to stop service httpd, if running
- service: name=httpd state=stopped
# Example action to restart service httpd, in all cases
- service: name=httpd state=restarted
# Example action to reload service httpd, in all cases
- service: name=httpd state=reloaded
# Example action to enable service httpd, and not touch the running state
- service: name=httpd enabled=yes
# Example action to start service foo, based on running process /usr/bin/foo
- service: name=foo pattern=/usr/bin/foo state=started
# Example action to restart network service for interface eth0
- service: name=network state=restarted args=eth0
'''
import platform
import os
import re
import tempfile
import shlex
import select
import time
import string
from distutils.version import LooseVersion
class Service(object):
"""
This is the generic Service manipulation class that is subclassed
based on platform.
A subclass should override the following action methods:-
- get_service_tools
- service_enable
- get_service_status
- service_control
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
    def __new__(cls, *args, **kwargs):
        # Dispatch instantiation to the platform/distribution-specific
        # subclass of Service instead of this generic base.
        return load_platform_subclass(Service, args, kwargs)
    def __init__(self, module):
        """Capture module parameters and initialise run-state bookkeeping."""
        self.module         = module if False else module  # placeholder removed below
# ===========================================
# Platform specific methods (must be replaced by subclass).
def get_service_tools(self):
self.module.fail_json(msg="get_service_tools not implemented on target platform")
def service_enable(self):
self.module.fail_json(msg="service_enable not implemented on target platform")
def get_service_status(self):
self.module.fail_json(msg="get_service_status not implemented on target platform")
def service_control(self):
self.module.fail_json(msg="service_control not implem | ented on target platform")
# ===========================================
# Generic methods that should be used on all platforms.
def execute_command(self, cmd, | daemonize=False):
if self.syslogging:
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Command %s, daemonize %r' % (cmd, daemonize))
# Most things don't need to be daemonized
if not daemonize:
return self.module.run_command(cmd)
# This is complex because daemonization is hard for people.
# What we do is daemonize a part of this module, the daemon runs the
# command, picks up the return code and output, and returns it to the
# main process.
pipe = os.pipe()
pid = os.fork()
if pid == 0:
os.close(pipe[0])
# Set stdin/stdout/stderr to /dev/null
fd = os.open(os.devnull, os.O_RDWR)
if fd != 0:
os.dup2(fd, 0)
if fd != 1:
os.dup2(fd, 1)
if fd != 2:
os.dup2(fd, 2)
if fd not in (0, 1, 2):
os.close(fd)
# Make us a daemon. Yes, that's all it takes.
pid = os.fork()
if pid > 0:
os._exit(0)
os.setsid()
os.chdir("/")
pid = os.fork()
if pid > 0:
os._exit(0)
# Start the command
if isinstance(cmd, basestring):
cmd = shlex.split(cmd)
p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
stdout = ""
stderr = ""
fds = [p.stdout, p.stderr]
# Wait for all output, or until the main process is dead and its output is done.
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if not (rfd + wfd + efd) and p.poll() is not None:
break
if p.stdout in rfd:
dat = os.read(p.stdout.fileno(), 4096)
if not dat:
fds.remove(p.stdout)
stdout += dat
if p.stderr in rfd:
dat = os.read(p.stderr.fileno(), 4096)
if not dat:
fds.remove(p.stderr)
stderr += dat
p.wait()
# Return a JSON blob to parent
os.write(pipe[1], jso |
digitalocean/netbox | netbox/circuits/migrations/0019_nullbooleanfield_to_booleanfield.py | Python | apache-2.0 | 426 | 0 | # Generated by Django 3.1b1 on 2020-07-16 15:5 | 5
from django.db import migrations, models
class Migration(migrations.Migration):
    # Per the migration's name: convert CircuitTermination.connection_status
    # from NullBooleanField to BooleanField(blank=True, null=True).
    # (Fixed: a stray "|" artifact had corrupted the closing bracket of
    # ``operations``.)

    dependencies = [
        ('circuits', '0018_standardize_description'),
    ]

    operations = [
        migrations.AlterField(
            model_name='circuittermination',
            name='connection_status',
            field=models.BooleanField(blank=True, null=True),
        ),
    ]
|
rezoo/chainer | tests/chainer_tests/functions_tests/loss_tests/test_mean_squared_error.py | Python | mit | 3,876 | 0 | import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check
@testing.parameterize(
    {'dtype': numpy.float32,
     'places': 5,
     'backward_tols': {'atol': 1e-5, 'rtol': 1e-4},
     'double_backward_tols': {'atol': 1e-5, 'rtol': 1e-4}},
    {'dtype': numpy.float16,
     'places': 3,
     'backward_tols': {'atol': 5e-2, 'rtol': 5e-1},
     'double_backward_tols': {'atol': 5e-2, 'rtol': 5e-1}},
)
class TestMeanSquaredError(unittest.TestCase):
    """Forward/backward/double-backward checks for F.mean_squared_error,
    parameterized over float32/float16 with matching tolerances.

    (Fixed: dump artifacts had corrupted the signatures of
    test_forward_cpu and test_forward_gpu.)
    """

    def setUp(self):
        dtype = self.dtype
        self.x0 = numpy.random.uniform(-1, 1, (4, 3)).astype(dtype)
        self.x1 = numpy.random.uniform(-1, 1, (4, 3)).astype(dtype)
        self.gy = numpy.random.uniform(-1, 1, ()).astype(dtype)
        self.ggx0 = numpy.random.uniform(-1, 1, (4, 3)).astype(dtype)
        self.ggx1 = numpy.random.uniform(-1, 1, (4, 3)).astype(dtype)

    def check_forward(self, x0_data, x1_data):
        x0 = chainer.Variable(x0_data)
        x1 = chainer.Variable(x1_data)
        loss = functions.mean_squared_error(x0, x1)
        loss_value = cuda.to_cpu(loss.data)
        self.assertEqual(loss_value.dtype, self.dtype)
        self.assertEqual(loss_value.shape, ())
        # Compute expected value naively, element by element.
        loss_expect = 0.
        for i in numpy.ndindex(self.x0.shape):
            loss_expect += (self.x0[i] - self.x1[i]) ** 2
        loss_expect /= self.x0.size
        self.assertAlmostEqual(loss_expect, loss_value, places=self.places)

    @condition.retry(3)
    def test_forward_cpu(self):
        self.check_forward(self.x0, self.x1)

    @attr.gpu
    @condition.retry(3)
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x0), cuda.to_gpu(self.x1))

    def check_backward(self, x0_data, x1_data):
        gradient_check.check_backward(
            functions.mean_squared_error,
            (x0_data, x1_data), None, eps=1e-2, **self.backward_tols)

    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.x0, self.x1)

    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        self.check_backward(cuda.to_gpu(self.x0), cuda.to_gpu(self.x1))

    def check_double_backward(self, x0_data, x1_data, gy_data,
                              ggx0_data, ggx1_data):
        gradient_check.check_double_backward(
            functions.mean_squared_error, (x0_data, x1_data), gy_data,
            (ggx0_data, ggx1_data), eps=1e-2, **self.double_backward_tols)

    @condition.retry(3)
    def test_double_backward_cpu(self):
        self.check_double_backward(
            self.x0, self.x1, self.gy, self.ggx0, self.ggx1)

    @attr.gpu
    @condition.retry(3)
    def test_double_backward_gpu(self):
        self.check_double_backward(
            cuda.to_gpu(self.x0), cuda.to_gpu(self.x1),
            cuda.to_gpu(self.gy),
            cuda.to_gpu(self.ggx0), cuda.to_gpu(self.ggx1))
class TestMeanSquaredErrorTypeCheck(unittest.TestCase):
    """Inputs with invalid dtypes must be rejected by the type checker."""

    def test_invalid_dtype1(self):
        # Integer inputs are not accepted.
        a = chainer.Variable(
            numpy.random.uniform(-1, 1, (4, 3)).astype(numpy.int32))
        b = chainer.Variable(
            numpy.random.uniform(-1, 1, (4, 3)).astype(numpy.int32))
        with self.assertRaises(type_check.InvalidType):
            functions.mean_squared_error(a, b)

    def test_invalid_dtype2(self):
        # Mismatched float dtypes are not accepted.
        a = chainer.Variable(
            numpy.random.uniform(-1, 1, (4, 3)).astype(numpy.float32))
        b = chainer.Variable(
            numpy.random.uniform(-1, 1, (4, 3)).astype(numpy.float16))
        with self.assertRaises(type_check.InvalidType):
            functions.mean_squared_error(a, b)
# Register this module's tests with Chainer's test runner hook.
# (Fixed: dropped a trailing dump-artifact line.)
testing.run_module(__name__, __file__)
jithinbp/vLabtool-v0 | v0/templates/template_exp_standalone.py | Python | gpl-3.0 | 4,665 | 0.009861 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'exp_std.ui'
#
# Created: Sat Apr 18 15:55:49 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Qt4/Python-2/3 compatibility shims emitted by pyuic4: fall back to an
# identity function / plain translate() when QString helpers are missing.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    """UI scaffold auto-generated by pyuic4 from exp_std.ui.

    Do not hand-edit layout logic here beyond fixes: regenerate from the
    .ui file instead.  (Fixed: dump artifacts had corrupted the
    ``scrollArea`` and ``QtGui.QWidget()`` lines in setupUi.)
    """

    def setupUi(self, MainWindow):
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.setWindowModality(QtCore.Qt.NonModal)
        MainWindow.resize(849, 479)
        MainWindow.setStyleSheet(_fromUtf8(" QPushButton{ background-image: url(:/images/bt_01_off.png);}\n"
" QPushButton:pressed {background-image:url(:/images/bt_01_on.png);}\n"
"QFrame{background-color: rgb(21, 107, 113);}\n"
"\n"
" QDockWidget {\n"
" border: 1px solid lightgray;\n"
" }\n"
"\n"
" QDockWidget::title {\n"
" text-align: left; /* align the text to the left */\n"
" background: lightgray;\n"
" padding-left: 5px;\n"
" height:6px;\n"
" }\n"
"\n"
" QDockWidget::close-button, QDockWidget::float-button {\n"
" border: 1px solid transparent;\n"
" background: darkgray;\n"
" padding: 0px;\n"
" }\n"
"\n"
" QDockWidget::close-button:hover, QDockWidget::float-button:hover {\n"
" background: gray;\n"
" }\n"
"\n"
" QDockWidget::close-button:pressed, QDockWidget::float-button:pressed {\n"
" padding: 1px -1px -1px 1px;\n"
" }\n"
"\n"
"\n"
"\n"
"border-color: rgb(29, 122, 162);\n"
""))
        MainWindow.setDocumentMode(False)
        MainWindow.setDockOptions(QtGui.QMainWindow.AllowTabbedDocks|QtGui.QMainWindow.AnimatedDocks)
        MainWindow.setUnifiedTitleAndToolBarOnMac(False)
        self.centralwidget = QtGui.QWidget(MainWindow)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
        self.centralwidget.setSizePolicy(sizePolicy)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        self.horizontalLayout_2 = QtGui.QHBoxLayout(self.centralwidget)
        self.horizontalLayout_2.setSpacing(0)
        self.horizontalLayout_2.setMargin(0)
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.scrollArea = QtGui.QScrollArea(self.centralwidget)
        self.scrollArea.setStyleSheet(_fromUtf8(""))
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
        self.scrollAreaWidgetContents_2 = QtGui.QWidget()
        self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 847, 452))
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.scrollAreaWidgetContents_2.sizePolicy().hasHeightForWidth())
        self.scrollAreaWidgetContents_2.setSizePolicy(sizePolicy)
        self.scrollAreaWidgetContents_2.setObjectName(_fromUtf8("scrollAreaWidgetContents_2"))
        self.verticalLayout = QtGui.QVBoxLayout(self.scrollAreaWidgetContents_2)
        self.verticalLayout.setSpacing(0)
        self.verticalLayout.setMargin(0)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.graph_splitter = QtGui.QSplitter(self.scrollAreaWidgetContents_2)
        self.graph_splitter.setOrientation(QtCore.Qt.Vertical)
        self.graph_splitter.setObjectName(_fromUtf8("graph_splitter"))
        self.verticalLayout.addWidget(self.graph_splitter)
        self.scrollArea.setWidget(self.scrollAreaWidgetContents_2)
        self.horizontalLayout_2.addWidget(self.scrollArea)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menuBar = QtGui.QMenuBar(MainWindow)
        self.menuBar.setGeometry(QtCore.QRect(0, 0, 849, 25))
        self.menuBar.setObjectName(_fromUtf8("menuBar"))
        MainWindow.setMenuBar(self.menuBar)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        MainWindow.setWindowTitle(_translate("MainWindow", "Experiments", None))
imscs21/myuniv | 1학기/programming/basic/파이썬/파이썬 과제/5/5_6.py | Python | apache-2.0 | 271 | 0.060837 | def double(n):
return n * 2
def halve(n):
return n // | 2
def fastmult(m,n):
if n > 0:
if n % 2 == 0:
return fastmult(double(m),halve(n))
else:
return m+fastmult | (m,n-1)
else:
return 0
#그냥 재귀 |
cdrooom/odoo | openerp/addons/base/ir/ir_actions.py | Python | agpl-3.0 | 63,067 | 0.004678 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from functools import partial
import logging
import operator
import os
import time
import datetime
import dateutil
import openerp
from openerp import SUPERUSER_ID
from openerp import tools
from openerp import workflow
import openerp.api
from openerp.osv import fields, osv
from openerp.osv.orm import browse_record
import openerp.report.interface
from openerp.report.report_sxw import report_sxw, report_rml
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
import openerp.workflow
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
class actions(osv.osv):
_name = 'ir.actions.actions'
_table = 'ir_actions'
_order = 'name'
_columns = {
'name': fields.char('Name', required=True),
'type': fields.char('Action Type', required=True),
'usage': fields.char('Action Usage'),
'xml_id': fields.function(osv.osv.get_external_id, type='char', string="External ID"),
'help': fields.text('Action description',
help='Optional help text for the users with a description of the target view, such as its usage and purpose.',
translate=True),
}
_defaults = {
'usage': lambda *a: False,
}
def unlink(self, cr, uid, ids, context=None):
"""unlink ir.action.todo which are related to actions which will be deleted.
NOTE: ondelete cascade will not work on ir.actions.actions so we will need to do it manually."""
todo_obj = self.pool.get('ir.actions.todo')
if not ids:
return True
if isinstance(ids, (int, long)):
ids = [ids]
todo_ids = todo_obj.search(cr, uid, [('action_id', 'in', ids)], context=context)
todo_obj.unlink(cr, uid, todo_ids, context=context)
return super(actions, self).unlink(cr, uid, ids, context=context)
def _get_eval_context(self, cr, uid, action=None, context=None):
""" evaluation context to pass to safe_eval """
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
return {
'uid': uid,
'user': user,
'time': time,
'datetime': datetime,
'dateutil': dateutil,
}
class ir_actions_report_xml(osv.osv):
def _report_content(self, cursor, user, ids, name, arg, context=None):
res = {}
for report in self.browse(cursor, user, ids, context=context):
data = report[name + '_data']
if not data and report[name[:-8]]:
fp = None
try:
fp = tools.file_open(report[name[:-8]], mode='rb')
data = fp.read()
except:
data = False
finally:
if fp:
fp.close()
res[report.id] = data
return res
def _report_content_inv(self, cursor, user, id, name, value, arg, context=None):
self.write(cursor, user, id, {name+'_data': value}, context=context)
def _report_sxw(self, cursor, user, ids, name, arg, context=None):
res = {}
for report in self.browse(cursor, user, ids, context=context):
if report.report_rml:
res[report.id] = report.report_rml.replace('.rml', '.sxw')
else:
res[report.id] = False
return res
def _lookup_report(self, cr, name):
"""
Look up a report definition.
"""
opj = os.path.join
# First lookup in the deprecated place, because if the report definition
# has not been updated, it is more likely the correct definition is there.
# Only reports with custom parser sepcified in Python are still there.
if 'report.' + name in openerp.report.interface.report_int._reports:
new_report = openerp.report.interface.report_int._reports['report.' + name]
else:
cr.execute("SELECT * FROM ir_act_report_xml WHERE report_name=%s", (name,))
r = cr.dictfetchone()
if r:
if r['report_type'] in ['qweb-pdf', 'qweb-html']:
return r['report_name']
elif r['report_rml'] or r['report_rml_content_data']:
if r['parser']:
kwargs = { 'parser': operator.attrgetter(r['parser'])(openerp.addons) }
else:
kwargs = {}
new_report = report_sxw('report.'+r['report_name'], r['model'],
opj('addons',r['report_rml'] or '/'), header=r['header'], register=False, **kwargs)
elif r['report_xsl'] and r['report_xml']:
new_report = report_rml('report.'+r['report_name'], r['model'],
opj('addons',r['report_xml']),
r['report_xsl'] and opj('addons',r['report_xsl']), register=False)
else:
raise Exception, "Unhandled report type: %s" % r
else:
raise Exception, "Required report does not exist: %s" % name
return new_report
def create_action(self, cr, uid, ids, context=None):
""" Create a contextual action for each of the report."""
for ir_actions_report_xml in self.browse(cr, uid, ids, context=context):
ir_values_id = self.pool['ir.values'].create(cr, SUPERUSER_ID, {
'name': ir_actions_report_xml.name,
'model': ir_actions_report_xml.model,
'key2': 'client_print_multi',
'value': "ir.actions.report.xml,%s" % ir_actions_report_xml.id,
}, context)
ir_actions_report_xml.write({
'ir_values_id': ir_values_id,
})
return True
def unlink_action(self, cr, uid, ids, context=None):
""" Remove the contextual actions created for the reports."""
self.check_access_rights(cr , uid, 'write', raise_exception=True)
| for ir_actions_report_xml in self.browse(cr, uid, ids, context=context):
if ir_actions_report_xml.ir_values_id:
try:
self.pool['ir.values'].unlink(
cr, SUPERUSER_ID, ir_actions_report_xml.ir_values_id.id, context
)
except Exception:
raise UserError(_('Deletion of the action record failed.'))
return True
def render_report(self, cr, uid, res_ids | , name, data, context=None):
"""
Look up a report definition and render the report for the provided IDs.
"""
new_report = self._lookup_report(cr, name)
if isinstance(new_report, (str, unicode)): # Qweb report
# The only case where a QWeb report is rendered with this method occurs when running
# yml tests originally written for RML reports.
if openerp.tools.config['test_enable'] and not tools.config['test_report_directory']:
# Only generate the pdf when a destination folder has been provided.
return self.pool['report'].get_html(cr, uid, res_ids, n |
viljoviitanen/plugin.minimal.example | addon.py | Python | unlicense | 1,556 | 0.003856 | # This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHAN | TABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF | OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
#
import xbmcgui
import xbmcplugin
import xbmcaddon
addon = xbmcaddon.Addon()
username = addon.getSetting('username') or 'nobody'
item = xbmcgui.ListItem('Hello, %s' % username)
xbmcplugin.addDirectoryItem(int(sys.argv[1]), '', item, isFolder=0)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
vmalavolta/django-pieguard | tests/test_app/models.py | Python | mit | 461 | 0 | from __future__ import unicode_literals
from datetime imp | ort datetime
from django.db import models
class Project(models.Model):
name = models.CharField(max_length=128, unique=True)
created_at = models.DateTimeFi | eld(default=datetime.now)
class Meta:
get_latest_by = 'created_at'
permissions = (
('view_project', 'View project'), # used by django-tastypie
)
def __unicode__(self):
return self.name
|
mehdy/pyeez | setup.py | Python | gpl-2.0 | 1,167 | 0 | """
This module instructs the setuptools to setpup this package properly
:copyright: (c) 2016 by Mehdy Khoshnoody.
:license: GPLv3, see LICENSE for more | details.
"""
impor | t os
from distutils.core import setup
setup(
name='pyeez',
version='0.1.0',
packages=['pyeez'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='terminal console',
url='https://github.com/mehdy/pyeez',
license='GPLv3',
author='Mehdy Khoshnoody',
author_email='me@mehdy.net',
description='A micro-framework to create console-based applications like'
'htop, vim and etc'
)
|
google-research/google-research | linear_dynamical_systems/process_mit_bih.py | Python | apache-2.0 | 3,056 | 0.009162 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law o | r agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate timeseries in 2 clusters: NSR and SVT from mit-bih data."""
from __future__ import absolute_import
from __future__ import division
from __fut | ure__ import print_function
import random
from absl import app
from absl import flags
import numpy as np
import wfdb
FLAGS = flags.FLAGS
flags.DEFINE_string(
'input_dir', None,
'Local input directory containing the mit-bih file that can be copied from '
'/namespace/health-research/unencrypted/reference/user/milah/mit_bih/.')
flags.DEFINE_string('outfile_dir', None,
'Output filepath.')
def main(argv):
del argv
all_ = [100, 101, 102, 103, 104, 105, 106, 107, 108, 111, 112, 113, 114, 115,
116, 117, 118, 119, 121, 122, 123, 124, 200, 201, 202, 203, 205, 207,
208, 209, 210, 212, 213, 214, 215, 217, 219, 220, 221, 222, 223, 228,
230, 231, 232, 233, 234]
target_rhythms = ['AB', 'AFIB', 'AFL', 'B', 'BII', 'IVR', 'N', 'NOD', 'P',
'PREX', 'SBR', 'SVTA', 'T', 'VFL', 'VT']
rhythms = dict()
for a in all_:
ann_ref = wfdb.rdann(FLAGS.input_dir + str(a), 'atr')
for k, label in enumerate(ann_ref.aux_note):
label = str(label).strip('\x00').strip('(')
if label in target_rhythms:
sampfrom = max(0, ann_ref.sample[k] - 140)
sampto = ann_ref.sample[k] + 361
sig, _ = wfdb.rdsamp(FLAGS.input_dir + str(a), channels=[0, 1],
sampfrom=sampfrom, sampto=sampto)
for channel in [0, 1]:
key = str(a) + ':' + str(k) + ':' + str(channel) + ':' + str(
ann_ref.sample[k])
x = np.array(sig)
x = x[:, channel]
record = ','.join([key, str(channel), str(label)] + [
str(i) for i in x])
if label not in rhythms:
rhythms[label] = []
rhythms[label].append(record)
all_rhythms = sorted(rhythms.keys())
print(all_rhythms)
random.seed(1984)
with file(FLAGS.outfile + 'all.csv', 'w') as f_all:
for label in all_rhythms:
records = rhythms[label]
idxs = range(len(records)/2)
random.shuffle(idxs)
outfile = FLAGS.outfile + label + '.csv'
with file(outfile, 'w') as f:
for i in idxs:
f.write(records[2*i] + '\n')
f.write(records[2*i+1] + '\n')
f_all.write(records[2*i] + '\n')
f_all.write(records[2*i+1] + '\n')
if __name__ == '__main__':
app.run(main)
|
tomato42/tlsfuzzer | scripts/test-sslv2-force-export-cipher.py | Python | gpl-2.0 | 8,593 | 0.00256 | # Author: Hubert Kario, (c) 2015
# Released under Gnu GPL v2.0, see LICENSE file for details
"""Test forcing of export ciphers in SSLv2"""
from __future__ import print_function
import traceback
import sys
import re
from random import sample
import getopt
from tlsfuzzer.runner import Runner
from tlsfuzzer.messages import Connect, ClientHelloGenerator, \
ChangeCipherSpecGenerator, \
FinishedGenerator, ApplicationDataGenerator, AlertGenerator, \
ClientMasterKeyGenerator
from tlsfuzzer.expect import ExpectServerHello, ExpectCertificate, \
ExpectServerHelloDone, ExpectChangeCipherSpec, \
ExpectAlert, ExpectClose, ExpectApplicationData, ExpectServerHello2, \
ExpectVerify, ExpectSSL2Alert
from tlslite.constants import CipherSuite, AlertLevel, \
ExtensionType, SSL2ErrorDescription
from tlsfuzzer.utils.lists import natural_sort_keys
version = 3
def help_msg():
"""Usage information"""
print("Usage: <script-name> [-h hostname] [-p port]")
print(" -h hostname hostname to connect to, \"localhost\" by default")
print(" -p port port to use for connection, \"4433\" by default")
print(" -e probe-name exclude the probe from the list of the ones run")
print(" may be specified multiple times")
print(" -n num run 'num' or all(if 0) tests instead of default(all)")
print(" (excluding \"sanity\" tests)")
print(" -x probe-name expect the probe to fail. When such probe passes despite being marked like this")
print(" it will be reported in the test summary and the whole script will fail.")
print(" May be specified multiple times.")
print(" -X message expect the `message` substring in exception raised during")
print(" execution of preceding expected failure probe")
print(" usage: [-x probe-name] [-X exception], order is compulsory!")
print(" --help this message")
def main():
"""Test if the server supports export grade SSLv2 ciphers"""
conversations = {}
host = "localhost"
port = 4433
num_limit = None
run_exclude = set()
expected_failures = {}
last_exp_tmp = None
argv = sys.argv[1:]
opts, argv = getopt.getopt(argv, "h:p:e:n:x:X:", ["help"])
for opt, arg in opts:
if opt == '-h':
host = arg
elif opt == '-p':
port = int(arg)
elif opt == '-e':
run_exclude.add(arg)
elif opt == '-n':
num_limit = int(arg)
elif opt == '-x':
expected_failures[arg] = None
last_exp_tmp = str(arg)
elif opt == '-X':
if not last_exp_tmp:
raise ValueError("-x has to be specified before -X")
expected_failures[last_exp_tmp] = str(arg)
elif opt == '--help':
help_msg()
sys.exit(0)
else:
raise ValueError("Unknown option: {0}".format(opt))
if argv:
help_msg()
raise ValueError("Unknown options: {0}".format(argv))
for prot_vers, proto_name in {
(0, 2):"SSLv2",
(3, 0):"SSLv3",
(3, 1):"TLSv1.0"
}.items():
for cipher_id, cipher_name in {
CipherSuite.SSL_CK_RC4_128_EXPORT40_WITH_MD5:"EXP-RC4-MD5",
CipherSuite.SSL_CK_RC2_128_CBC_EXPORT40_WITH_MD5:
"EXP-RC2-CBC-MD5"
}.items():
# instruct RecordLayer to use SSLv2 record layer protocol (0, 2)
conversation = Connect(host, port, version=(0, 2))
node = conversation
ciphers = [CipherSuite.SSL_CK_DES_192_EDE3_CBC_WITH_MD5,
CipherSuite.SSL_CK_RC4_128_WITH_MD5,
CipherSuite.SSL_CK_RC4_128_EXPORT40_WITH_MD5,
CipherSuite.SSL_CK_RC2_128_CBC_WITH_MD5,
CipherSuite.SSL_CK_RC2_128_CBC_EXPORT40_WITH_MD5,
CipherSuite.SSL_CK_IDEA_128_CBC_WITH_MD5,
CipherSuite.SSL_CK_DES_64_CBC_WITH_MD5]
node = node.add_child(ClientHelloGenerator(ciphers,
version=prot_vers,
ssl2=True))
# we can get a ServerHello with no ciphers:
node = node.add_child(ExpectServerHello2())
# or we can get an error stright away, and connection closure
node.next_sibling = ExpectSSL2Alert(SSL2ErrorDescription.no_cipher)
node.next_sibling.add_child(ExpectClose())
alternative = node.next_sibling
# or the server may close the connection right away (likely in
# case SSLv2 is completely disabled)
alternative.next_sibling = ExpectClose()
alternative = alternative.next_sibling
# or finally, we can get a TLS Alert message
alternative.next_sibling = ExpectAlert()
alternative.next_sibling.add_child(ExpectClose())
# in case we got ServerHello, try to force one of the ciphers
node = node.add_child(ClientMasterKeyGenerator(cipher=cipher_id))
# it should result in error
node = node.add_child(ExpectSSL2Alert())
# or connection close
node.next_sibling = ExpectClose()
# in case of error, we expect the server to close connection
node.add_child(ExpectClose())
conversations["Connect with {1} {0}"
.format(cipher_name, proto_name)] = conversation
good = 0
bad = 0
xfail = 0
xpass = 0
failed = []
xpassed = []
if not num_limit:
num_limit = len(conversations)
sampled_tests = sample(list(conversations.items()), len(conversations))
for c_name, conversation in sampled_tests:
if c_name in run_ex | clude:
continue
print("{0} ...".format(c_name))
runner = Runner(conversation)
res = True
exception = None
try:
runner.run()
except Exception as exp:
exception = exp
print("Error while processing")
print(traceback.format_exc())
| print("")
res = False
if c_name in expected_failures:
if res:
xpass += 1
xpassed.append(c_name)
print("XPASS-expected failure but test passed\n")
else:
if expected_failures[c_name] is not None and \
expected_failures[c_name] not in str(exception):
bad += 1
failed.append(c_name)
print("Expected error message: {0}\n"
.format(expected_failures[c_name]))
else:
xfail += 1
print("OK-expected failure\n")
else:
if res:
good+=1
print("OK\n")
else:
bad+=1
print("Note: SSLv2 was officially deprecated (MUST NOT use) in 2011, see")
print(" RFC 6176.")
print(" If one or more of the tests fails because of error in form of")
print("")
print(" Unexpected message from peer: Handshake()")
print("")
print(" With any number inside parethensis, and the server is")
print(" configured to not support SSLv2, it means it most")
print(" likely is vulnerable to CVE-2015-3197 and CVE-2016-0800.")
print(" In case it's a RC4 or 3DES cipher, you may verify that it")
print(" really supports it using:")
print(" test-sslv2-connection.py")
print("")
print("Test end")
print(20 * '=')
print("version: {0}".format(version))
print(20 * '=')
print("TOTAL: {0}".format(len(sampled_tests)))
print("SKIP: {0}".format(len(run_exclude.intersection(conversations.keys()))))
print("PASS: {0}".format(good))
print("XFAIL: {0}".format(xfail))
print("FAIL: {0}".format(bad))
print("X |
jibaron/q35-seabios | tools/acpi_extract.py | Python | gpl-3.0 | 11,577 | 0.008465 | #!/usr/bin/python
# Copyright (C) 2011 Red Hat, Inc., Michael S. Tsirkin <mst@redhat.com>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
# Process mixed ASL/AML listing (.lst file) produced by iasl -l
# Locate and execute ACPI_EXTRACT directives, output offset info
#
# Documentation of ACPI_EXTRACT_* directive tags:
#
# These directive tags output offset information from AML for BIOS runtime
# table generation.
# Each directive is of the form:
# ACPI_EXTRACT_<TYPE> <array_name> <Operator> (...)
# and causes the extractor to create an array
# named <array_name> with offset, in the generated AML,
# of an object of a given type in the following <Operator>.
#
# A directive must fit on a single code line.
#
# Object type in AML is verified, a mismatch causes a build failure.
#
# Directives and operators currently supported are:
# ACPI_EXTRACT_NAME_DWORD_CONST - extract a Dword Const object from Name()
# ACPI_EXTRACT_NAME_WORD_CONST - extract a Word Const object from Name()
# ACPI_EXTRACT_NAME_BYTE_CONST - extract a Byte Const object from Name()
# ACPI_EXTRACT_METHOD_STRING - extract a NameString from Method()
# ACPI_EXTRACT_NAME_STRING - extract a NameString from Name()
# ACPI_EXTRACT_PROCESSOR_START - start of Processor() block
# ACPI_EXTRACT_PROCESSOR_STRING - extract a NameString from Processor()
# ACPI_EXTRACT_PROCESSOR_END - offset at last byte of Processor() + 1
# ACPI_EXTRACT_PKG_START - start of Package block
#
# ACPI_EXTRACT_ALL_CODE - create an array storing the generated AML bytecode
#
# ACPI_EXTRACT is not allowed anywhere else in code, except in comments.
import re;
import sys;
import fileinput;
aml = []
asl = []
output = {}
debug = ""
class asl_line:
line = None
lineno = None
aml_offset = None
def die(diag):
sys.stderr.write("Error: %s; %s\n" % (diag, debug))
sys.exit(1)
#Store an ASL command, matching AML offset, and input line (for debugging)
def add_asl(lineno, line):
l = asl_line()
l.line = line
l.lineno = lineno
l.aml_offset = len(aml)
asl.append(l)
#Store an AML byte sequence
#Verify that offset output by iasl matches # of bytes so far
def add_aml(offset, line):
o = int(offset, 16);
# Sanity check: offset must match size of code so far
if (o != len(aml)):
die("Offset 0x%x != 0x%x" % (o, len(aml)))
# Strip any trailing dots and ASCII dump after "
line = re.sub(r'\s*\.*\s*".*$',"", line)
# Strip traling whitespace
line = re.sub(r'\s+$',"", line)
# Strip leading whitespace
line = re.sub(r'^\s+',"", line)
# Split on whitespace
code = re.split(r'\s+', line)
for c in code:
# Require a legal hex number, two digits
if (not(re.search(r'^[0-9A-Fa-f][0-9A-Fa-f]$', c))):
die("Unexpected octet %s" % c);
aml.append(int(c, 16));
# Process aml bytecode array, decoding AML
def aml_pkglen_bytes(offset):
# PkgLength can be multibyte. Bits 8-7 give the # of extra bytes.
pkglenbytes = aml[offset] >> 6;
return pkglenbytes + 1
def aml_pkglen(offset):
pkgstart = offset
pkglenbytes = aml_pkglen_bytes(offset)
pkglen = aml[offset] & 0x3F
# If multibyte, first nibble only uses bits 0-3
if ((pkglenbytes > 1) and (pkglen & 0x30)):
die("PkgLen bytes 0x%x but first nibble 0x%x expected 0x0X" %
(pkglen, pkglen))
offset += 1
pkglenbytes -= 1
for i in range(pkglenbytes):
pkglen |= aml[offset + i] << (i * 8 + 4)
if (len(aml) < pkgstart + pkglen):
die("PckgLen 0x%x at offset 0x%x exceeds AML size 0x%x" %
(pkglen, offset, len(aml)))
return pkglen
# Given method offset, find its NameString offset
def aml_method_string(offset):
#0x14 MethodOp PkgLength NameString MethodFlags TermList
if (aml[offset] != 0x14):
die( "Method offset 0x%x: expected 0x14 actual 0x%x" %
(offset, aml[offset]));
offset += 1;
pkglenbytes = aml_pkglen_bytes(offset)
offset += pkglenbytes;
return offset;
# Given name offset, find its NameString offset
def aml_name_string(offset):
#0x08 NameOp NameString DataRef
if (aml[offset] != 0x08):
die( "Name offset 0x%x: expected 0x08 actual 0x%x" %
(offset, aml[offset]));
offset += 1
# Block Name Modifier. Skip it.
if (aml[offset] == 0x5c or aml[offset] == 0x5e):
offset += 1
return offset;
# Given data offset, find dword const offset
def aml_data_dword_const(offset):
#0x08 NameOp NameString DataRef
if (aml[offset] != 0x0C):
die( "Name offset 0x%x: expected 0x0C actual 0x%x" %
(offset, aml[offset]));
return offset + 1;
# Given data offset, find word const offset
def aml_data_word_const(offset):
#0x08 NameOp NameString DataRef
if (aml[offset] != 0x0B):
die( "Name offset 0x%x: expected 0x0B actual 0x%x" %
(offset, aml[offset]));
return offset + 1;
# Given data offset, find byte const offset
def aml_data_byte_const(offset):
#0x08 NameOp NameString DataRef
if (aml[offset] != 0x0A):
die( "Name offset 0x%x: expected 0x0A actual 0x%x" %
(offset, aml[offset]));
return offset + 1;
# Given name offset, find dword const offset
def aml_name_dword_const(offset):
return aml_data_dword_const(aml_name_string(offset) + 4)
# Given name offset, find word const offset
def aml_name_word_const(offset):
return aml_data_word_const(aml_name_string(offset) + 4)
# Given name offset, find byte const offset
def aml_name_byte_const(offset):
return aml_data_byte_const(aml_name_string(offset) + 4)
def aml_device_start(offset):
#0x5B 0x82 DeviceOp PkgLength NameString
if ((aml[offset] != 0x5B) or (aml[offset + 1] != 0x82)):
die( "Name offset 0x%x: expected 0x5B 0x82 actual 0x%x 0x%x" %
(offset, aml[offset], aml[offset + 1]));
return offset
def aml_device_string(offset):
#0x5B 0x82 DeviceOp PkgLength NameString
start = aml_device_start(offset)
offset += 2
pkglenbytes = aml_pkglen_bytes(offset)
offset += pkglenbytes
return offset
def aml_device_end(offset):
start = aml_device_start(offset)
offset += 2
pkglenbytes = aml_pkglen_bytes(offset)
pkglen = aml_pkglen(offset)
return offset + pkglen
def aml_processor_start(offset):
#0x5B 0x83 ProcessorOp PkgLength NameString ProcID
if ((aml[offset] != 0x5B) or (aml[offset + 1] != 0x83)):
die( "Name offset 0x%x: expected 0x5B 0x83 actual 0x%x 0x%x" %
(offset, aml[offset], aml[offset + 1]));
return offset
def aml_processor_string(offset):
#0x5B 0x83 ProcessorOp PkgLength NameString ProcID
start = aml_processor_start(offset)
offset += 2
pkglenbytes = aml_pkglen_bytes(offset)
offset += pkglenbytes
return offset
def aml_processor_end(offset):
start = aml_processor_start(offset)
offset += 2
pkglenbytes = aml_pkglen_bytes(offset)
pkglen = aml_pkglen(offset)
return offset + pkglen
def aml_package_start(offset):
offset = aml_name_string(offset) + 4
# 0x12 PkgLength NumElements PackageElementList
if (aml[offset] != 0x12):
die( "Name offset 0x%x: expected 0x12 actual 0x%x" %
(offset, aml[offset]));
offset += 1
return offset + aml_pkglen_bytes(offset) + 1
lineno = 0
for line | in fileinput.input():
# Strip trailing newline
line = line.rstrip( | );
# line number and debug string to output in case of errors
lineno = lineno + 1
debug = "input line %d: %s" % (lineno, line)
#ASL listing: space, then line#, then ...., then code
pasl = re.compile('^\s+([0-9]+)\.\.\.\.\s*')
m = pasl.search(line)
if (m):
add_asl(lineno, pasl.sub("", line));
# AML listing: offset in hex, then ...., then code
paml = re.compile('^([0-9A-Fa-f]+)\.\.\.\.\s*')
m = paml.search(line)
if (m):
add_aml(m.group(1), paml.sub("", line))
# Now go over code
# Track AML offset of a previous non-empty ASL command
prev_aml_offset = -1
for i in range(len(asl)):
debug = "input line %d: %s" % (asl[i].lineno, asl[i].line)
l = asl[i].lin |
KirarinSnow/Google-Code-Jam | Round 1C 2010/C.py | Python | gpl-3.0 | 1,627 | 0.006146 | #!/usr/bin/python
#
# Problem: Making Chess Boards
# Language: Python
# Author: KirarinSnow
# Usage: python thisfile.py <input.in >output.out
from heapq import *
def process(r1, r2, c1, c2):
for i in range(r1, r2):
for j in range(c1, c2):
if 0 <= i < m and 0 <= j < n:
if g[i][j] == None:
s[i][j] = 0
elif i == 0 or j == 0:
s[i][j] = 1
elif g[i-1][j] != g[i][j] and g[i][j-1] != g[i][j] and \
g[i-1][j-1] == g[i][j]:
s[i][j] = 1 + min(s[i-1][j], s[i][j-1], s[i-1][j-1])
else:
s[i][j] = 1
heappush(q, (-s[i][j], i, j))
def clear(r1, r2, c1, c2):
for i in range(r1, r2):
for j in range(c1, c2):
if 0 <= i < m and 0 <= j < n:
g[i][j] = None
for case in range(int(raw_input())):
m, n = map(int, raw_input().split())
v = [eval('0x'+raw_input()) for i in range(m)]
g = map(lambda x: map(lambda y: (x>>y)%2, range(n)[::-1]), v)
| s = [[1 for i in range(n)] for j in range(m)]
q = []
process(0, m, 0, n)
b = []
while q:
x, r, c = heappop(q)
if x != 0 and s[r][c] == -x:
b.append((-x, r, c))
clear(r+x+1, r+1, c+x+1, c+1)
process(r+x+1, r-x+1, c+x+1, c-x+1)
vs = sorted(list(set(map(lambda x: x[0], b))))[::-1]
print "Case #%d: %d" % (case+1, len(vs))
for k in vs:
| print k, len(filter(lambda x: x[0] == k, b))
|
olivierdalang/stdm | third_party/FontTools/fontTools/t1Lib.py | Python | gpl-2.0 | 9,506 | 0.035451 | """fontTools.t1Lib.py -- Tools for PostScript Type 1 fonts
Functions for reading and writing raw Type 1 data:
read(path)
reads any Type 1 font file, returns the raw data and a type indicator:
'LWFN', 'PFB' or 'OTHER', depending on the format of the file pointed
to by 'path'.
Raises an error when the file does not contain valid Type 1 data.
write(path, data, kind='OTHER', dohex=0)
writes raw Type 1 data to the file pointed to by 'path'.
'kind' can be one of 'LWFN', 'PFB' or 'OTHER'; it defaults to 'OTHER'.
'dohex' is a flag which determines whether the eexec encrypted
part should be written as hexadecimal or binary, but only if kind
is 'LWFN' or 'PFB'.
"""
__author__ = "jvr"
__version__ = "1.0b2"
DEBUG = 0
from fontTools.misc import eexec
from fontTools.misc.macCreatorType import getMacCreatorAndType
import string
import re
import os
try:
try:
from Carbon import Res
except ImportError:
import Res # MacPython < 2.2
except ImportError:
haveMacSupport = 0
else:
haveMacSupport = 1
import MacOS
class T1Error(Exception): pass
class T1Font:
"""Type 1 font class.
Uses a minimal interpeter that supports just about enough PS to parse
Type 1 fonts.
"""
def __init__(self, path=None):
if path is not None:
self.data, type = read(path)
else:
pass # XXX
def saveAs(self, path, type):
write(path, self.getData(), type)
def getData(self):
# XXX Todo: if the data has been converted to Python object,
# recreate the PS stream
return self.data
def getGlyphSet(self):
"""Return a generic GlyphSet, which is a dict-like object
mapping glyph names to glyph objects. The returned glyph objects
have a .draw() method that supports the Pen protocol, and will
have an attribute named 'width', but only *after* the .draw() method
has been called.
In the case of Type 1, the GlyphSet is simply the CharStrings dict.
"""
return self["CharStrings"]
def __getitem__(self, key):
if not hasattr(self, "font"):
self.parse()
return self.font[key]
def parse(self):
from fontTools.misc import psLib
from fontTools.misc import psCharStrings
self.font = psLib.suckfont(self.data)
charStrings = self.font["CharStrings"]
lenIV = self.font["Private"].get("lenIV", 4)
assert lenIV >= 0
subrs = self.font["Private"]["Subrs"]
for glyphName, charString in charStrings.items():
charString, R = eexec.decrypt(charString, 4330)
charStrings[glyphName] = psCharStrings.T1CharString(charString[lenIV:],
subrs=subrs)
for i in range(len(subrs)):
charString, R = eexec.decrypt(subrs[i], 4330)
subrs[i] = psCharStrings.T1CharString(charString[lenIV:], subrs=subrs)
del self.data
# low level T1 data read and write functions
def read(path, onlyHeader=0):
"""reads any Type 1 font file, returns raw data"""
normpath = string.lower(path)
creator, type = getMacCreatorAndType(path)
if type == 'LWFN':
return readLWFN(path, onlyHeader), 'LWFN'
if normpath[-4:] == '.pfb':
return readPFB(path, onlyHeader), 'PFB'
else:
return readOther(path), 'OTHER'
def write(path, data, kind='OTHER', dohex=0):
assertType1(data)
kind = string.upper(kind)
try:
os.remove(path)
except os.error:
pass
err = 1
try:
if kind == 'LWFN':
writeLWFN(path, data)
elif kind == 'PFB':
writePFB(path, data)
else:
writeOther(path | , data, dohex)
err = 0
finally:
if err and not DEBUG:
try:
os.remove(path)
except os.error:
pass
# -- internal --
LWFNCHUNKSIZE = 2000
HEXLINELENGTH = 80
def readLWFN(path, onlyHeader=0):
"""reads an LWFN font file, returns raw data"""
resRef = Res.FSOpenResFile(path, 1) # read-only
try:
Res.UseResFile(resRef)
n = Res.Count1Resources('POST')
data = []
for i in range(501, 501 + n):
res = Res.Get1Resource('POST', | i)
code = ord(res.data[0])
if ord(res.data[1]) <> 0:
raise T1Error, 'corrupt LWFN file'
if code in [1, 2]:
if onlyHeader and code == 2:
break
data.append(res.data[2:])
elif code in [3, 5]:
break
elif code == 4:
f = open(path, "rb")
data.append(f.read())
f.close()
elif code == 0:
pass # comment, ignore
else:
raise T1Error, 'bad chunk code: ' + `code`
finally:
Res.CloseResFile(resRef)
data = string.join(data, '')
assertType1(data)
return data
def readPFB(path, onlyHeader=0):
"""reads a PFB font file, returns raw data"""
f = open(path, "rb")
data = []
while 1:
if f.read(1) <> chr(128):
raise T1Error, 'corrupt PFB file'
code = ord(f.read(1))
if code in [1, 2]:
chunklen = stringToLong(f.read(4))
chunk = f.read(chunklen)
assert len(chunk) == chunklen
data.append(chunk)
elif code == 3:
break
else:
raise T1Error, 'bad chunk code: ' + `code`
if onlyHeader:
break
f.close()
data = string.join(data, '')
assertType1(data)
return data
def readOther(path):
"""reads any (font) file, returns raw data"""
f = open(path, "rb")
data = f.read()
f.close()
assertType1(data)
chunks = findEncryptedChunks(data)
data = []
for isEncrypted, chunk in chunks:
if isEncrypted and isHex(chunk[:4]):
data.append(deHexString(chunk))
else:
data.append(chunk)
return string.join(data, '')
# file writing tools
def writeLWFN(path, data):
Res.FSpCreateResFile(path, "just", "LWFN", 0)
resRef = Res.FSOpenResFile(path, 2) # write-only
try:
Res.UseResFile(resRef)
resID = 501
chunks = findEncryptedChunks(data)
for isEncrypted, chunk in chunks:
if isEncrypted:
code = 2
else:
code = 1
while chunk:
res = Res.Resource(chr(code) + '\0' + chunk[:LWFNCHUNKSIZE - 2])
res.AddResource('POST', resID, '')
chunk = chunk[LWFNCHUNKSIZE - 2:]
resID = resID + 1
res = Res.Resource(chr(5) + '\0')
res.AddResource('POST', resID, '')
finally:
Res.CloseResFile(resRef)
def writePFB(path, data):
chunks = findEncryptedChunks(data)
f = open(path, "wb")
try:
for isEncrypted, chunk in chunks:
if isEncrypted:
code = 2
else:
code = 1
f.write(chr(128) + chr(code))
f.write(longToString(len(chunk)))
f.write(chunk)
f.write(chr(128) + chr(3))
finally:
f.close()
def writeOther(path, data, dohex=0):
    """Write raw Type 1 data to *path* as a plain (PFA-like) file.

    If *dohex* is true, encrypted chunks are written hex-encoded in
    lines of HEXLINELENGTH characters; otherwise all chunks are written
    verbatim.
    """
    chunks = findEncryptedChunks(data)
    f = open(path, "wb")
    try:
        # HEXLINELENGTH counts hex digits; each byte yields two digits.
        # Floor division keeps this an int under Python 3 semantics too
        # (plain '/' would produce a float there and break slicing).
        hexlinelen = HEXLINELENGTH // 2
        for isEncrypted, chunk in chunks:
            if isEncrypted:
                code = 2
            else:
                code = 1
            if code == 2 and dohex:
                while chunk:
                    f.write(eexec.hexString(chunk[:hexlinelen]))
                    f.write('\r')
                    chunk = chunk[hexlinelen:]
            else:
                f.write(chunk)
    finally:
        f.close()
# decryption tools
# Markers used to locate and rewrite the eexec-encrypted portion of a font.
EEXECBEGIN = "currentfile eexec"          # starts the encrypted part
EEXECEND = '0' * 64                       # 64 zeros terminate the hex data
EEXECINTERNALEND = "currentfile closefile"  # end marker inside the decrypted data
EEXECBEGINMARKER = "%-- eexec start\r"    # placeholder written by decryptType1
EEXECENDMARKER = "%-- eexec end\r"
_ishexRE = re.compile('[0-9A-Fa-f]*$')

def isHex(text):
    """Return True if *text* consists solely of hexadecimal digits.

    An empty string counts as hex (the pattern allows zero digits).
    """
    match = _ishexRE.match(text)
    return match is not None
def decryptType1(data):
    """Decrypt the eexec parts of Type 1 *data*, returning editable text.

    Encrypted chunks are replaced by their decrypted body wrapped in
    EEXECBEGINMARKER / EEXECENDMARKER so the data can later be
    re-encrypted. Raises T1Error if an eexec part lacks the expected
    "currentfile closefile" trailer.
    """
    chunks = findEncryptedChunks(data)
    data = []
    for isEncrypted, chunk in chunks:
        if isEncrypted:
            # Hex-encoded (PFA-style) chunks must be de-hexed first.
            if isHex(chunk[:4]):
                chunk = deHexString(chunk)
            decrypted, R = eexec.decrypt(chunk, 55665)
            # The first 4 bytes of the decrypted stream are random padding.
            decrypted = decrypted[4:]
            # The internal end marker may be followed by one or two EOL
            # bytes; accept either position, otherwise the data is bad.
            if decrypted[-len(EEXECINTERNALEND)-1:-1] <> EEXECINTERNALEND \
                    and decrypted[-len(EEXECINTERNALEND)-2:-2] <> EEXECINTERNALEND:
                raise T1Error, "invalid end of eexec part"
            decrypted = decrypted[:-len(EEXECINTERNALEND)-2] + '\r'
            data.append(EEXECBEGINMARKER + decrypted + EEXECENDMARKER)
        else:
            # Strip a trailing "currentfile eexec" line: the begin marker
            # written above stands in for it.
            if chunk[-len(EEXECBEGIN)-1:-1] == EEXECBEGIN:
                data.append(chunk[:-len(EEXECBEGIN)-1])
            else:
                data.append(chunk)
    return string.join(data, '')
def findEncryptedChunks(data):
chunks = []
while 1:
eBegin = string.find(data, EEXECBEGIN)
if eBegin < 0:
break
eBegin = eBegin + len(EEXECBEGIN) + 1
eEnd = string.find(data, EEXECEND, eBegin)
if eEnd < 0:
raise T1Error, "can't find end of eexec part"
cypherText = data |
speth/ember | test/reference/bdf.py | Python | mit | 3,354 | 0.002683 | #!/usr/bin/env python
"""
Simple implementations of BDF integration to generate comparisons
for unit testing.
"""
import numpy as np
from scipy import linalg
class BDFIntegrator(object):
    """Fixed-step BDF integrator for the linear ODE system y' = A y + k."""

    def __init__(self, h, y, A, k):
        """
        Integrator for y' = Ay + k.
        Uses second-order BDF with step size h.
        The first timestep is bootstrapped with nSub first-order BDF
        substeps of size h / nSub.
        """
        self.stepCount = 0
        self.h = h
        self.y = y
        self.A = A
        self.k = k
        self.N = len(self.y)
        self.nSub = 8  # number of bootstrap substeps

    def step(self):
        """Advance the solution by one step of size h."""
        if self.stepCount == 0:
            self.yprev = self.y.copy()
            # First-order BDF: y_n = y_(n-1) + h * y'_n, i.e. solve
            # (I - h*A) y_n = y_(n-1) + h*k for each substep.
            h = self.h / self.nSub
            # The system matrix is identical for every substep; build once.
            M = np.eye(self.N) - self.A * h
            for j in range(self.nSub):
                b = self.y + h * self.k
                self.y = np.linalg.solve(M, b)
        else:
            # Second-order BDF: y_n = 4/3*y_(n-1) - 1/3*y_(n-2) + 2/3*h*y'_n,
            # i.e. solve (I - 2/3*h*A) y_n = 4/3*y_(n-1) - 1/3*y_(n-2) + 2/3*h*k.
            self.yprev2 = self.yprev.copy()
            self.yprev = self.y.copy()
            M = np.eye(self.N) - 2.0/3.0 * self.h * self.A
            b = 4.0/3.0 * self.yprev - 1.0/3.0 * self.yprev2 + 2.0/3.0 * self.h * self.k
            self.y = np.linalg.solve(M, b)
        self.stepCount += 1
class ExactIntegrator(object):
    def __init__(self, y, A, k):
        """
        Exact integrator for y' = Ay + k, via the matrix exponential:
        y(t) = expm(A t) (y0 + A^-1 k) - A^-1 k.
        """
        self.y = y
        self.A = A
        self.k = k
        self.N = len(self.y)
        # Solve A x = k directly instead of forming the explicit inverse:
        # numerically better conditioned and cheaper than inv(A) @ k.
        self.Ainv_k = linalg.solve(A, k)

    def __call__(self, t):
        """Return the exact solution vector at time *t*."""
        return np.dot(linalg.expm(self.A * t), self.y + self.Ainv_k) - self.Ainv_k
def getBdfSolutions(dt, tf, y0, A, k):
    """Integrate y' = A y + k up to time *tf* with BDF2 steps of size *dt*.

    Returns the list of solution vectors, starting with a copy of *y0*.
    """
    integrator = BDFIntegrator(dt, y0, A, k)
    solutions = [y0.copy()]
    for _ in range(int(round(tf / dt))):
        integrator.step()
        solutions.append(integrator.y.copy())
    return solutions
def main():
    """Print reference BDF2 and exact solutions for the unit tests.

    The exact output format matters: the printed vectors are pasted into
    C++ test fixtures as brace-delimited initializer lists.
    """
    y0 = np.array([0, 0.5, 2.0, 1.0, 0])
    k = np.array([0, 0, 0, 0.2, 0.4])
    # Tridiagonal (diffusion-like) operator.
    A = np.array([[-2, 1, 0, 0, 0],
                  [1, -2, 1, 0, 0],
                  [0, 1, -2, 1, 0],
                  [0, 0, 1, -2, 1],
                  [0, 0, 0, 1, -2]], dtype=float)
    # Same problem at three step sizes to expose temporal convergence.
    Y1 = getBdfSolutions(0.20, 1.0, y0, A, k)
    Y2 = getBdfSolutions(0.05, 1.0, y0, A, k)
    Y3 = getBdfSolutions(0.0125, 1.0, y0, A, k)
    exact = ExactIntegrator(y0, A, k)
    print('Temporal BDF2 solutions, dt = 0.20:')
    for y in Y1:
        print('{' + ', '.join(['%16.14f' % yi for yi in y]) + '}')
    print()
    print('BDF2 solution at t = 1.0 (dt = 0.05)')
    print('{' + ', '.join(['%16.14f' % yi for yi in Y2[-1]]) + '}\n')
    print('BDF2 solution at t = 1.0 (dt = 0.0125):')
    print('{' + ', '.join(['%16.14f' % yi for yi in Y3[-1]]) + '}\n')
    print('Exact solution at t = 1.0:')
    print('{' + ', '.join(['%16.14f' % yi for yi in exact(1.0)]) + '}\n')
    # Error ratio between the two finest step sizes, expressed as an
    # observed order; should approach 2 for BDF2.
    print('Apparent order of convergence: dt = 0.05 vs dt 0.0125:')
    print(np.log((np.linalg.norm(Y2[-1] - exact(1.0))
                  / np.linalg.norm(Y3[-1] - exact(1.0)))) / np.log(0.05/0.0125))

if __name__ == '__main__':
    main()
|
Who8MyLunch/euler | problem_001.py | Python | mit | 632 | 0.003165 |
from __future__ import division, print_function #, unicode_literals
"""
Multiples of 3 and 5
If we list all the natural numbers below 10 that are multiples
of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
"""
import numpy as np
# Setup.
num_max = 1000
basis = [3, 5]

# Collect every natural number below num_max that is divisible by at
# least one basis value (Project Euler 1: multiples of 3 or 5).
factors = [i for i in range(num_max) if any(not i % k for k in basis)]

print('\nRange: {:d}'.format(num_max))
print('Number of factors: {:d}'.format(len(factors)))
print('The answer: {:d}'.format(np.sum(factors)))
# Done.
|
Mifiel/python-api-client | mifiel/certificate.py | Python | mit | 120 | 0.016667 | from mifiel import Base |
class Certificate(Base):
    """Resource wrapper for the Mifiel 'keys' API endpoint."""

    def __init__(self, client):
        # Route all requests for this resource through the 'keys' path.
        Base.__init__(self, client, 'keys')
|
dsarkozi/care4care-sdp-grp4 | Care4Care/C4CApplication/tests/OIDTest.py | Python | agpl-3.0 | 5,177 | 0.006761 | from base64 import urlsafe_b64decode
from io import StringIO
from django.http.response import HttpResponse
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
import openid2rp
from Care4Care.settings import STATICFILES_DIRS
@never_cache
def oidtest(request):
    """Kick off an OpenID authentication against the e-contract eID IdP.

    Builds the list of attribute-exchange (AX) type URIs to request,
    discovers the provider endpoint, associates with it and redirects
    the browser (HTTP 303) to the provider's authentication URL, with
    /tests/openid2 as the return address.
    """
    # Requested AX attributes (required, optional) -- all required here.
    ax = (("http://axschema.org/eid/card-validity/end",
           "http://axschema.org/person/gender",
           "http://axschema.org/contact/postalAddress/home",
           "http://axschema.org/namePerson/first",
           "http://axschema.org/eid/photo",
           "http://axschema.org/eid/card-validity/begin",
           "http://axschema.org/contact/city/home",
           "http://axschema.org/contact/postalCode/home",
           "http://axschema.org/birthDate",
           "http://openid.net/schema/birthDate/birthYear",
           "http://openid.net/schema/birthDate/birthMonth",
           "http://openid.net/schema/birthDate/birthday",
           "http://axschema.org/eid/pob",
           "http://axschema.org/eid/card-number",
           "http://axschema.org/eid/nationality",
           "http://axschema.org/namePerson/last",
           "http://axschema.org/namePerson",
           "http://axschema.org/eid/rrn",
           # "http://axschema.org/eid/cert/auth",
           "http://axschema.org/eid/age"), ())
    uri = "https://www.e-contract.be/eid-idp/endpoints/openid/ident"
    kind, claimedId = openid2rp.normalize_uri(uri)
    res = openid2rp.discover(claimedId)
    if res is not None:
        services, op_endpoint, op_local = res
        session = openid2rp.associate(services, op_endpoint)
        redirect_url = openid2rp.request_authentication(
            services,
            op_endpoint,
            session['assoc_handle'],
            "http://127.0.0.1:8000/tests/openid2",
            claimedId, op_local,
            sreg=((), ()),
            ax=ax
        )
    # NOTE(review): if discovery fails (res is None) redirect_url is
    # unbound and the next line raises NameError -- confirm intended.
    response = HttpResponse()
    response['Location'] = redirect_url
    # 303 See Other: force a GET redirect to the provider.
    response.status_code=303
    return response
    # return render_to_response('OIDTest.html',
    # {
    # 'services' : services,
    # 'op_endpoint' : op_endpoint,
    # 'op_local' : op_local,
    # 'kind' : kind,
    # 'claimedID' : claimedId,
    # 'redirect_url' : redirect_url
    # }
    # )
@csrf_exempt
@never_cache
def oidtest2(request):
    """OpenID return endpoint: render the attributes sent by the IdP."""
    attributes = get_ax(request.POST)
    context = dict(attributes)
    context['ax'] = attributes
    # Also expose a plain-text dump of the attribute dict for debugging.
    dump = StringIO()
    print(attributes, file=dump)
    context['printy'] = dump.getvalue()
    return render(request, "OIDTest.html", context)
def get_ax(response):
    """Collect signed attribute-exchange (AX) values from an OpenID response.

    Only values whose alias appears in the 'openid.signed' field list are
    trusted. Returns a dict keyed by human-readable attribute names (see
    _get_readable_ax).
    """
    ax = 'ax' + "."
    oax = 'openid.' + ax
    res = {}
    for k, v in response.items():
        if k.startswith(oax + "type."):
            # k is 'openid.ax.type.<alias>'; v is the attribute type URI.
            k = k.rsplit('.', 1)[1]
            value_name = oax + "value." + k
            # Skip attributes not covered by the provider's signature.
            if ax + "value." + k not in response['openid.signed']:
                continue
            res[v] = response[value_name]
    return _get_readable_ax(res)
def _get_readable_ax(ax):
res = {}
AX = { | "http://axschema.org/eid/card-validity/end" : 'card_validity_end',
"http://axschema.org/person/gender" : 'gender',
"http://axschema.org/contact/postalAddress/home" : 'address',
"http://axschema.org/namePerson/first" : 'firstname',
"http://axschema.org/eid/photo" : 'photo',
"http://axschema.org/eid/card-validity/begin" : 'card_validity_start',
"http://axschema.org/contact/city/home" : 'city',
"http://axschema.org/contact/postalCode/home" : 'postal_code',
"http://axschema.org/birthDate" : 'birth_date',
"http://openid.net/schema/birthDate/birthYear" : 'birth_year',
"http://openid.net/schema/birthDate/birthMonth" : 'birth_month',
"http://openid.net/schema/birthDate/birthday" : 'birth_day',
"http://axschema.org/eid/pob" : 'birth_place',
"http://axschema.org/eid/card-number" : 'card_number',
"http://axschema.org/eid/nationality" : 'nationality',
"http://axschema.org/namePerson/last" : 'lastname',
"http://axschema.org/namePerson" : 'fullname',
"http://axschema.org/eid/rrn" : 'register_number',
"http://axschema.org/eid/age" : 'age'
}
for key, value in ax.items():
if key.endswith('photo'):
filename = STATICFILES_DIRS[0] + '/images/photo.jpg'
#value = value.replace('-', '+').replace('_', '/')
if len(value) % 3 != 0: value += '=';
if len(value) % 3 != 0: value += '=';
with open(filename, 'wb') as fd:
fd.write(urlsafe_b64decode(value))
fd.close()
value = filename
res[AX[key]] = value
return res |
arnomoonens/Mussy-Robot | myservo.py | Python | mit | 2,876 | 0.019471 | import pigpio
import time
from multiprocessing import Process, Queue, Lock
#----------------------- SERVO 1 ----------------------------
# Shared state for servo 1: target position travels through queue q,
# the current position through q2; lock l serialises access from the
# turnL/turnR helpers and the P0 worker process.
speed = .1
minP = 30     # minimum allowed angle, degrees
maxP = 180    # maximum allowed angle, degrees
currentP = 90
newP = 90
q = Queue() #queue for the new position
q.put(newP)
q2 = Queue() #queue for the current position
q2.put(currentP)
l = Lock()
# 800 minimum up to 1900 (pulse width range observed for this servo)
def g2q(g):
    """Convert an angle in degrees (0-180) to a servo pulse width (500-2500 us)."""
    return (2000. / 180.) * g + 500
def turnL(dist):
    """Raise servo 1's target angle by *dist* degrees, clamped to maxP.

    The new target is pushed onto queue q for the P0 worker to act on.
    """
    global newP
    l.acquire()
    # Pick up the latest queued target (if any) before adjusting it.
    if not q.empty():
        newP = q.get()
    newP = newP+dist
    if newP > maxP:
        newP = maxP
    q.put(newP)
    l.release()

def turnR(dist):
    """Lower servo 1's target angle by *dist* degrees, clamped to minP."""
    global newP
    l.acquire()
    if not q.empty():
        newP = q.get()
    newP = newP-dist
    if newP < minP:
        newP = minP
    q.put(newP)
    l.release()
def P0(proc):
    """Servo 1 worker: drive GPIO pin 23 towards the queued target angle.

    Runs until the parent drains *proc* (the run-flag queue). The target
    comes in via queue q; the current position is kept in queue q2.
    """
    try:
        servoPin = 23
        pi = pigpio.pi()
        _newP = 90
        while not proc.empty():
            l.acquire()
            if not q.empty():
                _newP = q.get()
            _currentP = q2.get()
            # Step one degree at a time towards the target.
            while _currentP < _newP:
                _currentP += 1
                pi.set_servo_pulsewidth(servoPin, g2q(_currentP))
            while _currentP > _newP:
                _currentP -= 1
                pi.set_servo_pulsewidth(servoPin, g2q(_currentP))
            q2.put(_currentP)
            l.release()
        pi.stop()
        print('Process P0 terminated.')
    except:
        # Broad catch kept: any failure must still release the pigpio
        # handle. NOTE(review): if pigpio.pi() itself raised, `pi` is
        # unbound here and this handler raises NameError -- confirm.
        print('Error!!Process P0 terminated:')
        pi.stop()
# ----------------------------- SERVO 2 ------------------
# Same queue/lock arrangement as servo 1, with its own narrower
# angular range (80-100 degrees).
speed = .1
minP_2 = 80
maxP_2 = 100
currentP_2 = 90
newP_2 = 90
q_2 = Queue() #queue for the new position
q_2.put(newP_2)
q2_2 = Queue() #queue for the current position
q2_2.put(currentP_2)
l_2 = Lock()
def turnDw(dist):
    """Raise servo 2's target angle by *dist* degrees, clamped to maxP_2."""
    global newP_2
    l_2.acquire()
    if not q_2.empty():
        newP_2 = q_2.get()
    newP_2 = newP_2+dist
    if newP_2 > maxP_2 :
        newP_2 = maxP_2
    q_2 .put(newP_2 )
    l_2 .release()

def turnUp(dist):
    """Lower servo 2's target angle by *dist* degrees, clamped to minP_2."""
    global newP_2
    l_2 .acquire()
    if not q_2 .empty():
        newP_2 = q_2 .get()
    newP_2 = newP_2 -dist
    if newP_2 < minP_2 :
        newP_2 = minP_2
    q_2 .put(newP_2 )
    l_2 .release()
def P1(proc):
    """Servo 2 worker: drive GPIO pin 17 towards the queued target angle.

    Mirrors P0 but uses the servo-2 queues/lock (q_2, q2_2, l_2).
    """
    try:
        servoPin_2 = 17
        pi_2 = pigpio.pi()
        _newP_2 = 90
        while not proc.empty():
            l_2.acquire()
            if not q_2.empty():
                _newP_2 = q_2.get()
            _currentP_2 = q2_2.get()
            # Step one degree at a time towards the target.
            while _currentP_2 < _newP_2:
                _currentP_2 += 1
                pi_2.set_servo_pulsewidth(servoPin_2, g2q(_currentP_2))
            while _currentP_2 > _newP_2:
                _currentP_2 -= 1
                pi_2.set_servo_pulsewidth(servoPin_2, g2q(_currentP_2))
            q2_2.put(_currentP_2)
            l_2.release()
        pi_2.stop()
        print('Process P1 terminated.')
    except:
        # NOTE(review): if pigpio.pi() itself raised, pi_2 is unbound
        # here and this handler raises NameError -- confirm.
        print('Error!!Process P1 terminated')
        pi_2.stop()
|
onehao/opensource | pyml/crawler/minispider/SpiderFileUtils.py | Python | apache-2.0 | 610 | 0.004967 | # -*- coding:utf-8 -*-
'''
Created on 20 | 15年3月12日
@author: wanhao01
'''
import os
class SpiderFileUtils(object):
    '''
    deal with file related operations.
    '''

    def __save_page(self, data, url, outputdir):
        '''
        save the page content with the specific url to the local path,
        creating the output directory on demand.
        '''
        if not os.path.exists(outputdir):
            os.makedirs(outputdir)
        # NOTE(review): __validate_name is not defined in this class --
        # confirm it is provided elsewhere before relying on this method.
        filename = self.__validate_name(url)
        # 'with' guarantees the file is closed even if the write fails
        # (the original leaked the handle on error).
        with open(outputdir + os.sep + filename, 'w') as f:
            f.writelines(data)
if __name__ == '__main__':
pass |
Titulacion-Sistemas/PracticasDjango | practicaConCarratala/manage.py | Python | gpl-2.0 | 263 | 0.003802 | #!/usr/bin/env python
import os
import sys |
if __name__ == "__main__":
    # Point Django at this project's settings module, then hand the
    # command line over to Django's management dispatcher.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "practicaConCarratala.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
metapolator/mutatormathtools | python_modules/lib/python/defcon/objects/glyph.py | Python | apache-2.0 | 60,725 | 0.001894 | import weakref
from warnings import warn
from fontTools.misc import arrayTools
from defcon.objects.base import BaseObject
from defcon.objects.contour import Contour
from defcon.objects.point import Point
from defcon.objects.component import Component
from defcon.objects.anchor import Anchor
from defcon.objects.lib import Lib
from defcon.objects.guideline import Guideline
from defcon.objects.image import Image
from defcon.objects.color import Color
from defcon.tools.representations import glyphBoundsRepresentationFactory, glyphControlPointBoundsRepresentationFactory
from defcon.pens.decomposeComponentPointPen import DecomposeComponentPointPen
def addRepresentationFactory(name, factory):
    """Deprecated: register *factory* under *name* on the Glyph class."""
    # stacklevel=2 makes the warning point at the caller, not this shim.
    warn("addRepresentationFactory is deprecated. Use the functions in defcon.__init__.", DeprecationWarning, stacklevel=2)
    Glyph.representationFactories[name] = dict(factory=factory, destructiveNotifications=["Glyph.Changed"])

def removeRepresentationFactory(name):
    """Deprecated: unregister the representation factory named *name*."""
    warn("removeRepresentationFactory is deprecated. Use the functions in defcon.__init__.", DeprecationWarning, stacklevel=2)
    del Glyph.representationFactories[name]
class Glyph(BaseObject):
"""
This object represents a glyph and it contains contour, component, anchor
and other assorted bits data about the glyph.
**This object posts the following notifications:**
============================
Name
============================
Glyph.Changed
Glyph.NameWillChange
Glyph.NameChanged
Glyph.UnicodesChanged
Glyph.WidthChanged
Glyph.HeightChanged
Glyph.NoteChanged
Glyph.LibChanged
Glyph.ImageChanged
Glyph.ImageWillBeDeleted
Glyph.ContourWillBeDeleted
Glyph.ContoursChanged
Glyph.ComponentWillBeDeleted
Glyph.ComponentsChanged
Glyph.AnchorWillBeDeleted
Glyph.AnchorsChanged
Glyph.GuidelineWillBeDeleted
Glyph.GuidelinesChanged
Glyph.MarkColorChanged
============================
The Glyph object has list like behavior. This behavior allows you to interact
with contour data directly. For example, to get a particular contour::
contour = glyph[0]
To iterate over all contours::
for contour in glyph:
To get the number of contours::
contourCount = len(glyph)
To interact with components or anchors in a similar way,
use the ``components`` and ``anchors`` attributes.
"""
changeNotificationName = "Glyph.Changed"
representationFactories = {
"defcon.glyph.bounds" : dict(
factory=glyphBoundsRepresentationFactory,
destructiveNotifications=("Glyph.ContoursChanged", "Glyph.ComponentsChanged", "Glyph.ComponentBaseGlyphDataChanged")
),
"defcon.glyph.controlPointBounds" : dict(
factory=glyphControlPointBoundsRepresentationFactory,
destructiveNotifications=("Glyph.ContoursChanged", "Glyph.ComponentsChanged", "Glyph.ComponentBaseGlyphDataChanged")
)
}
def __init__(self, layer=None,
contourClass=None, pointClass=None, componentClass=None, anchorClass=None,
guidelineClass=None, libClass=None, imageClass=None):
layerSet = font = None
if layer is not None:
font = weakref.ref(layer.layerSet.font)
layerSet = weakref.ref(layer.layerSet)
layer = weakref.ref(layer)
self._font = font
self._layerSet = layerSet
self._layer = layer
super(Glyph, self).__init__()
self.beginSelfNotificationObservation()
self._isLoading = False
self._dirty = False
self._name = None
self._unicodes = []
self._width = 0
self._height = 0
self._note = None
self._image = None
self._identifiers = set()
se | lf._shallowLoadedContours = None
self._contours = []
self._components = []
self._anchors = []
self._guidelines = []
self._lib = None
if contourClass is None:
contourClass = Contour
if pointClass is None:
pointClass = Point
if componentC | lass is None:
componentClass = Component
if anchorClass is None:
anchorClass = Anchor
if guidelineClass is None:
guidelineClass = Guideline
if libClass is None:
libClass = Lib
if imageClass is None:
imageClass = Image
self._contourClass = contourClass
self._pointClass = pointClass
self._componentClass = componentClass
self._anchorClass = anchorClass
self._guidelineClass = Guideline
self._libClass = libClass
self._imageClass = imageClass
    def __del__(self):
        # Release child-object references on deallocation, after letting
        # the base class run its own teardown.
        super(Glyph, self).__del__()
        self._contours = None
        self._components = None
        self._anchors = None
        self._guidelines = None
        self._lib = None
        self._image = None
# --------------
# Parent Objects
# --------------
    def getParent(self):
        """Return the parent font (same object as the ``font`` property)."""
        return self.font

    def _get_font(self):
        # self._font is a weakref (or None); dereference it for callers.
        if self._font is None:
            return None
        return self._font()

    font = property(_get_font, doc="The :class:`Font` that this glyph belongs to.")

    def _get_layerSet(self):
        # Weakref dereference, mirroring _get_font.
        if self._layerSet is None:
            return None
        return self._layerSet()

    layerSet = property(_get_layerSet, doc="The :class:`LayerSet` that this glyph belongs to.")

    def _get_layer(self):
        if self._layer is None:
            return None
        return self._layer()

    layer = property(_get_layer, doc="The :class:`Layer` that this glyph belongs to.")
# ----------------
# Basic Attributes
# ----------------
# identifiers
    def _get_identifiers(self):
        return self._identifiers

    identifiers = property(_get_identifiers, doc="Set of identifiers for the glyph. This is primarily for internal use.")

    # name

    def _set_name(self, value):
        # Post both a will-change and a did-change notification so
        # observers (e.g. the parent layer) can re-key the glyph.
        oldName = self._name
        if oldName != value:
            self.postNotification(notification="Glyph.NameWillChange", data=dict(oldValue=oldName, newValue=value))
            self._name = value
            self.postNotification(notification="Glyph.NameChanged", data=dict(oldValue=oldName, newValue=value))
            self.dirty = True

    def _get_name(self):
        return self._name

    name = property(_get_name, _set_name, doc="The name of the glyph. Setting this posts *Glyph.NameWillChange*, *Glyph.NameChanged* and *Glyph.Changed* notifications.")
    # unicodes

    def _get_unicodes(self):
        # Return a copy so callers cannot mutate internal state directly.
        return list(self._unicodes)

    def _set_unicodes(self, value):
        oldValue = self.unicodes
        if oldValue != value:
            self._unicodes = value
            self.postNotification(notification="Glyph.UnicodesChanged", data=dict(oldValue=oldValue, newValue=value))
            self.dirty = True

    unicodes = property(_get_unicodes, _set_unicodes, doc="The list of unicode values assigned to the glyph. Setting this posts *Glyph.UnicodesChanged* and *Glyph.Changed* notifications.")

    def _get_unicode(self):
        if self._unicodes:
            return self._unicodes[0]
        return None

    def _set_unicode(self, value):
        if value is None:
            self.unicodes = []
        else:
            # Promote *value* to the primary (first) slot, preserving the
            # relative order of the remaining values.
            existing = list(self._unicodes)
            if value in existing:
                existing.pop(existing.index(value))
            existing.insert(0, value)
            self.unicodes = existing

    unicode = property(_get_unicode, _set_unicode, doc="The primary unicode value for the glyph. This is the equivalent of ``glyph.unicodes[0]``. This is a convenience attribute that works with the ``unicodes`` attribute.")
# -------
# Metrics
# -------
# bounds
    def _get_bounds(self):
        # Delegates to the cached representation registered above.
        return self.getRepresentation("defcon.glyph.bounds")

    bounds = property(_get_bounds, doc="The bounds of the glyph's outline expressed as a tuple of form (xMin, yMin, xMax, yMax).")

    def _get_controlPointBounds(self):
        return self.getRepresentation("defcon.glyph.controlPointBounds")
controlPointBounds = property(_get_controlPointBounds, |
biomodels/BIOMD0000000534 | BIOMD0000000534/model.py | Python | cc0-1.0 | 427 | 0.009368 | import os
# Locate and read the SBML model file that ships alongside this module.
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000534.xml')
with open(sbmlFilePath,'r') as f:
    sbmlString = f.read()
def module_exists(module_name):
    """Return True if *module_name* can be imported, False otherwise."""
    try:
        __import__(module_name)
    except ImportError:
        return False
    else:
        return True
# Parse the model eagerly when the optional libsbml binding is available.
if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
22i/minecraft-voxel-blender-models | models/extra/blender-scripting/lib/agent.py | Python | gpl-3.0 | 1,320 | 0.018182 | import bpy
import os
# join them together ctrl+j
# (merges all selected objects into the active one, as Ctrl+J does in the UI)
bpy.ops.object.join()
def get_override(area_type, region_type):
    """Return an operator context override for the first matching region.

    Scans the current screen for an area of *area_type* containing a
    region of *region_type*; raises RuntimeError when none is open.
    """
    for area in bpy.context.screen.areas:
        if area.type != area_type:
            continue
        for region in area.regions:
            if region.type == region_type:
                return {'area': area, 'region': region}
    #error message if the area or region wasn't found
    raise RuntimeError("Wasn't able to find", region_type," in area ", area_type,
                       "\n Make sure it's open while executing script.")
# We need to override the context of our operator.
override = get_override('VIEW_3D', 'WINDOW')

# NOTE(review): the original comment claimed "rotate about the X-axis by
# 45 degrees", but axis=(0,0,1) is the Z axis and no angle is passed --
# confirm the intended rotation.
bpy.ops.transform.rotate(override, axis=(0,0,1))
bpy.ops.transform.rotate(override, axis=(0,0,1))

# Export next to the .blend file as agent.b3d.
blend_file_path = bpy.data.filepath
directory = os.path.dirname(blend_file_path)
#target_file = os.path.join(directory, 'agent.obj')
#target_file = os.path.join(directory, 'exported/agent.b3d')
target_file = os.path.join(directory, 'agent.b3d')

#bpy.ops.export_scene.obj(filepath=target_file)
bpy.ops.screen.b3d_export(filepath=target_file)
#bpy.ops.export_scene.obj()
#bpy.ops.screen.b3d_export()

# exits blender
bpy.ops.wm.quit_blender()
|
partp/gtg-services | GTG/plugins/notification_area/notification_area.py | Python | gpl-3.0 | 18,501 | 0.00027 | # -*- coding: utf-8 -*-
# Copyright (c) 2009 - Paulo Cabido <paulo.cabido@gmail.com>
# - Luca Invernizzi <invernizzi.l@gmail.com>
# - Izidor Matušov <izidor.matusov@gmail.com>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
import os
from gi.repository import Gtk
try:
from gi.repository import AppIndicator3 as appindicator
except:
pass
from GTG import _
from GTG import DATA_DIR
from GTG.tools.borg import Borg
from GTG.tools.dates import Date
class TheIndicator(Borg):
    """
    Application indicator can be instantiated only once. The
    plugin api, when toggling the activation state of a plugin,
    instantiates different objects from the plugin class. Therefore,
    we need to keep a reference to the indicator object. This class
    does that.
    """

    def __init__(self):
        super(TheIndicator, self).__init__()
        # Borg shares state: only create the indicator the first time.
        if not hasattr(self, "_indicator"):
            try:
                self._indicator = appindicator.Indicator(
                    "gtg",
                    "indicator-messages",
                    appindicator.CATEGORY_APPLICATION_STATUS)
            except:
                # AppIndicator3 may be unavailable or fail to initialise;
                # None tells callers to fall back to a Gtk.StatusIcon.
                self._indicator = None

    def get_indicator(self):
        """Return the shared indicator instance, or None when unavailable."""
        return self._indicator
class IconIndicator:
    """
    A common interface to an app indicator and a status icon
    """

    NORMAL_ICON = "gtg"
    ATTENTION_ICON = "gtg_need_attention"

    def __init__(self):
        self._indicator = TheIndicator().get_indicator()
        self._icon = None
        self._menu = None
        self._attention = False

    def activate(self, leftbtn_callback, menu):
        """ Setup the icon / the indicator """
        self._menu = menu
        if self._indicator:
            # Show the icon even when running ./scripts/debug.sh
            theme_path = os.path.join(DATA_DIR, 'icons')
            self._indicator.set_icon_theme_path(theme_path)
            self._indicator.set_icon("gtg-panel")
            self._indicator.set_attention_icon(self.ATTENTION_ICON)
            self._indicator.set_menu(menu)
            self._indicator.set_status(appindicator.STATUS_ACTIVE)
        else:
            # No app indicator available: fall back to a status icon.
            self._icon = Gtk.StatusIcon()
            self._icon.set_from_icon_name(self.NORMAL_ICON)
            self._icon.set_tooltip_text("Getting Things GNOME!")
            self._icon.set_visible(True)
            self._icon.connect('activate', leftbtn_callback)
            self._icon.connect('popup-menu', self._on_icon_popup)

    def deactivate(self):
        """ Hide the icon """
        if self._indicator:
            self._indicator.set_status(appindicator.STATUS_PASSIVE)
        else:
            self._icon.set_visible(False)

    def update_menu(self):
        """ Force indicator to update menu """
        if self._indicator:
            self._indicator.set_menu(self._menu)

    def set_attention(self, attention):
        """ Show a special icon when the indicator needs attention """
        # Change icon only when the attention state actually changes.
        if self._attention == attention:
            return
        if self._indicator:
            if attention:
                status = appindicator.STATUS_ATTENTION
            else:
                status = appindicator.STATUS_ACTIVE
            self._indicator.set_status(status)
        else:
            if attention:
                icon = self.ATTENTION_ICON
            else:
                icon = self.NORMAL_ICON
            self._icon.set_from_icon_name(icon)
        self._attention = attention

    def _on_icon_popup(self, icon, button, timestamp):
        """ Show the menu on right click on the icon """
        if not self._indicator:
            self._menu.popup(None, None, Gtk.StatusIcon.position_menu,
                             icon, button, timestamp)
def _due_within(task, danger_zone):
    """
    Determine if a task is in the danger zone.
    Convention: a danger zone of 1 day includes tasks due today.
    """
    due = task.get_due_date()
    return due != Date.no_date() and due.days_left() < danger_zone
class _Attention:
    """
    Define need attention state depending on whether there
    are tasks in danger zone.

    There are two levels of attention:
    "normal": there are no tasks in danger zone
    "high": there is at least one task in danger zone

    A task is in danger zone if the number of days left is less
    than time span (in days) defined by danger_zone.
    """

    def __init__(self, danger_zone, indicator, tree, req):
        self.__tree = tree
        self.__req = req
        self._indicator = indicator
        self.danger_zone = danger_zone

        # Setup list of tasks in danger zone
        """ Setup a list of tasks in danger zone, use task id """
        self.tasks_danger = []
        for tid in self.__tree.get_all_nodes():
            task = self.__req.get_task(tid)
            if _due_within(task, self.danger_zone):
                self.tasks_danger.append(tid)
        # Set initial status
        self._update_indicator()

    def _update_indicator(self):
        """ Set the proper icon for the indicator """
        self._indicator.set_attention(len(self.tasks_danger) > 0)

    def update_on_task_modified(self, tid):
        # Add or remove the task from the danger list depending on its
        # (possibly changed) due date, then refresh the icon.
        task = self.__req.get_task(tid)
        if tid in self.tasks_danger:
            if not _due_within(task, self.danger_zone):
                self.tasks_danger.remove(tid)
        else:
            if _due_within(task, self.danger_zone):
                self.tasks_danger.append(tid)
        self._update_indicator()

    def update_on_task_deleted(self, tid):
        # Deleted tasks can no longer need attention.
        if tid in self.tasks_danger:
            self.tasks_danger.remove(tid)
        self._update_indicator()
class NotificationArea:
"""
Plugin that display a notification area widget or an indicator
to quickly access tasks.
"""
DEFAULT_PREFERENCES = {"start_minimized": False,
"danger_zone": 1}
PLUGIN_NAME = "notification_area"
MAX_TITLE_LEN = 30
MAX_ITEMS = 10
    def __init__(self):
        # Shared icon/indicator wrapper plus handles that are populated
        # in activate() and torn down on deactivation.
        self._indicator = IconIndicator()
        self.__browser_handler = None
        self.__liblarch_callbacks = []
def activate(self, plugin_api):
""" Set up the plugin, set callbacks, etc """
self.__plugin_api = plugin_api
self.__view_manager = plugin_api.get_view_manager()
self.__requester = plugin_api.get_requester()
# Tasks_in_menu will hold the menu_items in the menu, to quickly access
# them given the task id. Contains tuple of this format:
# (title, key, Gtk.MenuItem)
self.__init_gtk()
# We load preferences before connecting to tree
self.preference_dialog_init()
self.preferences_load()
# Enable attention monitor.
self.__attention = None
self.__tree_att = self.__connect_to_tree([
("node-added-inview", self.__on_task_added_att),
("node-modified-inview", self.__on_task_added_att),
("node-deleted-inview", self.__on_task_deleted_att),
])
self.__tree_att.apply_filter('workview')
self.__init_attention()
self.__tree = self.__connect_to_tree([
("node-added-inview", self.__on_task_added),
("node-modified-inview", self.__on_task_added),
("node-deleted-inview", self.__on_task_deleted),
])
self.__tree.apply_filter('workview')
# When no windows (browser or text editors) are shown, it tries to quit
# With |
Bengt/AL-FanControl | python/fancontrol/ui/cli_util.py | Python | mit | 2,042 | 0 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
# Width that every row label is right-justified to.
RJUST = 12

# Thin wrappers: each renders one named metric row via format_line.
# (Note: format_tmps and format_temps intentionally share the 'temps'
# label in the original code.)

def format_fans(fans):
    return format_line(prefix='fans'.rjust(RJUST), values=fans)

def format_rpms(rpms):
    return format_line(prefix='rpms'.rjust(RJUST), values=rpms)

def format_pwms(pwms):
    return format_line(prefix='pwms'.rjust(RJUST), values=pwms)

def format_tmps(tmps):
    return format_line(prefix='temps'.rjust(RJUST), values=tmps)

def format_names(names):
    return format_line(prefix='names'.rjust(RJUST), values=names)

def format_ports(ports):
    return format_line(prefix='ports'.rjust(RJUST), values=ports)

def format_temps(temps):
    return format_line(prefix='temps'.rjust(RJUST), values=temps)

def format_ambients(ambients):
    return format_line(prefix='ambients'.rjust(RJUST), values=ambients)

def format_limits(limits):
    return format_line(prefix='limits'.rjust(RJUST), values=limits)

def format_buffers(buffers):
    return format_line(prefix='buffers'.rjust(RJUST), values=buffers)

def format_headrooms(headrooms):
    return format_line(prefix='headrooms'.rjust(RJUST), values=headrooms)

def format_directions(directions):
    return format_line(prefix='directions'.rjust(RJUST), values=directions)

def format_differences(differences):
    return format_line(prefix='differences'.rjust(RJUST), values=differences)

def format_pwms_new(pwms_new):
    return format_line(prefix='new pwms'.rjust(RJUST), values=pwms_new)
def format_line(prefix, values):
    """Render *values* as an aligned, bracketed row labelled *prefix*.

    Values >= 1 are rounded to ints; values strictly between 0 and 1
    are shown as '.xx' (leading zero stripped, padded to three chars);
    None renders blank. Each cell is right-justified to 6 characters.
    """
    cells = []
    for value in values:
        try:
            if value >= 1:
                value = int(round(value, 0))
            if 1 > value != 0:
                # e.g. 0.5 -> '0.5' -> '.5' -> '.50'
                value = str(value)[1:4].ljust(3, '0')
        except TypeError:
            # value is None (or not orderable); rendered blank below
            pass
        cells.append(('' if value is None else str(value)).rjust(6))
    # Fix: with an empty values list the original trailing-comma strip
    # also ate part of the header, producing e.g. 'p:]'; joining the
    # cells yields a clean '[]' instead.
    return prefix + ': [' + ', '.join(cells) + ']'
|
AnotherBobSmith/CLUZ | forms/cluz_form_remove.py | Python | gpl-2.0 | 3,682 | 0.003259 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'cluz_form_remove.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 API-version compatibility shims (generated by pyuic4).
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 (and Python 3) has no QString; strings pass through as-is.
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer Qt drops the encoding argument from translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_removeDialog(object):
    """Generated by pyuic4 from cluz_form_remove.ui; avoid hand edits
    beyond repairs -- regeneration overwrites this class."""

    def setupUi(self, removeDialog):
        removeDialog.setObjectName(_fromUtf8("removeDialog"))
        removeDialog.setWindowModality(QtCore.Qt.ApplicationModal)
        removeDialog.resize(800, 400)
        removeDialog.setMinimumSize(QtCore.QSize(800, 400))
        self.featLabel = QtGui.QLabel(removeDialog)
        self.featLabel.setGeometry(QtCore.QRect(120, 20, 661, 16))
        self.featLabel.setMinimumSize(QtCore.QSize(461, 16))
        self.featLabel.setObjectName(_fromUtf8("featLabel"))
        self.featListWidget = QtGui.QListWidget(removeDialog)
        self.featListWidget.setGeometry(QtCore.QRect(120, 40, 661, 290))
        self.featListWidget.setSelectionMode(QtGui.QAbstractItemView.MultiSelection)
        self.featListWidget.setObjectName(_fromUtf8("featListWidget"))
        self.logoLabel = QtGui.QLabel(removeDialog)
        self.logoLabel.setGeometry(QtCore.QRect(-20, 20, 131, 351))
        self.logoLabel.setText(_fromUtf8(""))
        self.logoLabel.setPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/Cluz/icons/icons/setup_logo.png")))
        self.logoLabel.setObjectName(_fromUtf8("logoLabel"))
        self.horizontalLayoutWidget = QtGui.QWidget(removeDialog)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(350, 340, 211, 51))
        self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
        self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.okButton = QtGui.QPushButton(self.horizontalLayoutWidget)
        self.okButton.setMinimumSize(QtCore.QSize(0, 24))
        self.okButton.setObjectName(_fromUtf8("okButton"))
        self.horizontalLayout.addWidget(self.okButton)
        self.cancelButton = QtGui.QPushButton(self.horizontalLayoutWidget)
        self.cancelButton.setMinimumSize(QtCore.QSize(0, 24))
        self.cancelButton.setObjectName(_fromUtf8("cancelButton"))
        self.horizontalLayout.addWidget(self.cancelButton)

        self.retranslateUi(removeDialog)
        QtCore.QObject.connect(self.cancelButton, QtCore.SIGNAL(_fromUtf8("clicked()")), removeDialog.close)
        QtCore.QMetaObject.connectSlotsByName(removeDialog)

    def retranslateUi(self, removeDialog):
        removeDialog.setWindowTitle(_translate("removeDialog", "Choose features to remove", None))
        self.featLabel.setText(_translate("removeDialog", "Select conservation features to remove from the abundance and target tables", None))
        self.okButton.setText(_translate("removeDialog", "OK", None))
        self.cancelButton.setText(_translate("removeDialog", "Cancel", None))
import resources_rc
if __name__ == "__main__":
    # Manual smoke test: show the dialog standalone outside the host app.
    import sys
    app = QtGui.QApplication(sys.argv)
    removeDialog = QtGui.QDialog()
    ui = Ui_removeDialog()
    ui.setupUi(removeDialog)
    removeDialog.show()
    sys.exit(app.exec_())
|
gushaokun/AppPack | iospack/views.py | Python | mit | 1,042 | 0.002037 | # -*- coding: utf-8 -*-
from django.contrib.auth.models import User, Group
from iospack.models import App
from rest_framework import viewsets
from rest_framework import generics
from iospack.serializers import UserSerializer, GroupSerializer, AppSerializer
# Create your views here.
class UserViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows users to be viewed or edited.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
class GroupViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows groups to be viewed or edited.
    """
    queryset = Group.objects.all()
    serializer_class = GroupSerializer
class AppsViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows apps to be viewed or edited.
    """
    queryset = App.objects.all()
    serializer_class = AppSerializer
    # URL lookups use the app's bundle id instead of the numeric pk.
    lookup_field = 'bundle_id'
class app(generics.RetrieveUpdateDestroyAPIView):
    """
    Retrieve, update or delete a single App, looked up by its numeric id.
    """
    # NOTE(review): lowercase class name violates PEP 8 CapWords; renaming
    # would break existing imports/URLconf references, so it is left as-is.
    queryset = App.objects.all()
    serializer_class = AppSerializer
    lookup_field = 'id'
puttarajubr/commcare-hq | corehq/ex-submodules/couchforms/tests/test_errors.py | Python | bsd-3-clause | 3,166 | 0.000632 | from django.test import TestCase
from corehq.apps.receiverwrapper import submit_form_locally
from couchforms.models import XFormError
class CaseProcessingErrorsTest(TestCase):
    """Malformed case blocks must be quarantined as XFormError documents
    (keyed by the submitted form's original instanceID) rather than being
    saved as ordinary form submissions."""

    def test_no_case_id(self):
        """
        submit form with a case block that has no case_id
        check that
        - it errors
        - the form is not saved under its original id
        - an XFormError is saved with the original id as orig_id
        - the error was logged (<-- is this hard to test?)
        <data xmlns="example.com/foo">
            <case case_id="">
                <update><foo>bar</foo></update>
            </case>
        </data>
        """
        submit_form_locally(
            """<data xmlns="example.com/foo">
                <meta>
                    <instanceID>abc-easy-as-123</instanceID>
                </meta>
                <case case_id="" xmlns="http://commcarehq.org/case/transaction/v2">
                    <update><foo>bar</foo></update>
                </case>
            </data>""",
            'my_very_special_domain',
        )
        # Pull every XFormError in the domain and keep only the one that
        # preserved the submitted form's original instanceID.
        xform_errors = XFormError.view(
            'domain/docs',
            startkey=['my_very_special_domain', 'XFormError'],
            endkey=['my_very_special_domain', 'XFormError', {}],
            reduce=False,
            include_docs=True,
        ).all()
        related_errors = [xform_error for xform_error in xform_errors
                          if xform_error.get_id == 'abc-easy-as-123']
        self.assertEqual(len(related_errors), 1)
        related_error = related_errors[0]
        self.assertEqual(related_error.problem,
                         'IllegalCaseId: case_id must not be empty')

    def test_uses_referrals(self):
        """
        submit form with a case block that uses referrals
        check that
        - it errors
        - the form is not saved under its original id
        - an XFormError is saved with the original id as orig_id
        """
        submit_form_locally(
            """<data xmlns="example.com/foo">
                <meta>
                    <instanceID>abc-easy-as-456</instanceID>
                </meta>
                <case case_id="123" xmlns="http://commcarehq.org/case/transaction/v2">
                    <referral>
                        <referral_id>456</referral_id>
                        <open>
                            <referral_types>t1 t2</referral_types>
                        </open>
                    </referral>
                </case>
            </data>""",
            'my_very_special_domain',
        )
        # Same quarantine check as above, for the deprecated referral block.
        xform_errors = XFormError.view(
            'domain/docs',
            startkey=['my_very_special_domain', 'XFormError'],
            endkey=['my_very_special_domain', 'XFormError', {}],
            reduce=False,
            include_docs=True,
        ).all()
        related_errors = [xform_error for xform_error in xform_errors
                          if xform_error.get_id == 'abc-easy-as-456']
        self.assertEqual(len(related_errors), 1)
        related_error = related_errors[0]
        self.assertEqual(related_error.problem,
                         'UsesReferrals: Sorry, referrals are no longer supported!')
|
CoderBotOrg/coderbotsrv | server/lib/cryptography/hazmat/primitives/hashes.py | Python | gpl-3.0 | 3,022 | 0 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import interfaces
@utils.register_interface(interfaces.HashContext)
class Hash(object):
    """Incremental message-digest context.

    Wraps a backend-provided hash context for *algorithm*; ``update`` feeds
    data, ``finalize`` produces the digest and invalidates the context.
    """
    def __init__(self, algorithm, backend, ctx=None):
        # Validate collaborators early so misuse fails at construction time.
        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        if not isinstance(algorithm, interfaces.HashAlgorithm):
            raise TypeError("Expected instance of interfaces.HashAlgorithm.")
        self._algorithm = algorithm
        self._backend = backend
        # ctx is supplied internally by copy(); external callers leave it None.
        if ctx is None:
            self._ctx = self._backend.create_hash_ctx(self.algorithm)
        else:
            self._ctx = ctx
    algorithm = utils.read_only_property("_algorithm")
    def update(self, data):
        """Feed *data* (bytes) into the digest.  Raises after finalize()."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes.")
        self._ctx.update(data)
    def copy(self):
        """Return an independent Hash sharing this context's current state."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return Hash(
            self.algorithm, backend=self._backend, ctx=self._ctx.copy()
        )
    def finalize(self):
        """Return the digest bytes and invalidate this context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        # Drop the backend context so further use raises AlreadyFinalized.
        self._ctx = None
        return digest
@utils.register_interface(interfaces.HashAlgorithm)
class SHA1(object):
    """SHA-1: 160-bit digest.  Broken for collision resistance; kept only
    for compatibility with legacy protocols."""
    name = "sha1"
    digest_size = 20
    block_size = 64
@utils.register_interface(interfaces.HashAlgorithm)
class SHA224(object):
    """SHA-224 (SHA-2 family): 224-bit digest, 64-byte block."""
    name = "sha224"
    digest_size = 28
    block_size = 64
@utils.register_interface(interfaces.HashAlgorithm)
class SHA256(object):
    """SHA-256 (SHA-2 family): 256-bit digest, 64-byte block."""
    name = "sha256"
    digest_size = 32
    block_size = 64
@utils.register_interface(interfaces.HashAlgorithm)
class SHA384(object):
    """SHA-384 (SHA-2 family): 384-bit digest, 128-byte block."""
    name = "sha384"
    digest_size = 48
    block_size = 128
@utils.register_interface(interfaces.HashAlgorithm)
class SHA512(object):
    """SHA-512 (SHA-2 family): 512-bit digest, 128-byte block."""
    name = "sha512"
    digest_size = 64
    block_size = 128
@utils.register_interface(interfaces.HashAlgorithm)
class RIPEMD160(object):
    """RIPEMD-160: 160-bit digest, 64-byte block."""
    name = "ripemd160"
    digest_size = 20
    block_size = 64
@utils.register_interface(interfaces.HashAlgorithm)
class Whirlpool(object):
    """Whirlpool: 512-bit digest, 64-byte block."""
    name = "whirlpool"
    digest_size = 64
    block_size = 64
@utils.register_interface(interfaces.HashAlgorithm)
class MD5(object):
    """MD5: 128-bit digest.  Cryptographically broken; kept only for
    compatibility with legacy protocols."""
    name = "md5"
    digest_size = 16
    block_size = 64
|
everaldo/example-code | 17-futures/crypto/arcfour_futures.py | Python | mit | 1,219 | 0.00082 | import sys
import time
from concurrent import futures
from random import randrange
from arcfour import arcfour
JOBS = 12
SIZE = 2**18
KEY = b"'Twas brillig, and the slithy toves\nDid gyre"
STATUS = '{} workers, elapsed time: {:.2f}s'
def arcfour_test(size, key):
    """Round-trip *size* random bytes through the ARC4 cipher.

    Encrypts a random payload with *key*, decrypts the ciphertext with the
    same key, and asserts the plaintext survives the round trip.  Returns
    the payload size so the caller can report progress.
    """
    plaintext = bytearray(randrange(256) for _ in range(size))
    ciphertext = arcfour(key, plaintext)
    recovered = arcfour(key, ciphertext)
    assert plaintext == recovered, 'Failed arcfour_test'
    return size
def main(workers=None):
    """Run JOBS cipher round-trips in a process pool and report timing.

    workers: number of worker processes, or None to let the executor pick.
    """
    if workers:
        workers = int(workers)
    t0 = time.time()
    with futures.ProcessPoolExecutor(workers) as executor:
        # NOTE: _max_workers is a private executor attribute, read only to
        # report how many processes were actually used.
        actual_workers = executor._max_workers
        to_do = []
        # Submit jobs of varying sizes, largest first.
        for i in range(JOBS, 0, -1):
            size = SIZE + int(SIZE / JOBS * (i - JOBS/2))
            job = executor.submit(arcfour_test, size, KEY)
            to_do.append(job)
        # Print each result as soon as its job completes.
        for future in futures.as_completed(to_do):
            res = future.result()
            print('{:.1f} KB'.format(res/2**10))
    print(STATUS.format(actual_workers, time.time() - t0))
if __name__ == '__main__':
    # Optional single CLI argument: the number of worker processes.
    if len(sys.argv) == 2:
        workers = int(sys.argv[1])
    else:
        workers = None
    main(workers)
|
perimosocordiae/sparray | sparray/tests/test_truediv.py | Python | mit | 662 | 0.006042 | import unittest
import numpy as np
from numpy.testing import assert_array_almost_equal
from .test_base import BaseSparrayTest, dense2d
class | TestTrueDivision(BaseSparrayTest):
def test_truediv(self):
c = 3
assert_array_almost_equal(dense2d / c, (self.sp2d / c).toarray())
with np.errstate(divide='ignore'):
assert_array_almost_equal(c / dense2d, c / self.sp2d)
def test_itruediv(self):
self.sp2d /= 1
assert_array_almost_equal(dense2d, self.sp2d.toarray())
b = np.random.random(dense2d.shape)
self.sp2d /= b
assert_array_almost | _equal(dense2d / b, self.sp2d.toarray())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
michigraber/neuralyzer | docker/jupyter_minimal/jupyter_notebook_config.py | Python | mit | 1,000 | 0.004 | # Copyright (c) Jupyter Development Team.
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
# Self-signed certificate location inside the Jupyter data directory.
PEM_FILE = os.path.join(jupyter_data_dir(), 'notebook.pem')

# `get_config` is injected into this file's namespace by Jupyter itself.
c = get_config()

# Bind address/port are driven by environment variables so the container
# can be configured at run time; defaults: all interfaces, port 8888.
c.NotebookApp.ip = os.getenv('INTERFACE', '') or '*'
c.NotebookApp.port = int(os.getenv('PORT', '') or 8888)
c.NotebookApp.open_browser = False
# Set a certificate if USE_HTTPS is set to any value
if 'USE_HTTPS' in os.environ:
    if not os.path.isfile(PEM_FILE):
        # Generate a certificate if one doesn't exist on disk
        subprocess.check_call(['openssl', 'req', '-new',
            '-newkey', 'rsa:2048', '-days', '365', '-nodes', '-x509',
            '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
            '-keyout', PEM_FILE, '-out', PEM_FILE])
    c.NotebookApp.certfile = PEM_FILE
# Set a password if PASSWORD is set
if 'PASSWORD' in os.environ:
    from IPython.lib import passwd
    c.NotebookApp.password = passwd(os.environ['PASSWORD'])
    # Scrub the plaintext password from the environment once hashed.
    del os.environ['PASSWORD']
|
purduesigbots/pros-cli | pros/serial/devices/system_device.py | Python | mpl-2.0 | 285 | 0 | import typing
from pros.conductor import Project
class SystemDevice(object):
    """Abstract interface for a device that can receive program uploads.

    Concrete device classes must override both methods.
    """
    def upload_project(self, project: Project, **kwargs):
        """Upload the given conductor Project to the device."""
        raise NotImplementedError
    def write_program(self, file: typing.BinaryIO, quirk: int = 0, **kwargs):
        """Write a compiled program binary stream to the device."""
        raise NotImplementedError
|
saangel/randomcoding | QuestionShuffling.py | Python | gpl-2.0 | 678 | 0.041298 | import numpy as np
# import text file, which has a determined format
a=open("test.dat")
b=open("test2.dat","w")
f=a.read()
g=f.split("\n")
nlines=6
nquestions=16
q1=[g[nlines*i:nlines*(i+1)] for i in range(nquestions)]
# these two lines can be commented if y | ou want to shuffle last question also
last=q1[-1]
q2=q1[:-1]
np.random.shuffle(q2)
for q in q2:
alts=q[4:9]
np.random.shuffle(alts)
q=np.concatenate([q[:4],alts,q[-2:]])
for l in q:
b. | write(str(l)+"\n")
# comment this block also if you want to shuffle last question
alts=last[4:9]
np.random.shuffle(alts)
last=np.concatenate([last[:4],alts,last[-2:]])
for l in last:
b.write(str(l)+"\n")
a.close()
b.close()
|
kaushik94/sympy | sympy/functions/elementary/hyperbolic.py | Python | bsd-3-clause | 48,081 | 0.000478 | from __future__ import print_function, division
from sympy.core import S, sympify, cacheit, pi, I, Rational
from sympy.core.add import Add
from sympy.core.function import Function, ArgumentIndexError, _coeff_isneg
from sympy.functions.combinatorial.factorials import factorial, RisingFactorial
from sympy.functions.elementary.exponential import exp, log, match_real_imag
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.elementary.integers import floor
from sympy import pi, Eq
from sympy.logic import Or, And
from sympy.core.logic import fuzzy_or, fuzzy_and, fuzzy_bool
def _rewrite_hyperbolics_as_exp(expr):
    """Rewrite every hyperbolic-function node of *expr* in terms of exp."""
    tree = sympify(expr)
    replacements = {}
    for node in tree.atoms(HyperbolicFunction):
        replacements[node] = node.rewrite(exp)
    return tree.xreplace(replacements)
###############################################################################
########################### HYPERBOLIC FUNCTIONS ##############################
###############################################################################
class HyperbolicFunction(Function):
    """
    Base class for hyperbolic functions.

    See Also
    ========

    sinh, cosh, tanh, coth
    """
    # Hyperbolic functions are single-valued: no branch cuts to track.
    unbranched = True
def _peeloff_ipi(arg):
    """
    Split ARG into two parts, a "rest" and a multiple of I*pi/2.
    This assumes ARG to be an Add.
    The multiple of I*pi returned in the second position is always a Rational.

    Examples
    ========

    >>> from sympy.functions.elementary.hyperbolic import _peeloff_ipi as peel
    >>> from sympy import pi, I
    >>> from sympy.abc import x, y
    >>> peel(x + I*pi/2)
    (x, I*pi/2)
    >>> peel(x + I*2*pi/3 + I*pi*y)
    (x + I*pi*y + I*pi/6, I*pi/2)
    """
    # Find a term of the form K*I*pi with rational K (K defaults to 1 for
    # a bare I*pi term).
    for a in Add.make_args(arg):
        if a == S.Pi*S.ImaginaryUnit:
            K = S.One
            break
        elif a.is_Mul:
            K, p = a.as_two_terms()
            if p == S.Pi*S.ImaginaryUnit and K.is_Rational:
                break
    else:
        # No I*pi multiple present: nothing to peel off.
        return arg, S.Zero
    # m1 is the fractional remainder modulo I*pi/2; m2 = K*I*pi - m1 is the
    # exact multiple of I*pi/2 that is peeled off.
    m1 = (K % S.Half)*S.Pi*S.ImaginaryUnit
    m2 = K*S.Pi*S.ImaginaryUnit - m1
    return arg - m2, m2
class sinh(HyperbolicFunction):
    r"""
    The hyperbolic sine function, `\frac{e^x - e^{-x}}{2}`.

    * sinh(x) -> Returns the hyperbolic sine of x

    See Also
    ========

    cosh, tanh, asinh
    """

    def fdiff(self, argindex=1):
        """
        Returns the first derivative of this function.
        """
        if argindex == 1:
            return cosh(self.args[0])
        else:
            raise ArgumentIndexError(self, argindex)

    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return asinh

    @classmethod
    def eval(cls, arg):
        """Automatic evaluation at known special values / argument shapes."""
        from sympy import sin

        arg = sympify(arg)

        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.NegativeInfinity
            elif arg.is_zero:
                return S.Zero
            elif arg.is_negative:
                # sinh is odd: pull the sign out.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.NaN

            # Purely imaginary argument: sinh(I*x) = I*sin(x).
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)

            if i_coeff is not None:
                return S.ImaginaryUnit * sin(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)

            if arg.is_Add:
                # Peel off multiples of I*pi/2 and use the addition formula.
                x, m = _peeloff_ipi(arg)
                if m:
                    return sinh(m)*cosh(x) + cosh(m)*sinh(x)

            if arg.is_zero:
                return S.Zero

            # sinh composed with an inverse hyperbolic function simplifies
            # to an algebraic expression.
            if arg.func == asinh:
                return arg.args[0]

            if arg.func == acosh:
                x = arg.args[0]
                return sqrt(x - 1) * sqrt(x + 1)

            if arg.func == atanh:
                x = arg.args[0]
                return x/sqrt(1 - x**2)

            if arg.func == acoth:
                x = arg.args[0]
                return 1/(sqrt(x - 1) * sqrt(x + 1))

    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        """
        Returns the next term in the Taylor series expansion.
        """
        # Only odd powers appear in the sinh series.
        if n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)

            if len(previous_terms) > 2:
                p = previous_terms[-2]
                return p * x**2 / (n*(n - 1))
            else:
                return x**(n) / factorial(n)

    def _eval_conjugate(self):
        return self.func(self.args[0].conjugate())

    def as_real_imag(self, deep=True, **hints):
        """
        Returns this function as a complex coordinate.
        """
        from sympy import cos, sin
        if self.args[0].is_extended_real:
            if deep:
                hints['complex'] = False
                return (self.expand(deep, **hints), S.Zero)
            else:
                return (self, S.Zero)
        if deep:
            re, im = self.args[0].expand(deep, **hints).as_real_imag()
        else:
            re, im = self.args[0].as_real_imag()
        # sinh(a + I*b) = sinh(a)cos(b) + I*cosh(a)sin(b)
        return (sinh(re)*cos(im), cosh(re)*sin(im))

    def _eval_expand_complex(self, deep=True, **hints):
        re_part, im_part = self.as_real_imag(deep=deep, **hints)
        return re_part + im_part*S.ImaginaryUnit

    def _eval_expand_trig(self, deep=True, **hints):
        if deep:
            arg = self.args[0].expand(deep, **hints)
        else:
            arg = self.args[0]
        x = None
        if arg.is_Add: # TODO, implement more if deep stuff here
            x, y = arg.as_two_terms()
        else:
            # Split n*x into x + (n-1)*x so the addition formula applies.
            coeff, terms = arg.as_coeff_Mul(rational=True)
            if coeff is not S.One and coeff.is_Integer and terms is not S.One:
                x = terms
                y = (coeff - 1)*x
        if x is not None:
            return (sinh(x)*cosh(y) + sinh(y)*cosh(x)).expand(trig=True)
        return sinh(arg)

    def _eval_rewrite_as_tractable(self, arg, **kwargs):
        return (exp(arg) - exp(-arg)) / 2

    def _eval_rewrite_as_exp(self, arg, **kwargs):
        return (exp(arg) - exp(-arg)) / 2

    def _eval_rewrite_as_cosh(self, arg, **kwargs):
        return -S.ImaginaryUnit*cosh(arg + S.Pi*S.ImaginaryUnit/2)

    def _eval_rewrite_as_tanh(self, arg, **kwargs):
        # Half-angle identity: sinh(x) = 2t/(1 - t**2) with t = tanh(x/2).
        tanh_half = tanh(S.Half*arg)
        return 2*tanh_half/(1 - tanh_half**2)

    def _eval_rewrite_as_coth(self, arg, **kwargs):
        coth_half = coth(S.Half*arg)
        return 2*coth_half/(coth_half**2 - 1)

    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)

        if x in arg.free_symbols and Order(1, x).contains(arg):
            # sinh(arg) ~ arg for small arg.
            return arg
        else:
            return self.func(arg)

    def _eval_is_real(self):
        arg = self.args[0]
        if arg.is_real:
            return True

        # if `im` is of the form n*pi
        # else, check if it is a number
        re, im = arg.as_real_imag()
        return (im%pi).is_zero

    def _eval_is_extended_real(self):
        if self.args[0].is_extended_real:
            return True

    def _eval_is_positive(self):
        # sinh preserves the sign of a real argument.
        if self.args[0].is_extended_real:
            return self.args[0].is_positive

    def _eval_is_negative(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_negative

    def _eval_is_finite(self):
        arg = self.args[0]
        return arg.is_finite

    def _eval_is_zero(self):
        arg = self.args[0]
        if arg.is_zero:
            return True
class cosh(HyperbolicFunction):
r"""
The hyperbolic cosine function, `\frac{e^x + e^{-x}}{2}`.
* cosh(x) -> Returns the hyperbolic cosine of x
See Also
========
sinh, tanh, acosh
"""
def fdiff(self, argindex=1):
if argindex == 1:
return sinh(self.args[0])
else:
raise ArgumentIndexError(self, argindex)
@classmethod
def eval(cls, arg):
from sympy import cos
arg = sympify(arg)
if arg.is_Number:
if arg is S.NaN:
r |
jessenieminen/aalto-fitness-homepage | manage.py | Python | mit | 256 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ = | = "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "aalto_fitness.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| |
saghul/sipwping-django | sipwping/forms.py | Python | gpl-3.0 | 333 | 0.012012 |
from django.forms import Form, RegexField
# Naive validation of a SIP URI
# Naive validation of a SIP URI: optional "user@" part followed by a
# dotted hostname with a 2-6 letter TLD.
# NOTE(review): the user character class contains a literal space --
# presumably intentional, but verify against the expected URI format.
_sip_uri_regex = r'^((?P<user>[a-zA-Z0-9 _\-\+\.\%]+)@)?(?P<host>([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6})$'

class SIPURIForm(Form):
    """Single-field form validating a SIP URI against _sip_uri_regex."""
    uri = RegexField(label='', regex=_sip_uri_regex, error_messages={'invalid': 'Invalid SIP URI'})
|
trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9-SunOS-i386/lib/python/lib/python2.4/email/Generator.py | Python | gpl-2.0 | 13,450 | 0.000967 | # Copyright (C) 2001-2004 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""Classes to generate plain text from a message object tree."""
import re
import sys
import time
import random
import warnings
from cStringIO import StringIO
from email.Header import Header
UNDERSCORE = '_'
NL = '\n'
fcre = re.compile(r'^From ', re.MULTILINE)
def _is8bitstring(s):
    # Python 2 helper: a byte string (str) that cannot be decoded as
    # us-ascii contains raw 8-bit data.  unicode inputs always return False.
    if isinstance(s, str):
        try:
            unicode(s, 'us-ascii')
        except UnicodeError:
            return True
    return False
class Generator:
"""Generates output from a Message object tree.
This basic generator writes the message to the given file object as plain
text.
"""
#
# Public interface
#
    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78):
        """Create the generator for message flattening.

        outfp is the output file-like object for writing the message to.  It
        must have a write() method.

        Optional mangle_from_ is a flag that, when True (the default), escapes
        From_ lines in the body of the message by putting a `>' in front of
        them.

        Optional maxheaderlen specifies the longest length for a non-continued
        header.  When a header line is longer (in characters, with tabs
        expanded to 8 spaces) than maxheaderlen, the header will split as
        defined in the Header class.  Set maxheaderlen to zero to disable
        header wrapping.  The default is 78, as recommended (but not required)
        by RFC 2822.
        """
        # All options are stored verbatim; clone() reuses them.
        self._fp = outfp
        self._mangle_from_ = mangle_from_
        self._maxheaderlen = maxheaderlen
    def write(self, s):
        """Write *s* straight through to the output file object."""
        # Just delegate to the file object
        self._fp.write(s)
    def flatten(self, msg, unixfrom=False):
        """Print the message object tree rooted at msg to the output file
        specified when the Generator instance was created.

        unixfrom is a flag that forces the printing of a Unix From_ delimiter
        before the first object in the message tree.  If the original message
        has no From_ delimiter, a `standard' one is crafted.  By default, this
        is False to inhibit the printing of any From_ delimiter.

        Note that for subobjects, no From_ line is printed.
        """
        if unixfrom:
            ufrom = msg.get_unixfrom()
            if not ufrom:
                # Craft a standard-looking envelope line if none was stored.
                ufrom = 'From nobody ' + time.ctime(time.time())
            print >> self._fp, ufrom
        self._write(msg)
# For backwards compatibility, but this is slower
    # For backwards compatibility, but this is slower
    def __call__(self, msg, unixfrom=False):
        """Deprecated alias for flatten(); emits a DeprecationWarning."""
        warnings.warn('__call__() deprecated; use flatten()',
                      DeprecationWarning, 2)
        self.flatten(msg, unixfrom)
    def clone(self, fp):
        """Clone this generator with the exact same options."""
        # Used by the multipart handler to render subparts into buffers.
        return self.__class__(fp, self._mangle_from_, self._maxheaderlen)
#
# Protected interface - undocumented ;/
#
    def _write(self, msg):
        """Render *msg*: body first into a buffer, then headers, then body."""
        # We can't write the headers yet because of the following scenario:
        # say a multipart message includes the boundary string somewhere in
        # its body.  We'd have to calculate the new boundary /before/ we write
        # the headers so that we can write the correct Content-Type:
        # parameter.
        #
        # The way we do this, so as to make the _handle_*() methods simpler,
        # is to cache any subpart writes into a StringIO.  The we write the
        # headers and the StringIO contents.  That way, subpart handlers can
        # Do The Right Thing, and can still modify the Content-Type: header if
        # necessary.
        oldfp = self._fp
        try:
            # Temporarily redirect all writes into an in-memory buffer.
            self._fp = sfp = StringIO()
            self._dispatch(msg)
        finally:
            self._fp = oldfp
        # Write the headers.  First we see if the message object wants to
        # handle that itself.  If not, we'll do it generically.
        meth = getattr(msg, '_write_headers', None)
        if meth is None:
            self._write_headers(msg)
        else:
            meth(self)
        self._fp.write(sfp.getvalue())
    def _dispatch(self, msg):
        """Route *msg* to the most specific _handle_* method available."""
        # Get the Content-Type: for the message, then try to dispatch to
        # self._handle_<maintype>_<subtype>().  If there's no handler for the
        # full MIME type, then dispatch to self._handle_<maintype>().  If
        # that's missing too, then dispatch to self._writeBody().
        main = msg.get_content_maintype()
        sub = msg.get_content_subtype()
        # Hyphens can't appear in method names; map them to underscores.
        specific = UNDERSCORE.join((main, sub)).replace('-', '_')
        meth = getattr(self, '_handle_' + specific, None)
        if meth is None:
            generic = main.replace('-', '_')
            meth = getattr(self, '_handle_' + generic, None)
            if meth is None:
                meth = self._writeBody
        meth(msg)
#
# Default handlers
#
    def _write_headers(self, msg):
        """Write all of *msg*'s headers, wrapping long ones as configured."""
        for h, v in msg.items():
            print >> self._fp, '%s:' % h,
            if self._maxheaderlen == 0:
                # Explicit no-wrapping
                print >> self._fp, v
            elif isinstance(v, Header):
                # Header instances know what to do
                print >> self._fp, v.encode()
            elif _is8bitstring(v):
                # If we have raw 8bit data in a byte string, we have no idea
                # what the encoding is.  There is no safe way to split this
                # string.  If it's ascii-subset, then we could do a normal
                # ascii split, but if it's multibyte then we could break the
                # string.  There's no way to know so the least harm seems to
                # be to not split the string and risk it being too long.
                print >> self._fp, v
            else:
                # Header's got lots of smarts, so use it.
                print >> self._fp, Header(
                    v, maxlinelen=self._maxheaderlen,
                    header_name=h, continuation_ws='\t').encode()
        # A blank line always separates headers from body
        print >> self._fp
#
# Handlers for writing types and subtypes
#
    def _handle_text(self, msg):
        """Write a text/* payload, applying charset encoding and From_ mangling."""
        payload = msg.get_payload()
        if payload is None:
            return
        cset = msg.get_charset()
        if cset is not None:
            payload = cset.body_encode(payload)
        if not isinstance(payload, basestring):
            raise TypeError('string payload expected: %s' % type(payload))
        if self._mangle_from_:
            # Escape body lines that would look like mbox envelope delimiters.
            payload = fcre.sub('>From ', payload)
        self._fp.write(payload)

    # Default body handler
    _writeBody = _handle_text
def _handle_multipart(self, msg):
# The trick here is to write out each part separately, merge them all
# together, and then make sure that the boundary we've chosen isn't
# present in the payload.
msgtexts = []
subparts = msg.get_payload()
if subparts is None:
subparts = []
elif isinstance(subparts, basestring):
# e.g. a non-strict parse of a message with no starting boundary.
self._fp.write(subparts)
return
elif not isinstance(subparts, list):
# Scalar payload
subparts = [subparts]
for part in subparts:
s = StringIO()
g = self.clone(s)
g.flatten(part, unixfrom=False)
msgtexts.append(s.getvalue())
# Now make sure the boundary we've selected doesn't appear in any of
# the message texts.
alltext = NL.join(msgtexts)
# BAW: What about boundaries that are wrapped in double-quotes?
boundary = msg.get_boundary(failobj=_make_boundary(alltext))
# If we had to calculate a new boundary because the body text
# contained that string, set the new boundary. We don't do it
# unconditionally because, while set_boundary() preserves order, it
# doesn't preserve newlines/continuations in headers. This is no big
# deal in practice, but turns out to be inco |
flaviovdf/phoenix | phoenix/basic_io/__init__.py | Python | bsd-3-clause | 88 | 0.011364 | #-*- c | oding: utf8
from __future__ import division, print_function
__all__ = [ | 'parser']
|
qunying/gps | share/support/ui/pygps/notebook.py | Python | gpl-3.0 | 1,632 | 0.000613 | """ This module provides a higher-level interface to notebooks.
It relies on the pygobject package.
"""
try:
from gi.repository import Gtk, GObject
import pygps
    def switch_notebook_page(notebook, label):
        """Switch to the page with the given label in the notebook.
           Returns the page number that was selected.
           -1 is returned if no such page exists."""

        # Walk pages until get_nth_page returns None (past the last page).
        page = 0
        while notebook.get_nth_page(page):
            p = notebook.get_nth_page(page)
            if get_notebook_page_label(notebook, p).get_label() == label:
                notebook.set_current_page(page)
                # Let GTK process the page-switch before returning.
                pygps.process_all_events()
                return page
            page = page + 1
        return -1
    def get_notebook_page_label(notebook, page):
        """Return the Gtk.Label widget inside a notebook page's tab.

           `page` is the page's child widget (as returned by get_nth_page),
           not an integer index: Gtk.Notebook.get_tab_label takes the child
           widget itself, and all callers in this module pass widgets."""

        return pygps.get_widgets_by_type \
            (Gtk.Label, notebook.get_tab_label(page))[0]
def | get_notebook_pages(notebook):
"""Return the list of all visible pages for the notebook"""
pages = []
for p in range(0, notebook.get_n_pages()):
page = notebook.get_nth_page(p)
if page.get_visible():
pages.append(page | )
return pages
    def get_notebook_page_labels_text(notebook):
        """Return a list containing the labels of all visible pages for the
           notebook"""

        return [get_notebook_page_label(notebook, p).get_label()
                for p in get_notebook_pages(notebook)]
except ImportError:
pass
|
sibis-platform/ncanda-data-integration | scripts/redcap/import_mr_sessions_stroop.py | Python | bsd-3-clause | 5,597 | 0.021083 | #!/usr/bin/env python
##
## See COPYING file distr | ibuted along with the ncanda-data-integration package
## for the copyright and license terms
##
from __future__ import print_function
from builtins import str
import os
import re
import tempfile
import shutil
from sibis | py import sibislogger as slog
from sibispy import utils as sutils
#
# Check for Stroop data (ePrime log file) in given XNAT session
#
import_bindir = os.path.join( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ), 'import', 'laptops' )
bindir = os.path.dirname( os.path.abspath(__file__) )
# Check a list of experiments for ePrime Stroop files
def check_for_stroop( xnat, xnat_eid_list, verbose=False ):
    """Scan the given XNAT experiments for an ePrime Stroop log file.

    Returns a (experiment_id, resource_id, file_path) triple for the single
    matching file, or (None, None, None) if no file is found.  More than one
    match is treated as an error (logged per experiment) and also returns
    (None, None, None).
    """
    stroop_files = []
    if verbose :
        print("check_for_stroop: " + str(xnat_eid_list))
    for xnat_eid in xnat_eid_list:
        experiment = xnat.select.experiments[ xnat_eid ]

        # Get list of resource files that match the Stroop file name pattern
        for resource in list(experiment.resources):
            resource_files = xnat._get_json( '/data/experiments/%s/resources/%s/files' % ( xnat_eid, resource ) );
            # Keep only ePrime logs matching the NCANDA Stroop naming scheme;
            # strip everything up to ".../files/" to get a relative path.
            stroop_files += [ (xnat_eid, resource, re.sub( '.*\/files\/', '', file['URI']) ) for file in resource_files if re.match( '^NCANDAStroopMtS_3cycles_7m53stask_.*.txt$', file['Name'] ) ]

    # No matching files - nothing to do
    if len( stroop_files ) == 0:
        if verbose :
            print("check_for_stroop: no stroop")
        return (None, None, None)

    # Get first file from list, warn if more files
    if len( stroop_files ) > 1:
        error = "ERROR: experiment have/has more than one Stroop .txt file. Please make sure there is exactly one per session."
        for xnat_eid in xnat_eid_list:
            slog.info(xnat_eid,error)
        return (None, None, None)
    if verbose :
        print("check_for_stroop: Stroop File: " + str(stroop_files[0]))
    return stroop_files[0]
# Import a Stroop file into REDCap after scoring
# Import a Stroop file into REDCap after scoring
def import_stroop_to_redcap( xnat, stroop_eid, stroop_resource, stroop_file, \
                             redcap_key, verbose=False, no_upload=False, post_to_github=False, time_log_dir=None):
    """Download an ePrime Stroop log from XNAT, score it, and import it.

    Runs stroop2csv on the downloaded log, uploads the resulting CSV scores
    via csv2redcap (unless no_upload is set), and archives the raw log via
    eprime2redcap.  All failures are reported through sibislogger; nothing
    is raised to the caller.

    Fix: the scratch directory is now removed in a finally block, so it is
    no longer leaked when the download fails and the function returns early.
    """
    if verbose:
        print("Importing Stroop data from file %s:%s" % ( stroop_eid, stroop_file ))

    # Download Stroop file from XNAT into temporary directory
    experiment = xnat.select.experiments[stroop_eid]
    tempdir = tempfile.mkdtemp()
    try:
        try:
            stroop_file_path = os.path.join( tempdir, stroop_file )
            stroop_dir_path = os.path.dirname(stroop_file_path)
            if not os.path.isdir(stroop_dir_path):
                os.makedirs(stroop_dir_path)
            experiment.resources[stroop_resource].files[stroop_file].download( stroop_file_path, verbose=False )
        except IOError as e:
            details = "Error: import_mr_sessions_stroop: unable to get copy resource {0} file {1} to {2}".format(stroop_resource, stroop_file, stroop_file_path)
            slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), details, error_obj={ 'message': str(e), 'errno': e.errno, 'filename': e.filename, 'strerror': e.strerror })
            return

        # Convert downloaded Stroop file to CSV scores file
        cmd = str(os.path.join(import_bindir, "stroop2csv")) + f' --mr-session --record "{redcap_key[0]}" --event "{redcap_key[1]}" "{str(stroop_file_path)}" "{str(tempdir)}"'
        (ecode,sout, serr) = sutils.call_shell_program(cmd)
        if ecode:
            slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), "Error: import_stroop_to_redcap: failed to run stroop2csv!", cmd = str(cmd), stderr = str(serr), stdout = str(sout))

        added_files = sout
        if len( added_files ):
            if not no_upload:
                # Upload CSV file(s) (should only be one anyway)
                for file in added_files.decode('utf-8').split( '\n' ):
                    if re.match( '.*\.csv$', file ):
                        if verbose:
                            print("Uploading ePrime Stroop scores",file)
                        cmd = str(os.path.join( bindir, 'csv2redcap' ))
                        if post_to_github:
                            cmd += " -p"
                        if time_log_dir:
                            cmd += " -t " + str(time_log_dir)
                        cmd += " " + str(file)
                        (ecode,sout, serr) = sutils.call_shell_program(cmd)
                        if ecode:
                            slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), "Error: import_stroop_to_redcap: failed to run csv2redcap!", cmd = str(cmd), stderr = str(serr), stdout = str(sout))
                # Upload original ePrime file for future reference
                cmd = str(os.path.join( import_bindir, "eprime2redcap" ))
                if post_to_github:
                    cmd += " -p"
                cmd += f' --project data_entry --record {redcap_key[0]} --event {redcap_key[1]} "{str(stroop_file_path)}" mri_stroop_log_file'
                if verbose:
                    print("Uploading ePrime Stroop file",stroop_file_path)
                (ecode,sout, serr) = sutils.call_shell_program(cmd)
                if ecode:
                    slog.info(str(redcap_key[0]) + "-" + str(redcap_key[1]), "Error: import_stroop_to_redcap: failed to run eprime2redcap!", cmd = str(cmd), stderr = str(serr), stdout = str(sout))
        else:
            error = "ERROR: could not convert Stroop file %s:%s" % ( redcap_key[0], stroop_file )
            slog.info(str(redcap_key[0]) + '-' + str(redcap_key[1]), error,
                      stroop_file = stroop_file)
    finally:
        # Always remove the scratch directory, including on early return.
        shutil.rmtree( tempdir )
|
udoprog/python-adc | adc/parser.py | Python | bsd-3-clause | 5,221 | 0.014557 | from pyparsing import *
from .arguments import *
import string
"""
The following module is a parser based on the ADC specification version 1.0:
http://adc.sourceforge.net/ADC.html
"""
# Wire-format literals from the ADC 1.0 specification.
FEATURE_ADD="+"
FEATURE_REM="-"
SEPARATOR=" "
TYPE_SEP=":"
EOL="\n"
# Message-type prefixes, grouped by which header grammar they select below.
B_HEADER = ["B"];
CIH_HEADER = ["C", "I", "H"];
DE_HEADER = ["D", "E"];
F_HEADER = ["F"];
U_HEADER = ["U"];
"""
A pyparser parsing with all methods encapsulated as static fields in this class.
"""
# ADC is whitespace-sensitive, so disable pyparsing's implicit whitespace
# skipping; packrat memoization speeds up this alternation-heavy grammar.
ParserElement.setDefaultWhitespaceChars("")
ParserElement.enablePackrat();
"""
separator ::= ' '
"""
separator = Literal(SEPARATOR).suppress()
"""
eol ::= #x0a
"""
eol = Literal(EOL)
"""
simple_alphanum ::= [A-Z0-9]
"""
# NOTE: string.uppercase is a Python 2 name (string.ascii_uppercase on Python 3).
simple_alphanum = string.uppercase + string.digits
"""
simple_alpha ::= [A-Z]
"""
simple_alpha = string.uppercase
"""
base32_character ::= simple_alpha | [2-7]
"""
base32_character = simple_alpha + "234567"
"""
escape ::= '\'
"""
escape = "\\"
"""
convenience functions for escaped_letter
"""
# The parse actions decode the ADC escape sequences ("\n", "\s", "\\")
# into the literal newline, space and backslash characters.
escaped_nl = Literal(escape + "n").setParseAction(lambda s, l, t: "\n")
escaped_s = Literal(escape + "s").setParseAction(lambda s, l, t: " ")
escaped_bs = Literal(escape + escape).setParseAction(lambda s, l, t: "\\")
"""
escaped_letter ::= [^ \#x0a] | escape 's' | escape 'n' | escape escape
"""
escaped_letter = (escaped_s | escaped_nl | escaped_bs) | Regex("[^ \n]")
"""
feature_name ::= simple_alpha simple_alphanum{3}
"""
feature_name = Combine(Word(simple_alpha, exact=1) + Word(simple_alphanum, exact=3))
"""
encoded_sid ::= base32_character{4}
"""
encoded_sid = Word(base32_character, exact=4)
"""
my_sid ::= encoded_sid
"""
my_sid = encoded_sid.setResultsName('my_sid');
"""
encoded_cid ::= base32_character+
"""
encoded_cid = Word(base32_character)
"""
my_cid ::= encoded_cid
"""
my_cid = encoded_cid.setResultsName('my_cid');
"""
target_sid ::= encoded_sid
"""
target_sid = encoded_sid.setResultsName('target_sid')
"""
command_name ::= simple_alpha simple_alphanum simple_alphanum
"""
command_name = Combine(Word(simple_alpha, exact=1) + Word(simple_alphanum, exact=2)).setResultsName('command_name')
"""
parameter_value ::= escaped_letter+
"""
parameter_value = Combine(OneOrMore(escaped_letter))
"""
parameter_type ::= 'INT' | 'STR' | 'B32' | 'IP4' | 'IP6'
"""
parameter_type = (Literal(INT) | Literal(STR) | Literal(B32) | Literal(IP4) | Literal(IP6))
"""
parameter_name ::= simple_alpha simple_alphanum
"""
parameter_name = Combine(Word(simple_alpha, exact=1) + Word(simple_alphanum, exact=1))
"""
parameter ::= parameter_type ':' parameter_name (':' parameter_value)?
"""
parameter = parameter_value
"""
convenience function for parameters
"""
parameters = ZeroOrMore(separator + parameter).setResultsName('parameters')
"""
convenience function for f_message_header
"""
feature_list = OneOrMore(Group(separator + (Literal(FEATURE_ADD) | Literal(FEATURE_REM)) + feature_name)).setResultsName('feature_list')
"""
b_message_header ::= 'B' command_name separator my_sid
"""
b_message_header = Word(B_HEADER, exact=1).setResultsName('type') + command_name + separator + my_sid;
"""
cih_message_header ::= ('C' | 'I' | 'H') command_name
"""
cih_message_header = (Word(CIH_HEADER, exact=1)).setResultsName('type') + command_name
"""
de_message_header ::= ('D' | 'E') command_name separator my_sid separator target_sid
"""
de_message_header = Word(DE_HEADER, exact = 1).setResultsName('type') + command_name + separator + my_sid + separator + target_sid
"""
f_message_header ::= 'F' command_name separator my_sid separator (('+'|'-') feature_name)+
"""
f_message_header = Word(F_HEADER, exact=1).setResultsName('type') + command_name + separator + my_sid + feature_list
"""
u_message_header ::= 'U' command_name separator my_cid
"""
u_message_header = Word(U_HEADER, exact=1).setResultsName('type') + command_name + separator + my_cid
"""
convenience function to match all different message headers.
"""
message_header = (b_message_header | cih_message_header | de_message_header | f_message_header | u_message_header).setResultsName('message_header');
"""
message_body ::= (b_messag | e_header | cih_message_header | d | e_message_header | f_message_header | u_message_header | message_header)
(separator parameter)*
"""
message_body = (message_header + parameters).setResultsName('message_body');
"""
message ::= message_body? eol
"""
message = Optional(message_body) + StringEnd();
def parseFrame(s):
    """
    Parse one complete ADC frame (without its trailing newline) and
    return the resulting pyparsing tree of syntax nodes.
    """
    tree = message.parseString(s, parseAll=True)
    return tree
# Public names exported by `from adc.parser import *`.
__all__ = [
    "FEATURE_ADD",
    "FEATURE_REM",
    "SEPARATOR",
    "TYPE_SEP",
    "EOL",
    "B_HEADER",
    "CIH_HEADER",
    "DE_HEADER",
    "F_HEADER",
    "U_HEADER",
    "parseFrame"
];
|
s-hertel/ansible | test/units/plugins/connection/test_ssh.py | Python | gpl-3.0 | 31,464 | 0.003564 | # -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import pytest
from ansible import constants as C
from ansible.errors import AnsibleAuthenticationFailure
from units.compat import unittest
from units.compat.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.compat.selectors import SelectorKey, EVENT_READ
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.plugins.loader import connection_loader, become_loader
class TestConnectionBaseClass(unittest.TestCase):
    def test_plugins_connection_ssh_module(self):
        """The ssh connection plugin can be instantiated directly."""
        play_context = PlayContext()
        # A become prompt is set to mimic a realistic play context.
        play_context.prompt = (
            '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
        )
        in_stream = StringIO()
        self.assertIsInstance(ssh.Connection(play_context, in_stream), ssh.Connection)
    def test_plugins_connection_ssh_basic(self):
        """Basic lifecycle: _connect, sshpass detection, and close."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        # connect just returns self, so assert that
        res = conn._connect()
        self.assertEqual(conn, res)
        # When the module-level flag is already set, detection is skipped
        # and the cached value is returned as-is.
        ssh.SSHPASS_AVAILABLE = False
        self.assertFalse(conn._sshpass_available())
        ssh.SSHPASS_AVAILABLE = True
        self.assertTrue(conn._sshpass_available())
        with patch('subprocess.Popen') as p:
            # A flag of None forces re-detection by spawning sshpass.
            ssh.SSHPASS_AVAILABLE = None
            p.return_value = MagicMock()
            self.assertTrue(conn._sshpass_available())
            # OSError from Popen (binary missing) means "not available".
            ssh.SSHPASS_AVAILABLE = None
            p.return_value = None
            p.side_effect = OSError()
            self.assertFalse(conn._sshpass_available())
        conn.close()
        self.assertFalse(conn._connected)
    def test_plugins_connection_ssh__build_command(self):
        """Smoke test: _build_command on a loader-created connection must not raise."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)
        conn._build_command('ssh', 'ssh')
    def test_plugins_connection_ssh_exec_command(self):
        """Smoke test exec_command with _build_command/_run mocked out.

        The return values are deliberately not asserted; the test only
        checks that exec_command runs and unpacks a 3-tuple of
        (returncode, stdout, stderr), with and without in_data.
        """
        pc = PlayContext()
        new_stdin = StringIO()
        conn = connection_loader.get('ssh', pc, new_stdin)
        conn._build_command = MagicMock()
        conn._build_command.return_value = 'ssh something something'
        conn._run = MagicMock()
        conn._run.return_value = (0, 'stdout', 'stderr')
        conn.get_option = MagicMock()
        conn.get_option.return_value = True
        res, stdout, stderr = conn.exec_command('ssh')
        res, stdout, stderr = conn.exec_command('ssh', 'this is some data')
def test_plugins_connection_ssh__examine_output(self):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('ssh', pc, new_stdin)
conn.set_become_plugin(become_loader.get('sudo'))
conn.check_password_prompt = MagicMock()
conn.check_become_success = MagicMock()
conn.check_incorrect_password = MagicMock()
conn.check_missing_password = MagicMock()
def _check_password_prompt(line):
if b'foo' in line:
return True
return False
def _check_become_success(line):
if b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line:
return True
return False
def _check_incorrect_password(line):
if b'incorrect password' in line:
return True
return False
def _check_missing_password(line):
if b'bad password' in line:
return True
return False
conn.become.check_password_prompt = MagicMock(side_effect=_check_password_prompt)
conn.become.check_become_success = MagicMock(side_effect=_check_become_success)
conn.become.check_incorrect_password = MagicMock(side_effect=_check_incorrect_password)
conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)
# test examining output for prompt
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = True
conn.become.prompt = True
def get_option(option):
if option == 'become_pass':
return 'password'
return None
conn.become.get_option = get_option
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
self.assertEqual(output, b'line 1\nline 2\nline 3\n')
self.assertEqual(unprocessed, b'this should be the remainder')
self.assertTrue(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for become prompt
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = False
conn.become.prompt = False
pc.success_key = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
conn.become.success = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False)
self.assertEqual(output, b'line 1\nline 2\nline 3\n' | )
self.assertEqual(unprocessed, b'')
self.assertFalse(conn._flags['become_prompt'])
self.assertTrue(conn._flags['become_success'])
self.assertFalse(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining outpu | t for become failure
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = False
conn.become.prompt = False
pc.success_key = None
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nincorrect password\n', True)
self.assertEqual(output, b'line 1\nline 2\nincorrect password\n')
self.assertEqual(unprocessed, b'')
self.assertFalse(conn._flags['become_prompt'])
self.assertFalse(conn._flags['become_success'])
self.assertTrue(conn._flags['become_error'])
self.assertFalse(conn._flags['become_nopasswd_error'])
# test examining output for missing password
conn._flags = dict(
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)
pc.prompt = False
conn.become.prompt = False
pc.success_key = None
output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nbad password\n', True)
self.assertEqual(output, b'line 1\nbad password\n')
self.assertEqual(unprocessed, b'')
self.assertFalse(conn._flags['become_pro |
mosquito/rest-client | setup.py | Python | mit | 1,043 | 0.000959 | #!/usr/bin/env python
# encoding: utf-8
from setuptools import setup, find_packages
import rest_client
import sys
# Base runtime dependencies; extended below for old interpreters.
requires = [
    'tornado',
]
if sys.version_info < (3,):
    # Python 2 lacks concurrent.futures; the 'futures' backport provides it.
    requires.append('futures')
setup(
    name='rest-client',
    version=rest_client.__version__,
    author=rest_client.__author__,
    url="https://github.com/mosquito/rest-client",
    author_email=rest_client.author_info[1],
    license="MIT",
    description="RESTful Client for tornado",
    platforms="all",
    classifiers=[
        'Environment :: Console',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
    long_description=open('README.rst').read(),
    packages=find_packages(exclude=['tests']),
    install_requires=requires,
)
|
maggu/hack | tictactoe.py | Python | gpl-2.0 | 8,127 | 0.002461 | #!/usr/bin/env python3
# Copyright (C) 2019 C C Magnus Gustavsson
# Released under the GNU General Public License
"""Tic-tac-toe / noughts and crosses / Xs and Os using Pygame
Choose if each side is to be played by the computer or a human
"""
import pygame
import sys
from math import sqrt
from random import choice
from time import sleep
# Define the colors (RGB)
BLACK = ( 16, 16, 16)
GREEN = (128, 192, 128)
WHITE = (255, 255, 255)
# For easy translation
STRINGS = {
'player': "Player",
'computer': "Computer",
'human': "Human"
}
# Initialize graphics
SCREEN = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
# Set grid position and sizes
INFO = pygame.display.Info()
WIDTH, HEIGHT = INFO.current_w, INFO.current_h
SIZE = min(WIDTH, HEIGHT)
START_X = (WIDTH - SIZE) // 2
START_Y = (HEIGHT - SIZE) // 2
SQUARE_SIZE = SIZE // 3
SYMBOL_SIZE = SIZE // 10
LINE_SIZE = SIZE // 20
# Player selection
COMPUTER = 1
HUMAN = 2
MENU = [
# text, x, y, color
["{} {{}}".format(STRINGS['player']), 4, 4, BLACK],
["1 - {}".format(STRINGS['computer']), 5, 5, WHITE],
["2 - {}".format(STRINGS['human']), 5, 6, WHITE]
]
# Initialize pygame
pygame.init()
# Fonts
FONT = 'freesansbold.ttf'
LARGE_FONT = pygame.font.Font(FONT, SIZE // 16)
SMALL_FONT = pygame.font.Font(FONT, SIZE // 32)
# X and O shapes
X_INNER = LINE_SIZE // sqrt(2)
X_OUTER = (SYMBOL_SIZE - LINE_SIZE / 2) // sqrt(2)
O_INNER = SYMBOL_SIZE - LINE_SIZE
O_OUTER = SYMBOL_SIZE
# Grid coordinates for the squares
POSITION_X = [0, 1, 2, 0, 1, 2, 0, 1, 2]
POSITION_Y = [2, 2, 2, 1, 1, 1, 0, 0, 0]
# The two diagonals
DIAGONALS = [
[0, 4, 8], [2, 4, 6]
]
# All possible ways to place three in a row
ROWS = [
[0, 4, 8], [2, 4, 6], [0, 1, 2], [3, 4, 5],
[6, 7, 8], [0, 3, 6], [1, 4, 7], [2, 5, 8]
]
WIN_COUNTS = [(3, 0), (0, 3)] # Three in a row
# Computer heuristic
COUNT_PRIORITIES = [
[(2, 0), (0, 2), (1, 0)], # Player 1
[(0, 2), (2, 0), (0, 1)] # Player 2
]
SQUARE_PRIORITIES = [
[4], # Center
[0, 2, 6, 8], # Corner
[1, 3, 5, 7] # Remaining
]
def get_key(any_key=False):
    """Block until a useful key is pressed.

    Returns a number 1-9 (main row or keypad), or None when *any_key*
    is true and any key is pressed. Escape quits the program.
    """
    while True:
        event = pygame.event.wait()
        if event.type != pygame.KEYDOWN:
            continue
        if event.key == pygame.K_ESCAPE:
            pygame.quit()
            sys.exit()
        if any_key:
            return None
        for base in (pygame.K_0, pygame.K_KP0):
            digit = event.key - base
            if 1 <= digit <= 9:
                return digit
def draw_menuitem(player, line, invert=False):
    """Render one entry of the player-selection menu.

    *invert* swaps foreground and background to highlight a chosen entry.
    """
    label, grid_x, grid_y, item_color = MENU[line]
    position = (LINE_SIZE * grid_x, LINE_SIZE * (5 * player + grid_y))
    if invert:
        foreground, background = GREEN, item_color
    else:
        foreground, background = item_color, GREEN
    surface = SMALL_FONT.render(label.format(player + 1), True,
                                foreground, background)
    rect = surface.get_rect()
    rect.left, rect.top = position
    SCREEN.blit(surface, rect)
def player_select():
    """Ask, for each of the two sides, whether it is computer- or
    human-controlled. Returns a list of two booleans (True = computer)."""
    SCREEN.fill(GREEN)
    is_computer = [None, None]
    for player in range(2):
        # Show this player's three menu lines, then wait for a 1/2 choice.
        for line in range(3):
            draw_menuitem(player, line)
        pygame.display.flip()
        while True:
            key = get_key()
            if key == COMPUTER:
                is_computer[player] = True
                # Highlight the chosen entry (invert=True).
                draw_menuitem(player, 1, True)
                pygame.display.flip()
                break
            if key == HUMAN:
                is_computer[player] = False
                draw_menuitem(player, 2, True)
                pygame.display.flip()
                break
    # Small pause so the highlighted choice is visible before the game starts.
    sleep(0.5)
    return is_computer
def draw_line(start_pos, end_pos):
    """Draw a black grid line of LINE_SIZE thickness between the two points."""
    pygame.draw.line(SCREEN, BLACK, start_pos, end_pos, LINE_SIZE)
def draw_grid():
    """Paint the empty 3x3 board: grid lines plus the digits 1-9 that
    label each square for keyboard input."""
    SCREEN.fill(GREEN)
    right = START_X + SIZE
    bottom = START_Y + SIZE
    for offset in range(0, SIZE, SQUARE_SIZE):
        # One vertical and one horizontal line per step.
        draw_line((START_X + offset, START_Y), (START_X + offset, bottom))
        draw_line((START_X, START_Y + offset), (right, START_Y + offset))
    for square in range(9):
        digit = LARGE_FONT.render(str(square + 1), True, WHITE, GREEN)
        rect = digit.get_rect()
        rect.center = get_position(square)
        SCREEN.blit(digit, rect)
    pygame.display.flip()
def draw_x(x, y, color):
    """Mark a square with an X.

    The X is a single 12-vertex polygon centred on (x, y); X_INNER and
    X_OUTER are the stroke half-width and arm length projected onto the
    diagonals (both divided by sqrt(2) at module level).
    """
    points = [(x, y - X_INNER),
              (x + X_OUTER, y - X_OUTER - X_INNER),
              (x + X_OUTER + X_INNER, y - X_OUTER),
              (x + X_INNER, y),
              (x + X_OUTER + X_INNER, y + X_OUTER),
              (x + X_OUTER, y + X_OUTER + X_INNER),
              (x, y + X_INNER),
              (x - X_OUTER, y + X_OUTER + X_INNER),
              (x - X_OUTER - X_INNER, y + X_OUTER),
              (x - X_INNER, y),
              (x - X_OUTER - X_INNER, y - X_OUTER),
              (x - X_OUTER, y - X_OUTER - X_INNER)]
    pygame.draw.polygon(SCREEN, color, points)
def draw_o(x, y, color):
    """Mark a square with an O: a filled disc with its middle cut out
    by over-painting a smaller background-colored disc."""
    center = (x, y)
    pygame.draw.circle(SCREEN, color, center, O_OUTER)
    pygame.draw.circle(SCREEN, GREEN, center, O_INNER)
def draw_mark(player, square, color=WHITE, flip=True):
    """Draw player 1's X or player 2's O on the given square.

    Any other *player* value (e.g. 0 for an empty square) draws nothing.
    Set flip=False to batch several marks before one display update.
    """
    x, y = get_position(square)
    painter = {1: draw_x, 2: draw_o}.get(player)
    if painter is not None:
        painter(x, y, color)
    if flip:
        pygame.display.flip()
def square_to_coord(start, position):
    """Map a 0-based row/column index to the pixel coordinate of the
    center of that square along one axis."""
    half_square = SQUARE_SIZE // 2
    return start + position * SQUARE_SIZE + half_square
def get_position(number):
    """Return the screen-pixel center (x, y) of square *number* (0-8)."""
    return (square_to_coord(START_X, POSITION_X[number]),
            square_to_coord(START_Y, POSITION_Y[number]))
def analyze(state):
    """Count each player's marks on every possible line.

    Returns a list parallel to ROWS of (player1_count, player2_count).
    """
    counts = []
    for row in ROWS:
        marks = [state[square] for square in row]
        counts.append((marks.count(1), marks.count(2)))
    return counts
def computer_select(player, counts, state):
    """Choose a square for the computer to play,
    using a heuristic that won't always play optimally.

    Row-count priorities are tried in order (win, block, extend); among
    matching rows a diagonal is preferred. If no priority yields an
    empty square, fall back to center, then corners, then edges.
    """
    sleep(0.5)  # brief pause so the computer's move is visible
    for priority in COUNT_PRIORITIES[player - 1]:
        candidate_rows = [row for row, count in zip(ROWS, counts)
                          if count == priority]
        if candidate_rows:
            diagonal_rows = [row for row in candidate_rows
                             if row in DIAGONALS]
            row = choice(diagonal_rows or candidate_rows)
            for square in row:
                if state[square] == 0:
                    return square
    for group in SQUARE_PRIORITIES:
        open_squares = [square for square in group if state[square] == 0]
        if open_squares:
            return choice(open_squares)
def game_over(state, keep_color):
    """End-of-game display: every square not listed in *keep_color* has
    its mark redrawn in black, then wait for any keypress."""
    for square in range(9):
        if square in keep_color:
            continue
        draw_mark(state[square], square, color=BLACK, flip=False)
    pygame.display.flip()
    get_key(any_key=True)
def play_game():
    """Play one game: ask who controls each side, then alternate moves
    until one player has three in a row or the board fills up.

    Bug fix: the board-full (draw) check used to run *before* the win
    check and before the mark was drawn, so a game-winning move that
    also filled the ninth square was reported as a draw and its mark
    never shown. The win check now always runs first.
    """
    is_computer = player_select()
    counts = []
    player = 1
    state = [0, 0, 0, 0, 0, 0, 0, 0, 0]
    draw_grid()
    while True:
        if is_computer[player - 1]:
            square = computer_select(player, counts, state)
        else:
            while True:
                square = get_key() - 1
                # Check that a human player chose a valid (empty) square
                if state[square] == 0:
                    break
        # Mark the square as belonging to the player and show it
        state[square] = player
        draw_mark(player, square)
        counts = analyze(state)
        # Detect a win before the board-full check so a winning final
        # move is not misreported as a draw.
        wins = (row for row, count in zip(ROWS, counts)
                if count in WIN_COUNTS)
        win = next(wins, None)
        if win:
            game_over(state, win)
            break
        if 0 not in state:
            # Board is full with no winner: it's a draw
            game_over(state, [])
            break
        # Other player's turn (1 -> 2, 2 -> 1)
        player = 3 - player
# Main loop: play games back to back until the user presses Escape
# (get_key exits the program via pygame.quit / sys.exit).
while True:
    play_game()
|
shumik/skencil-c | Script/select_same_fill_color.py | Python | gpl-2.0 | 3,431 | 0.001457 | # Sketch - A Python-based interactive drawing program
# Copyright (C) 1999 by Bernhard Herzog
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Select all objects in the current layer with the same fill color as
# the currently selected object. This is implemented as an advanced
# script. It doesn't have to deal with undo because it only changes the
# set of currently selected objects and not the objects themselves.
#
# Conceps and Methods:
#
# CurrentProperties():
#
# This document method returns the properties of the currently
# selected object. If more than one objects are selected or no
# object is selected or the selected object doesn't have
# properties, a special property object EmptyProperties is
# returned.
#
# Now, what does this mean? Objects like rectangles, text and
# curves have graphics properties like fill or line patters, line
# width or font whatever is applicable for that particular type.
# Some obejcts have no graphics properties at all, e.g. groups,
# while others can only have some properties, e.g. text objects
# currently can't have a line color (this is really a limitation
# in X11, PostScript wouldn't have problems with that).
#
# All of the properties are stored in a properties object, and
# that is what the CurrentProperties() method returns. Such a
#       properties object has three methods that indicate whether the
# fill-, line- or text properties are valid: HasFill(), HasLine()
# and HasFont(). Only if one of those methods returns true, can
# you safely access the respective properties. The properties are
# publicly readable attributes of the properties object. For the
# EmptyProperties object that may be returned by
# CurrentProperties(), all of these methods return false.
#
import time
def select_same_fill_color(context):
    """Select every object on the active layer whose solid fill color
    matches the fill color of the currently selected object."""
    doc = context.document
    properties = doc.CurrentProperties()
    if not properties.HasFill():
        # Nothing selected, or the selection has no fill to compare with.
        return
    wanted_color = properties.fill_pattern.Color()
    layer = doc.ActiveLayer()
    doc.SelectNone()
    matching = []
    for obj in layer.GetObjects():
        if not obj.has_fill:
            continue
        obj_properties = obj.Properties()
        if (obj_properties.HasFill() and obj_properties.fill_pattern.is_Solid
                and wanted_color == obj_properties.fill_pattern.Color()):
            matching.append(obj)
    doc.SelectObject(matching, Sketch.const.SelectAdd)
# Register the function so it shows up in Sketch's script menu.
# It is an advanced script: it only changes the current selection and
# therefore does not need to handle undo (see the header comment above).
import Sketch.Scripting
Sketch.Scripting.AddFunction('select_same_fill_color',
                             'Select Same Fill Color',
                             select_same_fill_color,
                             script_type = Sketch.Scripting.AdvancedScript)
|
matthewbauer/reddwall | setup.py | Python | mit | 447 | 0.004474 | #!/usr/bin/env python
#from distutils.core import setup
from setuptools import setup
# Packaging metadata; run e.g. `python setup.py install`.
setup(name='ReddWall',
      version='0.2',
      description='Get random wallpapers from Reddit',
      author='Matthew Bauer',
      author_email='mjbauer95@gmail.com',
      url='http://github.com/matthewbauer/reddwall',
      # Third-party runtime dependencies.
      install_requires=['requests', 'beautifulsoup4', 'praw', 'wxpython'],
      # NOTE(review): packages lists 'detools' while the entry script is
      # reddwall.py -- confirm this package name is intentional.
      packages=['detools'],
      scripts=['reddwall.py'],
      )
|
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/contrib/sessions/backends/cached_db.py | Python | bsd-3-clause | 1,823 | 0 | """
Cached, database-backed sessions.
"""
from django.conf import settings
from django.contrib.sessions.backends.db import SessionStore as DBStore
from django.core.cache import cache
KEY_PREFIX = "django.contrib.sessions.cached_db"
class SessionStore(DBStore):
    """
    Implements cached, database backed sessions.

    The cache is a write-through layer in front of the database store:
    reads try the cache first and fall back to (and repopulate from)
    the database; writes and deletes always touch both.
    """
    def __init__(self, session_key=None):
        super(SessionStore, self).__init__(session_key)
    @property
    def cache_key(self):
        """Cache key for this session, namespaced to avoid collisions."""
        return KEY_PREFIX + self._get_or_create_session_key()
    def load(self):
        """Return the session data, preferring the cache over the database."""
        try:
            data = cache.get(self.cache_key, None)
        except Exception:
            # Some backends (e.g. memcache) raise an exception on invalid
            # cache keys. If this happens, reset the session. See #17810.
            data = None
        if data is None:
            # Cache miss: load from the database and repopulate the cache.
            data = super(SessionStore, self).load()
            cache.set(self.cache_key, data, settings.SESSION_COOKIE_AGE)
        return data
    def exists(self, session_key):
        """Cheap cache check first; fall back to the database lookup."""
        if (KEY_PREFIX + session_key) in cache:
            return True
        return super(SessionStore, self).exists(session_key)
    def save(self, must_create=False):
        """Write through: persist to the database, then refresh the cache."""
        super(SessionStore, self).save(must_create)
        cache.set(self.cache_key, self._session, settings.SESSION_COOKIE_AGE)
    def delete(self, session_key=None):
        """Delete from the database and evict the cached copy."""
        super(SessionStore, self).delete(session_key)
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        cache.delete(KEY_PREFIX + session_key)
    def flush(self):
        """
        Removes the current session data from the database and regenerates the
        key.
        """
        self.clear()
        self.delete(self.session_key)
        self.create()
|
socialplanning/opencore | opencore/interfaces/membership.py | Python | gpl-3.0 | 2,171 | 0.001382 | # @@ maybe these should go in a team.py
from zope.interface import Interface, Attribute
from zope.interface import implements
from Products.TeamSpace.interfaces.membership import ITeamMembership
class IOpenMembership(ITeamMembership):
    """
    Interface provided by OpenMembership objects.

    Extends TeamSpace's ITeamMembership; no additional methods are
    declared here.
    """
class IMembershipTransitionEvent(Interface):
    """
    A membership object has made a workflow transition to a new state.
    """
    transition = Attribute('name of the transition')
    obj = Attribute('object that made said transition')
class MembershipTransitionEvent(object):
    """Concrete event published when a membership object makes a
    workflow transition to a new state."""
    implements(IMembershipTransitionEvent)

    def __init__(self, obj, transition):
        self.transition = transition
        self.obj = obj
class IEmailInvites(Interface):
    """
    Interface for a local utility that tracks the project invitations
    that have been extended to non-site members (by email address).
    """
    # zope.interface convention: methods are declared without 'self'.
    def getInvitesByEmailAddress(address):
        """
        Returns a BTree, keys are project ids, values are timestamps
        of when the invitation was extended.
        """
    def getInvitesByProject(proj_id):
        """
        Returns a BTree, keys are email addresses, values are
        timestamps of when the invitation was extended.
        """
    def addInvitation(address, proj_id):
        """
        Registers an invitation to the specified email address for the
        specified project.
        """
    def removeInvitation(address, proj_id):
        """
        Removes the invitation registered for the specified email
        address and project id.
        Does nothing if the invitation doesn't exist.
        """
    def removeAllInvitesForAddress(address):
        """
        Removes registration of all invitations for the specified
        email address.
        Does nothing if no invitations exist for the specified
        address.
        """
    def convertInvitesForMember(member):
        """
        Converts any invitations for the email address related to the
        given member object into actual membership object invitations
        which can be approved per the default interface.
        """
|
xuweiliang/Codelibrary | openstack_dashboard/dashboards/admin/volumes_back/cgroups/tables.py | Python | apache-2.0 | 6,186 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugette | xt_lazy as _
from horizon import exceptions
from horizon import tables
from ope | nstack_dashboard.api import cinder
from openstack_dashboard import policy
class CreateVolumeCGroup(policy.PolicyTargetMixin, tables.LinkAction):
    """Table action that opens the modal form for creating a consistency group."""
    name = "create"
    verbose_name = _("Create Consistency Group")
    url = "horizon:admin:volumes:cgroups:create"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("volume", "consistencygroup:create"),)
class DeleteVolumeCGroup(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the modal for deleting a consistency group."""
    name = "deletecg"
    verbose_name = _("Delete Consistency Group")
    url = "horizon:admin:volumes:cgroups:delete"
    classes = ("ajax-modal", "btn-danger")
    policy_rules = (("volume", "consistencygroup:delete"), )
class RemoveAllVolumes(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the modal for removing volumes from a group."""
    name = "remove_vols"
    verbose_name = _("Remove Volumes from Consistency Group")
    url = "horizon:admin:volumes:cgroups:remove_volumes"
    classes = ("ajax-modal",)
    policy_rules = (("volume", "consistencygroup:update"), )
class EditVolumeCGroup(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the modal for editing a consistency group."""
    name = "edit"
    verbose_name = _("Edit Consistency Group")
    url = "horizon:admin:volumes:cgroups:update"
    classes = ("ajax-modal",)
    policy_rules = (("volume", "consistencygroup:update"),)
class ManageVolumes(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the modal for managing a group's volumes."""
    name = "manage"
    verbose_name = _("Manage Volumes")
    url = "horizon:admin:volumes:cgroups:manage"
    classes = ("ajax-modal",)
    policy_rules = (("volume", "consistencygroup:update"),)

    def allowed(self, request, cgroup=None):
        """Offer the action only for groups with a non-error status."""
        return getattr(cgroup, 'status', 'error') != 'error'
class CreateSnapshot(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the modal for snapshotting a consistency group."""
    name = "create_snapshot"
    verbose_name = _("Create Snapshot")
    url = "horizon:admin:volumes:cgroups:create_snapshot"
    classes = ("ajax-modal",)
    policy_rules = (("volume", "consistencygroup:create_cgsnapshot"),)

    def allowed(self, request, cgroup=None):
        """Offer the action only for groups with a non-error status."""
        return getattr(cgroup, 'status', 'error') != 'error'
class CloneCGroup(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the modal for cloning a consistency group."""
    name = "clone_cgroup"
    verbose_name = _("Clone Consistency Group")
    url = "horizon:admin:volumes:cgroups:clone_cgroup"
    classes = ("ajax-modal",)
    policy_rules = (("volume", "consistencygroup:create"),)

    def allowed(self, request, cgroup=None):
        """Offer the action only for groups with a non-error status."""
        return getattr(cgroup, 'status', 'error') != 'error'
class UpdateRow(tables.Row):
    """Table row that refreshes itself via AJAX while the status changes."""
    ajax = True
    def get_data(self, request, cgroup_id):
        """Re-fetch the group (with volume type names) for a row refresh."""
        try:
            cgroup = cinder.volume_cgroup_get_with_vol_type_names(request,
                                                                  cgroup_id)
        except Exception:
            # NOTE(review): if handle() returns instead of raising/redirecting,
            # 'cgroup' would be unbound below -- confirm horizon's behavior.
            exceptions.handle(request, _('Unable to display '
                                         'consistency group.'))
        return cgroup
class VolumeCGroupsFilterAction(tables.FilterAction):

    def filter(self, table, cgroups, filter_string):
        """Case-insensitive substring match on the group name."""
        needle = filter_string.lower()
        matches = []
        for cgroup in cgroups:
            if needle in cgroup.name.lower():
                matches.append(cgroup)
        return matches
def get_volume_types(cgroup):
    """Return the group's volume type names joined by commas.

    Groups lacking a volume_type_names attribute yield an empty string.
    """
    return ",".join(getattr(cgroup, 'volume_type_names', ()))
class VolumeCGroupsTable(tables.DataTable):
    """Admin table of volume consistency groups with all row/table actions."""
    # (status value, completed?) pairs; None for "creating" marks the state
    # as in-progress -- presumably keeps the AJAX row polling (see UpdateRow).
    STATUS_CHOICES = (
        ("in-use", True),
        ("available", True),
        ("creating", None),
        ("error", False),
    )
    # Translatable labels shown for each raw status value.
    STATUS_DISPLAY_CHOICES = (
        ("available",
         pgettext_lazy("Current status of Consistency Group", u"Available")),
        ("in-use",
         pgettext_lazy("Current status of Consistency Group", u"In-use")),
        ("error",
         pgettext_lazy("Current status of Consistency Group", u"Error")),
    )
    name = tables.WrappingColumn("name",
                                 verbose_name=_("Name"),
                                 link="horizon:admin:volumes:cgroups:detail")
    description = tables.Column("description",
                                verbose_name=_("Description"),
                                truncate=40)
    status = tables.Column("status",
                           verbose_name=_("Status"),
                           status=True,
                           status_choices=STATUS_CHOICES,
                           display_choices=STATUS_DISPLAY_CHOICES)
    availability_zone = tables.Column("availability_zone",
                                      verbose_name=_("Availability Zone"))
    # Computed column: comma-separated volume type names (see get_volume_types).
    volume_type = tables.Column(get_volume_types,
                                verbose_name=_("Volume Type(s)"))
    def get_object_id(self, cgroup):
        """Use the consistency group's id as the row identifier."""
        return cgroup.id
    class Meta(object):
        name = "volume_cgroups"
        verbose_name = _("Volume Consistency Groups")
        table_actions = (CreateVolumeCGroup,
                         VolumeCGroupsFilterAction)
        row_actions = (ManageVolumes,
                       EditVolumeCGroup,
                       CreateSnapshot,
                       CloneCGroup,
                       RemoveAllVolumes,
                       DeleteVolumeCGroup)
        row_class = UpdateRow
        status_columns = ("status",)
        permissions = ['openstack.services.volume']
|
citruspi/Atlas | atlas/templating/template.py | Python | unlicense | 356 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from jinja2 import Template as Jinja2Template
from atlas.pr | oviders import providers
class Template(Jinja2Template):
    """Jinja2 template whose globals are pre-loaded with every registered
    Atlas provider, keyed by the provider's registered name."""

    def __init__(self, *args, **kwargs):
        super(Template, self).__init__(*args, **kwargs)
        for name in providers:
            self.globals[name] = providers[name]
|
class MiddlewareMixin(object):
    """Stand-in for django.utils.deprecation.MiddlewareMixin.

    Django's own MiddlewareMixin stores the get_response callable on the
    instance; the previous version of this shim silently discarded it,
    which would break subclasses that delegate to self.get_response.
    """
    def __init__(self, get_response=None):
        # Keep the callable, matching Django's MiddlewareMixin contract.
        self.get_response = get_response
        super(MiddlewareMixin, self).__init__()
|
francois-durand/svvamp | svvamp/rules/rule_condorcet_sum_defeats.py | Python | gpl-3.0 | 12,230 | 0.001145 | # -*- coding: utf-8 -*-
"""
Created on 4 dec. 2018, 16:00
Copyright François Durand 2014-2018
fradurand@gmail.com
This file is part of SVVAMP.
SVVAMP is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SVVAMP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SVVAMP. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
from svvamp.rules.rule import Rule
from svvamp.utils.util_cache import cached_property
from svvamp.preferences.profile import Profile
class RuleCondorcetSumDefeats(Rule):
"""Condorcet with sum of defeats.
Examples
--------
>>> profile = Profile(preferences_ut=[
... [ 0. , -0.5, -1. ],
... [ 1. , -1. , 0.5],
... [ 0.5, 0.5, -0.5],
... [ 0.5, 0. , 1. ],
... [-1. , -1. , 1. ],
... ], preferences_rk=[
... [0, 1, 2],
... [0, 2, 1],
... [1, 0, 2],
... [2, 0, 1],
... [2, 1, 0],
... ])
>>> rule = RuleCondorcetSumDefeats()(profile)
>>> rule.demo_results_(log_depth=0) # doctest: +NORMALIZE_WHITESPACE
<BLANKLINE>
************************
* *
* Election Results *
* *
************************
<BLANKLINE>
***************
* Results *
***************
profile_.preferences_ut (reminder) =
[[ 0. -0.5 -1. ]
[ 1. -1. 0.5]
[ 0.5 0.5 -0.5]
[ 0.5 0. 1. ]
[-1. -1. 1. ]]
profile_.preferences_rk (reminder) =
[[0 1 2]
[0 2 1]
[1 0 2]
[2 0 1]
[2 1 0]]
ballots =
[[0 1 2]
[0 2 1]
[1 0 2]
[2 0 1]
[2 1 0]]
scores =
[-0. -2. -1.]
candidates_by_scores_best_to_worst
[0 2 1]
scores_best_to_worst
[-0. -1. -2.]
w = 0
score_w = -0.0
total_utility_w = 1.0
<BLANKLINE>
*********************************
* Condorcet efficiency (rk) *
*********************************
w (reminder) = 0
<BLANKLINE>
condorcet_winner_rk_ctb = 0
w_is_condorcet_winner_rk_ctb = True
w_is_not_condorcet_winner_rk_ctb = False
w_missed_condorcet_winner_rk_ctb = False
<BLANKLINE>
condorcet_winner_rk = 0
w_is_condorcet_winner_rk = True
w_is_not_condorcet_winner_rk = False
w_missed_condorcet_winner_rk = False
<BLANKLINE>
***************************************
* Condorcet efficiency (relative) *
***************************************
w (reminder) = 0
<BLANKLINE>
condorcet_winner_ut_rel_ctb = 0
w_is_condorcet_winner_ut_rel_ctb = True
w_is_not_condorcet_winner_ut_rel_ctb = False
w_missed_condorcet_winner_ut_rel_ctb = False
<BLANKLINE>
condorcet_winner_ut_rel = 0
w_is_condorcet_winner_ut_rel = True
w_is_not_condorcet_winner_ut_rel = False
w_missed_condorcet_winner_ut_rel = False
<BLANKLINE>
***************************************
* Condorcet efficiency (absolute) *
***************************************
w (reminder) = 0
<BLANKLINE>
condorcet_admissible_candidates =
[ True False False]
w_is_condorcet_admissible = True
w_is_not_condorcet_admissible = False
w_missed_condorcet_admissible = False
<BLANKLINE>
weak_condorcet_winners =
[ True False False]
w_is_weak_condorcet_winner = True
w_is_not_weak_condorcet_winner = False
w_missed_weak_condorcet_winner = False
<BLANKLINE>
condorcet_winner_ut_abs_ctb = 0
w_is_condorcet_winner_ut_abs_ctb = True
w_is_not_condorcet_winner_ut_abs_ctb = False
w_missed_condorcet_winner_ut_abs_ctb = False
<BLANKLINE>
condorcet_winner_ut_abs = 0
w_is_condorcet_winner_ut_abs = True
w_is_not_condorcet_winner_ut_abs = False
w_missed_condorcet_winner_ut_abs = False
<BLANKLINE>
resistant_condorcet_winner = nan
w_is_resistant_condorcet_winner = False
w_is_not_resistant_condorcet_winner = True
w_missed_resistant_condorcet_winner = False
>>> rule.demo_manipulation_(log_depth=0) # doctest: +NORMALIZE_WHITESPACE
<BLANKLINE>
*****************************
* *
* Election Manipulation *
* *
*****************************
<BLANKLINE>
*********************************************
* Basic properties of the voting system *
*********************************************
with_two_candidates_reduces_to_plurality = True
is_based_on_rk = True
is_based_on_ut_minus1_1 = False
meets_iia = False
<BLANKLINE>
****************************************************
* Manipulation properties of the voting system *
****************************************************
Condorcet_c_ut_rel_ctb (False) ==> Condorcet_c_ut_rel (False)
|| ||
|| Condorcet_c_rk_ctb (False) ==> Condorcet_c_rk (True) ||
|| || || || || ||
V V || || V V
Condorcet_c_ut_abs_ctb (False) ==> Condorcet_ut_abs_c (True)
|| || || ||
|| V V ||
|| maj_fav_c_rk_ctb (False) ==> maj_fav_c_rk (True) | ||
|| || || ||
V V V | V
majority_favorite_c_ut_ctb (False) ==> majority_favorite_c_ut (True)
|| ||
V V
IgnMC_c_ctb (False) ==> IgnMC_c (True)
|| ||
V V
InfMC_c_ctb (True) ==> InfMC_c (True)
<BLANKLINE>
*****************************************************
* Independence of Irrelevant Alternatives (IIA) *
*****************************************************
w (reminder) = 0
is_iia = True
log_iia: iia_subset_maximum_size = 2.0
example_winner_iia = nan
example_subset_iia = nan
<BLANKLINE>
**********************
* c-Manipulators *
**********************
w (reminder) = 0
preferences_ut (reminder) =
[[ 0. -0.5 -1. ]
[ 1. -1. 0.5]
[ 0.5 0.5 -0.5]
[ 0.5 0. 1. ]
[-1. -1. 1. ]]
v_wants_to_help_c =
[[False False False]
[False False False]
[False False False]
[False False True]
[False False True]]
<BLANKLINE>
************************************
* Individual Manipulation (IM) *
************************************
is_im = nan
log_im: im_option = lazy
candidates_i |
jpardobl/naman | naman/core/pypelib/persistence/PersistenceEngine.py | Python | bsd-3-clause | 2,078 | 0.027911 | import os
import sys
import time
'''
@author: msune,omoya,CarolinaFernandez
@@organization: i2CAT, OFELIA FP7
Persistence engine
Implementes driver-based persistence backend selection
'''
class PersistenceEngine():
    """Static class implementing driver-based persistence backend selection."""
    # Default Class Attributes
    _defaultParser = "RegexParser"
    _defaultPersistence = "Django"
    # Names of the supported backend drivers
    _drivers = ["Django", "RAWFile"]
    # Package that holds the concrete backend implementations
    PATH_TO_DRIVERS = "backends"

    def __init__(self):
        raise Exception("Static class cannot be instanciated")

    @staticmethod
    def _getDriver(driverName):
        """Resolve *driverName* to its backend class, importing it lazily."""
        print("driver name: %s" % driverName)
        if driverName == "Django":
            path = PersistenceEngine.PATH_TO_DRIVERS + '.django.Django'
            try:
                exec('from ' + path + ' import Django')
                return Django
            except:
                raise Exception(driverName + ' persistence driver not found in ' + PersistenceEngine.PATH_TO_DRIVERS)
        elif driverName == "RAWFile":
            path = PersistenceEngine.PATH_TO_DRIVERS + '.rawfile.RAWFile'
            try:
                exec('from ' + path + ' import RAWFile')
                return RAWFile
            except:
                raise Exception(driverName + ' persistence driver not found in ' + PersistenceEngine.PATH_TO_DRIVERS)
        else:
            raise Exception(driverName + ' not supported')

    @staticmethod
    def save(obj, pBackend, parser=None, **kwargs):
        return PersistenceEngine._getDriver(pBackend).save(obj, parser, **kwargs)

    @staticmethod
    def load(tableName, pBackend, resolverMappings, parser=None, **kwargs):
        return PersistenceEngine._getDriver(pBackend).load(tableName, resolverMappings, parser, **kwargs)

    # Retrieves every Driver's PolicyRuleTable object for a given name.
    # This method should be seldom used.
    @staticmethod
    def loadAll(tableName, pBackend):
        return PersistenceEngine._getDriver(pBackend).loadAll(tableName)

    # Deletes a Driver's PolicyRuleTable object for a given ID.
    # This method should be seldom used.
    @staticmethod
    def delete(tableID, pBackend):
        return PersistenceEngine._getDriver(pBackend).delete(tableID)
|
jonparrott/google-cloud-python | pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py | Python | apache-2.0 | 40,328 | 0.001364 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.pubsub.v1 Publisher API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.path_template
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.cloud.pubsub_v1.gapic import publisher_client_config
from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport
from google.cloud.pubsub_v1.proto import pubsub_pb2
from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
'google-cloud-pubsub', ).version
class PublisherClient(object):
"""
The service that an application uses to manipulate topics, and to send
messages to a topic.
"""
SERVICE_ADDRESS = 'pubsub.googleapis.com:443'
"""The default address of the service."""
# The scopes needed to make gRPC calls to all of the methods defined in
# this service
_DEFAULT_SCOPES = (
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/pubsub', )
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = 'google.pubsub.v1.Publisher'
    @classmethod
    def from_service_account_file(cls, filename, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            PublisherClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs['credentials'] = credentials
        return cls(*args, **kwargs)
    # Alias kept so JSON key files can be loaded under either name.
    from_service_account_json = from_service_account_file
@classmethod
def topic_path(cls, project, topic):
"""Return a fully-qualified topic string."""
return google.api_core.path_template.expand(
'projects/{project}/topics/{topic}',
project=project,
topic=topic,
)
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
'projects/{project}',
project=project,
)
    def __init__(self,
                 transport=None,
                 channel=None,
                 credentials=None,
                 client_config=None,
                 client_info=None):
        """Constructor.

        Args:
            transport (Union[~.PublisherGrpcTransport,
                    Callable[[~.Credentials, type], ~.PublisherGrpcTransport]): A transport
                instance, responsible for actually making the API calls.
                The default transport uses the gRPC protocol.
                This argument may also be a callable which returns a
                transport instance. Callables will be sent the credentials
                as the first argument and the default transport class as
                the second argument.
            channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
                through which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is mutually exclusive with providing a
                transport instance to ``transport``; doing so will raise
                an exception.
            client_config (dict): DEPRECATED. A dictionary of call options for
                each method. If not specified, the default configuration is used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Raise deprecation warnings for things we want to go away.
        if client_config is not None:
            warnings.warn(
                'The `client_config` argument is deprecated.',
                PendingDeprecationWarning,
                stacklevel=2)
        else:
            client_config = publisher_client_config.config
        if channel:
            warnings.warn(
                'The `channel` argument is deprecated; use '
                '`transport` instead.',
                PendingDeprecationWarning,
                stacklevel=2)
        # Instantiate the transport.
        # The transport is responsible for handling serialization and
        # deserialization and actually sending data to the service.
        if transport:
            # A callable transport is invoked with the credentials and the
            # default transport class; an instance is used as-is.
            if callable(transport):
                self.transport = transport(
                    credentials=credentials,
                    default_class=publisher_grpc_transport.
                    PublisherGrpcTransport,
                )
            else:
                if credentials:
                    raise ValueError(
                        'Received both a transport instance and '
                        'credentials; these are mutually exclusive.')
                self.transport = transport
        else:
            self.transport = publisher_grpc_transport.PublisherGrpcTransport(
                address=self.SERVICE_ADDRESS,
                channel=channel,
                credentials=credentials,
            )
        if client_info is None:
            client_info = google.api_core.gapic_v1.client_info.ClientInfo(
                gapic_version=_GAPIC_LIBRARY_VERSION, )
        else:
            client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        self._client_info = client_info
        # Parse out the default settings for retry and timeout for each RPC
        # from the client configuration.
        # (Ordinarily, these are the defaults specified in the `*_config.py`
        # file next to this one.)
        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
            client_config['interfaces'][self._INTERFACE_NAME], )
        # Save a dictionary of cached API call functions.
        # These are the actual callables which invoke the proper
        # transport methods, wrapped with `wrap_method` to add retry,
        # timeout, and the like.
        self._inner_api_calls = {}
# Service calls
def create_topic(self,
name,
labels=None,
message_storage_policy=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.a |
remap/ndn-flow | framework/ndn_iot_python/examples/test_consuming.py | Python | lgpl-3.0 | 2,489 | 0.006027 | #!/usr/bin/python
from pyndn import Name, Interest, Data
from pyndn.util.memory_content_cache import MemoryContentCache
from pyndn.security import KeyChain
from pyndn.threadsafe_face import ThreadsafeFace
from ndn_iot_python.bootstrap.bootstrap import Bootstrap
import time
import sys
import json
import logging
try:
import asyncio
except ImportError:
import trollius as asyn | cio
class AppConsumer():
    """Expresses one interest for *dataPrefix* and verifies the reply."""
    def __init__(self, face, certificateName, keyChain, dataPrefix):
        self._keyChain = keyChain
        self._defaultCertificateName = certificateName
        self._face = face
        self._dataPrefix = dataPrefix
        return
    def start(self):
        # Fire a single interest; results arrive in onData / onTimeout.
        interest = Interest(self._dataPrefix)
        self._face.expressInterest(interest, self.onData, self.onTimeout)
        print("Interest expressed " + interest.getName().toUri())
        return
    def onData(self, interest, data):
        print("Got data " + data.getName().toUri())
        print("Data keyLocator KeyName " + data.getSignature().getKeyLocator().getKeyName().toUri())
        def onVerified(data):
            print("data verified: " + data.getContent().toRawStr())
            return
        def onVerifyFailed(data, reason):
            print("data verification failed: " + reason)
            return
        # Verification is asynchronous; outcome reported by the callbacks.
        self._keyChain.verifyData(data, onVerified, onVerifyFailed)
        return
    def onTimeout(self, interest):
        print("Interest times out " + interest.getName().toUri())
        return
return
if __name__ == '__main__':
    # psutil is optional; failure to import is only reported, not fatal.
    try:
        import psutil as ps
    except Exception as e:
        print str(e)
    loop = asyncio.get_event_loop()
    face = ThreadsafeFace(loop)
    bootstrap = Bootstrap(face)
    # After identity setup, subscribe to trust-schema updates and start the
    # consumer once the initial schema arrives.
    def onSetupComplete(defaultCertificateName, keyChain):
        def onUpdateFailed(msg):
            print "Trust scheme update failed"
            return
        def onUpdateSuccess(trustSchemaString, isInitial):
            print "Got a trust schema"
            if isInitial:
                consumer.start()
            return
        consumer = AppConsumer(face, defaultCertificateName, keyChain, Name("/home/flow/ps-publisher-4"))
        bootstrap.startTrustSchemaUpdate(Name("/home/gateway/flow"), onUpdateSuccess, onUpdateFailed)
    def onSetupFailed(msg):
        print("Setup failed " + msg)
    bootstrap.setupDefaultIdentityAndRoot("app.conf", onSetupComplete = onSetupComplete, onSetupFailed = onSetupFailed)
    # Runs forever; all NDN callbacks fire on this event loop.
    loop.run_forever()
|
ptressel/sahana-eden-madpub | controllers/rms.py | Python | mit | 6,228 | 0.006904 | # -*- coding: utf-8 -*-
""" Request Management System - Controllers """
prefix = request.controller
resourcename = request.function

# Bail out early when the RMS module is disabled for this deployment.
if prefix not in deployment_settings.modules:
    session.error = T("Module disabled!")
    redirect(URL(r=request, c="default", f="index"))

# Options Menu (available in all Functions' Views)
menu = [
    [T("Home"), False, URL(r=request, f="index")],
    [T("Requests"), False, URL(r=request, f="req"), [
        [T("List"), False, URL(r=request, f="req")],
        [T("Add"), False, URL(r=request, f="req", args="create")],
        # @ToDo Search by priority, status, location
        #[T("Search"), False, URL(r=request, f="req", args="search")],
    ]],
    [T("All Requested Items"), False, URL(r=request, f="ritem")],
]

# If a hospital / shelter is in the session context, surface a direct
# link to it at the end of the menu.
if session.rcvars:
    if "hms_hospital" in session.rcvars:
        hospital = db.hms_hospital
        query = (hospital.id == session.rcvars["hms_hospital"])
        selection = db(query).select(hospital.id, hospital.name, limitby=(0, 1)).first()
        if selection:
            menu_hospital = [
                [selection.name, False, URL(r=request, c="hms", f="hospital", args=[selection.id])]
            ]
            menu.extend(menu_hospital)
    if "cr_shelter" in session.rcvars:
        shelter = db.cr_shelter
        query = (shelter.id == session.rcvars["cr_shelter"])
        selection = db(query).select(shelter.id, shelter.name, limitby=(0, 1)).first()
        if selection:
            menu_shelter = [
                [selection.name, False, URL(r=request, c="cr", f="shelter", args=[selection.id])]
            ]
            menu.extend(menu_shelter)

response.menu_options = menu
def index():
    """ Module's Home Page

        Default to the rms_req list view.
    """
    # Rewrite the current request in place so req() serves the module root.
    request.function = "req"
    request.args = []
    return req()
#module_name = deployment_settings.modules[prefix].name_nice
#response.title = module_name
#return dict(module_name=module_name, a=1)
def req():
    """ RESTful CRUD controller for aid requests. """
    resourcename = request.function # check again in case we're coming from index()
    tablename = "%s_%s" % (prefix, resourcename)
    table = db[tablename]
    # Pre-processor: runs before the REST method is applied.
    def prep(r):
        response.s3.cancel = r.here()
        if r.representation in shn_interactive_view_formats and r.method != "delete":
            # Don't send the locations list to client (pulled by AJAX instead)
            r.table.location_id.requires = IS_NULL_OR(IS_ONE_OF_EMPTY(db, "gis_location.id"))
        #if r.method == "create" and not r.component:
        # listadd arrives here as method=None
        if not r.component:
            # Sensible defaults for new requests: now + the logged-in person.
            table.datetime.default = request.utcnow
            table.person_id.default = s3_logged_in_person()
            # @ToDo Default the Organisation too
        return True
    response.s3.prep = prep
    # Post-processor: adjusts the output after the REST method ran.
    def postp(r, output):
        if r.representation in shn_interactive_view_formats:
            #if r.method == "create" and not r.component:
            # listadd arrives here as method=None
            if r.method != "delete" and not r.component:
                # Redirect to the Assessments tabs after creation
                r.next = r.other(method="ritem", record_id=s3xrc.get_session(prefix, resourcename))
            # Custom Action Buttons
            if not r.component:
                response.s3.actions = [
                    dict(label=str(T("Open")), _class="action-btn", url=str(URL(r=request, args=["[id]", "update"]))),
                    dict(label=str(T("Items")), _class="action-btn", url=str(URL(r=request, args=["[id]", "ritem"]))),
                ]
        return output
    response.s3.postp = postp
    s3xrc.model.configure(table,
                          #listadd=False, #@todo: List add is causing errors with JS - FIX
                          editable=True)
    return s3_rest_controller(prefix,
                              resourcename,
                              rheader=shn_rms_req_rheader)
def shn_rms_req_rheader(r):
    """ Resource Header for Requests

        Builds the summary table + tabs shown above request views;
        returns None for non-HTML representations or missing records.
    """
    if r.representation == "html":
        if r.name == "req":
            req_record = r.record
            if req_record:
                _next = r.here()
                _same = r.same()
                try:
                    location = db(db.gis_location.id == req_record.location_id).select(limitby=(0, 1)).first()
                    location_represent = shn_gis_location_represent(location.id)
                except:
                    # No / unresolvable location on the record.
                    location_represent = None
                rheader_tabs = shn_rheader_tabs( r,
                                                [(T("Edit Details"), None),
                                                 (T("Items"), "ritem"),
                                                ]
                                                )
                rheader = DIV( TABLE(
                                   TR( TH( T("Message") + ": "),
                                       TD(req_record.message, _colspan=3)
                                     ),
                                   TR( TH( T("Time of Request") + ": "),
                                       req_record.datetime,
                                       TH( T( "Location") + ": "),
                                       location_represent,
                                     ),
                                   TR( TH( T("Priority") + ": "),
                                       req_record.priority,
                                       TH( T("Document") + ": "),
                                       document_represent(req_record.document_id)
                                     ),
                                   ),
                               rheader_tabs
                             )
                return rheader
    return None
def ritem():
    """ RESTful CRUD controller for requested items. """
    item_table = db["%s_%s" % (prefix, resourcename)]
    # Items are only created from within a request, so disable list-add here.
    s3xrc.model.configure(item_table, insertable=False)
    return s3_rest_controller(prefix, resourcename)
def store_for_req():
    """Placeholder view context: no store table selected yet."""
    return dict(store_table=None)
|
stygstra/keras-contrib | keras_contrib/layers/wrappers.py | Python | mit | 94 | 0 | from keras.engine | import Layer
from keras.engine import InputSpec
from .. imp | ort backend as K
|
rajul/tvb-framework | tvb/core/entities/transient/graph_structures.py | Python | gpl-2.0 | 11,862 | 0.005142 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Lia Domide <lia.domide@codemart.ro>
.. moduleauthor:: Ionel Ortelecan <ionel.ortelecan@codemart.ro>
"""
from tvb.core.entities.storage import dao
MAX_SHAPE_SIZE = 50
MIN_SHAPE_SIZE = 10
NODE_DATATYPE_TYPE = "datatype"
NODE_OPERATION_TYPE = "operation"
NODE_OPERATION_GROUP_TYPE = "operationGroup"


class NodeData(object):
    """
    Display meta-data attached to each GRAPH node (shape, colour, identity).
    """
    shape_size = None
    shape_color = None
    shape_type = None
    node_type = None
    node_entity_id = None
    node_subtitle = None

    def __init__(self, shape_size, shape_color, shape_type, node_type, node_entity_id, node_subtitle):
        self.shape_size = shape_size
        self.shape_color = shape_color
        self.shape_type = shape_type
        self.node_type = node_type
        self.node_entity_id = node_entity_id
        self.node_subtitle = node_subtitle

    def to_json(self):
        """
        Returns the JSON-ready representation of this NodeData instance
        ($-prefixed keys drive the JS graph renderer).
        """
        return {"$dim": self.shape_size,
                "$color": self.shape_color,
                "$type": self.shape_type,
                "node_type": self.node_type,
                "node_entity_id": self.node_entity_id,
                "node_subtitle": self.node_subtitle}

    @staticmethod
    def build_node_for_datatype(datatype_id, node_subtitle, shape_size=MAX_SHAPE_SIZE, is_group=False):
        """Purple circle node for a datatype ('circlesGroup' for DT groups)."""
        if is_group:
            shape = "circlesGroup"
        else:
            shape = "circle"
        return NodeData(shape_size, "#83548B", shape, NODE_DATATYPE_TYPE, datatype_id, node_subtitle)

    @staticmethod
    def build_node_for_operation(operation, group_id=None):
        """Dark-red square node for an operation, or for its group when
        *group_id* is given; subtitled with the operation start date."""
        if group_id:
            entity_id = group_id
            node_type = NODE_OPERATION_GROUP_TYPE
            shape = "squaresGroup"
        else:
            entity_id = operation.id
            node_type = NODE_OPERATION_TYPE
            shape = "square"
        return NodeData(MAX_SHAPE_SIZE, "#660033", shape, node_type, entity_id, str(operation.start_date))
class NodeStructure(object):
    """
    Full structure of one GRAPH node: identity, display meta-data
    (a NodeData instance) and the GIDs of adjacent nodes.
    """
    node_gid = None
    name = None
    data = None
    # NOTE(review): class-level mutable default; the builders overwrite it
    # per instance, but mutating it in place would leak across instances.
    adjacencies = []
    selected = False

    def __init__(self, node_gid, node_name):
        self.node_gid = node_gid
        self.name = node_name

    def to_json(self):
        """
        Returns the JSON-ready representation of this NodeStructure
        instance, with one edge dict per adjacency GID.
        """
        edges = [{"nodeFrom": self.node_gid, "nodeTo": adj, "data": {}}
                 for adj in self.adjacencies]
        return {"id": self.node_gid,
                "name": self.name,
                "data": self.data.to_json() if self.data is not None else {},
                "adjacencies": edges}

    @staticmethod
    def build_structure_for_datatype(datatype_gid):
        """Node for a datatype; datatype groups get the grouped shape."""
        datatype = dao.get_datatype_by_gid(datatype_gid)
        is_group = dao.is_datatype_group(datatype_gid)
        structure = NodeStructure(datatype_gid, datatype.type)
        structure.data = NodeData.build_node_for_datatype(datatype.id, datatype.display_name, is_group=is_group)
        return structure

    @staticmethod
    def build_structure_for_operation(operation):
        """Node for a single operation, named after its algorithm."""
        algo = dao.get_algorithm_by_id(operation.fk_from_algo)
        structure = NodeStructure(operation.gid, algo.name)
        structure.data = NodeData.build_node_for_operation(operation)
        return structure

    @staticmethod
    def build_structure_for_operation_group(operation_group_gid):
        """Node for an operation group, described by its first operation."""
        group = dao.get_operationgroup_by_gid(operation_group_gid)
        operation = dao.get_operations_in_group(group.id, only_first_operation=True)
        algo = dao.get_algorithm_by_id(operation.fk_from_algo)
        structure = NodeStructure(operation_group_gid, algo.name)
        structure.data = NodeData.build_node_for_operation(operation, group.id)
        return structure

    @staticmethod
    def build_artificial_root_structure(adjacencies_nodes):
        """Synthetic root node linking to every entry-point node."""
        root_structure = NodeStructure("fakeRootNode", "fakeRootNode")
        root_structure.data = NodeData.build_node_for_datatype("fakeRootNode", "Fake root",
                                                              shape_size=MAX_SHAPE_SIZE / 2)
        root_structure.adjacencies = adjacencies_nodes
        return root_structure
class GraphComponent():
    """
    One connected component of the graph, as four layers of NodeStructure
    lists centred on the currently selected node::

        input_datatypes       operation_parent
              |                      |
              V                      V
       [operation_parent]    [output_datatypes]   * currently selected
              |                      |
              V                      V
        output_datatypes       in_operations
    """
    input_datatypes = []
    operation_parent = []
    output_datatypes = []
    in_operations = []

    def __init__(self, dt_inputs, parent_op, dt_outputs, op_inputs):
        self.input_datatypes = dt_inputs
        self.operation_parent = parent_op
        self.output_datatypes = dt_outputs
        self.in_operations = op_inputs
class FullGraphStructure():
"""
This class contains information for the entire graph to be displayed in UI.
It holds a list of GraphComponent instances (e.g. multiple UPLOAD ops).
"""
graph_components = []
    def __init__(self, components):
        # components: list of GraphComponent instances forming the graph.
        self.graph_components = components
        # Compute node sizes and wire up adjacency lists across components.
        self.fill_shape_size()
        self.fill_all_graph_adjiacences()
def prepare_for_json(self):
"""
Returns a list of NodeStructure instances to be serialized for browser-client rendering.
"""
artificial_root_adj = []
for component in self.graph_components:
if len(component.input_datatypes):
artificial_root_adj.extend(self._get_nodes_gids(component.input_datatypes))
else:
artificial_root_adj.extend(self._get_nodes_gids(component.operation_parent))
result_to_serialize = [NodeStructure.build_artificial_root_structure(artificial_root_adj)]
for component in self.graph_components:
for level in [component.input_datatypes, component.operation_parent,
component. |
danielhanchen/sciblox | sciblox (v1)/sciblox.py | Python | mit | 74,277 | 0.041816 | #------------- Daniel Han-Chen 2017
#------------- https://github.com/danielhanchen/sciblox
#------------- SciBlox v0.02
#-------------
maxcats = 15
import warnings
warnings.filterwarnings("ignore")
true = True; TRUE = True
false = False; FALSE = False
import pip
# Install *package* via pip at runtime (used by the bootstrap block below).
def install(package): pip.main(['install', package])
#-----------------------------
try:
import pandas as pd, numpy as np, scipy, sklearn as sk, seaborn as sb
from copy import copy
from jupyterthemes import jtplot
import matplotlib.pyplot as plt
jtplot.style()
except:
try:
print("Installing packages... Please wait...")
if __name__ == '__main__':
install('pandas'); install('numpy'); install('scipy'); install('scikit-learn');
install('matplotlib'); install('seaborn'); install('lightgbm');
try: install('jupyterthemes');
except: pass;
try: install('sympy');
except: pass;
try:
install('libpython'); install('theano'); install('fancyimpute');
except: pass;
except: pass;
import pandas as pd, numpy as np, scipy, sklearn as sk, seaborn as sb
from copy import copy
import matplotlib.pyplot as plt
try:
from jupyterthemes import jtplot;
jtplot.style()
except: pass;
#-----------------------------
np.set_printoptions(suppress = True)
pd.set_option('display.max_rows', 10)
pd_colour = '#302f2f'
#-------------
#-------------
#-------------
#-------------
#------------------------------------ DATAFRAME METHODS ------------------------------------#
#-------------------- Display options and pandas methods --------------------#
def maxrows(x = 10):
    """Set the maximum number of rows pandas displays."""
    pd.set_option('display.max_rows', x)
def maxcat(x = 15):
    """Set the module-level category-size threshold ``maxcats``.

    BUG FIX: the original assigned a *local* ``maxcats``, so the call had
    no effect.  (Note: defaults already bound from the old value, e.g.
    iscat's ``cat`` parameter, are unaffected -- defaults bind at def time.)
    """
    global maxcats
    maxcats = x
def tabcolour(x = '#302f2f'):
    """Set the module-level table colour ``pd_colour``.

    BUG FIX: the original assigned a *local* ``pd_colour`` (a no-op).
    """
    global pd_colour
    pd_colour = x
#-----------------------------
def percent(x):
    """Normalise *x* to a fraction: values above 1 are divided by 100."""
    return x if x <= 1 else x / 100
#-----------------------------
def table(x):
    """Coerce *x* into a DataFrame; dicts fall back to (key, value) rows."""
    try:
        return pd.DataFrame(x)
    except:
        return pd.DataFrame(list(x.items()))
def series(x):
    """Coerce *x* into a Series, transposing nested input when needed."""
    try:
        return pd.Series(x)
    except:
        first = pd.Series(x[0])
        # NOTE(review): T appears to be a transpose helper defined elsewhere
        # in this module -- confirm before relying on this branch.
        if len(first) != len(x):
            return pd.Series(T(x)[0])
        return first
#-----------------------------
def istable(x):
    """Return 1 when x is a pandas DataFrame or Series, else 0."""
    return int(type(x) in (pd.DataFrame, pd.Series))
def isarray(x):
    """Return 1 when x is a numpy array/ndarray/matrix, else 0."""
    return int(type(x) in (np.array, np.ndarray, np.matrix))
#-----------------------------
def shape(x):
    """Shape of a table/array, or plain length for other sequences."""
    try:
        return x.shape
    except:
        return len(x)
#-----------------------------
def head(x, n = 5):
    """First n rows (tables) or first n elements (sequences)."""
    if istable(x) == 1:
        return x.head(n)
    return x[:n] if len(x) > n else x
def tail(x, n = 5):
    """Last n rows (tables) or last n elements (sequences)."""
    if istable(x) == 1:
        return x.tail(n)
    return x[-n:] if len(x) > n else x
#-----------------------------
def sample(x, n = 5, ordered = False):
    """n random elements of x, or evenly spaced ones when ordered=True."""
    size = len(x) if n > len(x) else n
    if istable(x) == 1:
        if ordered == False:
            return x.sample(size)
        return x.iloc[[int(k * (len(x) / size)) for k in range(size)]]
    if ordered == False:
        return np.random.choice(x, size)
    return np.array(x)[[int(k * (len(x) / size)) for k in range(size)]]
#-----------------------------
def columns(x):
    """Column labels of x as a plain list (None when x has none)."""
    try:
        return x.columns.tolist()
    except:
        pass
def index(x):
    """Index labels of x as a plain list (None when x has none)."""
    try:
        return x.index.tolist()
    except:
        pass
#-----------------------------
def reset(x, index = True, column = False, string = False, drop = False):
    """Reset the row index, or renumber/rename the columns.

    index=True, column=False -> x.reset_index() (drop=True keeps only the
    original columns).  Any other combination returns a copy of x whose
    columns become 0..n-1, or "col0".."col<n-1>" when string=True.
    """
    if index == True and column == False:
        if drop == False:
            return x.reset_index()
        return x.reset_index()[columns(x)]
    out = copy(x)
    if type(x) == pd.Series:
        width = 0
    else:
        width = shape(x)[1]
    if string == True:
        out.columns = ["col" + str(k) for k in range(width)]
    else:
        out.columns = [k for k in range(width)]
    return out
#-----------------------------
def hcat(*args):
    """Concatenate the arguments horizontally (columns side by side).

    Lists are joined with +, numpy arrays with np.hstack, and anything
    table-like with pd.concat(axis=1); the first argument picks the mode.
    """
    acc = args[0]
    if type(acc) == pd.Series:
        acc = table(acc)
    for nxt in args[1:]:
        if type(acc) == list:
            if type(nxt) != list:
                nxt = list(nxt)
            acc = acc + nxt
        elif isarray(acc) == 1:
            if isarray(nxt) == 0:
                nxt = array(nxt)
            acc = np.hstack((acc, nxt))
        else:
            if type(nxt) != pd.DataFrame:
                nxt = table(nxt)
            # FIX: concat's axis is keyword-only in pandas >= 2.0; the old
            # positional form pd.concat([a, b], 1) raises TypeError there.
            acc = pd.concat([acc, nxt], axis=1)
        del nxt
    return acc
def vcat(*args):
    """Concatenate the arguments vertically (rows stacked)."""
    acc = args[0]
    if type(acc) == pd.Series:
        acc = table(acc)
    elif type(acc) == list:
        # NOTE(review): array() is a module helper defined elsewhere.
        acc = array(acc)
    for nxt in args[1:]:
        if isarray(acc) == 1:
            if isarray(nxt) == 0:
                nxt = array(nxt)
            acc = np.vstack((acc, nxt))
        else:
            if type(nxt) != pd.DataFrame:
                nxt = table(nxt)
            # FIX: keyword axis for pandas >= 2.0 compatibility.
            acc = pd.concat([acc, nxt], axis=0)
        del nxt
    return acc
#-----------------------------
def dtypes(x):
if type(x)==pd.Series:
types = x.dtype
if types==('O' or "string" or "unicode"): return 'obj'
elif types==("int64" or "uint8" or "uint16" or "uint32" or "uint64" or "int8" or "int32" or "int16"): return 'int'
elif types==('float64' or 'float16' or 'float32' or 'float128'): return 'float'
elif types=='bool': return 'bool'
else: return 'date'
else:
dfs = x.dtypes
for f in (dfs.index.tolist()):
dfs[f] = str(dfs[f])
if "int" in dfs[f]: dfs[f] = 'int'
elif "float" in dfs[f]: dfs[f] = "float"
elif "bool" in dfs[f]: dfs[f] = "bool"
elif "O" in dfs[f] or "obj" in dfs[f]: dfs[f] = "obj"
elif "date" in dfs[f]: dfs[f] = "date"
else: dfs[f] = "obj"
return dfs
def dtype(x): return dtypes(x)
def contcol(x):
try: return ((dtypes(x)=="int")|(dtypes(x)=="float")).index[(dtypes(x)=="int")|(dtypes(x)=="float")].tolist()
except: return np.nan
def conts(x):
if type(x) == pd.Series:
if dtype(x) in ["int","float"]: return x
else: return np.nan
else: return x[contcol(x)]
def objcol(x):
try: return (dtypes(x)=="obj").index[dtypes(x)=="obj"].tolist()
except: return np.nan
def objects(x):
if type(x) == pd.Series:
if dtype(x) == "obj": return x
else: return np.nan
else: return x[objcol(x)]
def objs(x): return objects(x)
def notobj(x): return exc(x, objcol(x))
def catcol(x):
if type(x) == pd.Series:
if iscat(x) == True: return x
else: return np.nan
else: return (iscat(x).index[iscat(x)]).tolist()
def classcol(x): return cats(x)
def cats(x): return x[catcol(x)]
def classes(x): return x[catcol(x)]
def iscat(x, cat = maxcats):
return ((dtypes(x)!='float')|(dtypes(x)!='int'))&(nunique(x)<=cat)
#-----------------------------
def nullcol(x): return (count(x)!=len(x)).index[count(x)!=len(x)].tolist()
def nacol(x): return nullcol(x)
def missingcol(x): return nullcol(x)
def notnull(x, row = 1, keep = None, col = 0):
if row!=1: axis = 1
elif col!=0: axis = 0
else: axis = 0
if keep is None:
try: return x.dropna(axis = axis)
except: return x.dropna()
else:
if keep < 1:
if axis==1: keep = len(x)*keep
else: keep = shape(x)[1]*keep
return x.dropna(axis = axis, thresh = keep)
def isnull(x, row = 1, keep = None, col = 0):
if row!=1 or col!=0: axis = 0
else: axis = 1
if keep is None: miss = missing(x, row = axis)!=0
else:
if axis == 1:
if keep < 1: miss = missing(x, row = axis)<=shape(x)[1]*keep
else: miss = missing(x, row = axis)<=keep
else:
if keep < 1: miss = missing(x, row = axis)<=len(x)*keep
else: miss = missing(x, row = axis)<=keep
try: return x.iloc[miss.index[miss]]
except: return x[pd.isnull(x)==True]
def dropna(x, col = None):
if col is None: return x.dropna()
else:
if type(col)!=list: col = list(col)
return x.dropna(subset = col)
#-----------------------------
def diff(want, rem):
w = copy(want)
for j in w:
if j in rem: w.remove(j)
for j in rem:
if j in w: w.remove(j)
return w
def exc(x, l):
if type(l) == str: l = [l]
return x[diff(columns(x),l)]
def drop(x, l): return exc(x, l), x[l]
def pop(x, l): return exc(x, l), x[l]
def append(l, r):
g = copy(l);
if type(g)!= list: g = |
bearstech/ansible | lib/ansible/modules/database/proxysql/proxysql_global_variables.py | Python | gpl-3.0 | 9,148 | 0.000656 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: proxysql_global_variables
version_added: "2.3"
author: "Ben Mildren (@bmildren)"
short_description: Gets or sets the proxysql global variables.
description:
- The M(proxysql_global_variables) module gets or sets the proxysql global
variables.
options:
variable:
description:
- Defines which variable should be returned, or if I(value) is specified
which variable should be updated.
required: True
value:
description:
- Defines a value the variable specified using I(variable) should be set
to.
save_to_disk:
description:
- Save mysql host config to sqlite db on disk to persist the
configuration.
default: True
load_to_runtime:
description:
- Dynamically load mysql host config to runtime memory.
default: True
login_user:
description:
- The username used to authenticate to ProxySQL admin interface.
default: None
login_password:
description:
- The password used to authenticate to ProxySQL admin interface.
default: None
login_host:
description:
- The host used to connect to ProxySQL admin interface.
default: '127.0.0.1'
login_port:
description:
- The port used to connect to ProxySQL admin interface.
default: 6032
config_file:
description:
- Specify a config file from which login_user and login_password are to
be read.
default: ''
'''
EXAMPLES = '''
---
# This example sets the value of a variable, saves the mysql admin variables
# config to disk, and dynamically loads the mysql admin variables config to
# runtime. It uses supplied credentials to connect to the proxysql admin
# interface.
- proxysql_global_variables:
login_user: 'admin'
login_password: 'admin'
variable: 'mysql-max_connections'
value: 4096
# This example gets the value of a variable. It uses credentials in a
# supplied config file to connect to the proxysql admin interface.
- proxysql_global_variables:
config_file: '~/proxysql.cnf'
variable: 'mysql-default_query_delay'
'''
RETURN = '''
stdout:
description: Returns the mysql variable supplied with it's associted value.
returned: Returns the current variable and value, or the newly set value
for the variable supplied..
type: dict
"sample": {
"changed": false,
"msg": "The variable is already been set to the supplied value",
"var": {
"variable_name": "mysql-poll_timeout",
"variable_value": "3000"
}
}
'''
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect
from ansible.module_utils._text import to_native
try:
import MySQLdb
import MySQLdb.cursors
except ImportError:
MYSQLDB_FOUND = False
else:
MYSQLDB_FOUND = True
# ===========================================
# proxysql module specific support methods.
#
def perform_checks(module):
if module.params["login_port"] < 0 \
or module.params["login_port"] > 65535:
module.fail_json(
msg="login_port must be a valid unix port number (0-65535)"
)
if not MYSQLDB_FOUND:
module.fail_json(
msg="the python mysqldb module is required"
)
def save_config_to_disk(variable, cursor):
if variable.startswith("admin"):
cursor.execute("SAVE ADMIN VARIABLES TO DISK")
else:
cursor.execute("SAVE MYSQL VARIABLES TO DISK")
return True
def load_config_to_runtime(variable, cursor):
if variable.startswith("admin"):
cursor.execute("LOAD ADMIN VARIABLES TO RUNTIME")
else:
cursor.execute("LOAD MYSQL VARIABLES TO RUNTIME")
return True
def check_config(variable, value, cursor):
query_string = \
"""SELECT count(*) AS `variable_count`
FROM global_variables
WHERE variable_name = %s and variable_value = %s"""
query_data = \
[variable, value]
cursor.execute(query_string, query_data)
check_count = cursor.fetchone()
return (int(check_count['variable_count']) > 0)
def get_config(variable, cursor):
query_string = \
"""SELECT *
FROM global_variables
WHERE variable_name = %s"""
query_data = \
[variable, ]
cursor.execute(query_string, query_data)
row_count = cursor.rowcount
resultset = cursor.fetchone()
if row_count > 0:
return resultset
else:
return False
def set_config(variable, value, cursor):
query_string = \
"""UPDATE global_variables
SET variable_value = %s
WHERE variable_name = %s"""
query_data = \
[value, variable]
cursor.execute(query_string, query_data)
return True
def manage_config(variable, save_to_disk, load_to_runtime, cursor, state):
if state:
if save_to_disk:
save_config_to_disk(variable, cursor)
if load_to_runtime:
load_config_to_runtime(variable, cursor)
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default=None, type='str'),
login_password=dict(default=None, no_log=True, type='str'),
login_host=dict(default="127.0.0.1"),
login_unix_socket=dict(default=None),
login_port=dict(default=6032, type='int'),
config_file=dict(default="", type='path'),
variable=dict(required=True, type='str'),
value=dict(),
save_to_disk=dict(default=True, type='bool'),
load_to_runtime=dict(default=True, type='bool')
),
supports_check_mode=True
)
perform_checks(module)
login_user = module.params["login_user"]
login_password = module.params["login_password"]
config_file = module.params["config_file"]
variable = module.params["variable"]
value = module.params["value"]
save_to_disk = module.params["save_to_disk"]
load_to_runtime = module.params["load_to_runtime"]
cursor = None
try:
cursor = mysql_connect(module,
login_user,
login_password,
config_file,
cursor_class=MySQLdb.cursors.DictCursor)
except MySQLdb.Error as e:
module.fail_json(
msg="unable to connect to ProxySQL Admin Module.. %s" % to_native(e)
)
result = {}
if not value:
try:
if get_config(variable, cursor):
result['changed'] = False
result['msg'] = \
"Returned the variable and it's current value"
result['var'] = get_config(variable, cursor)
else:
module.fail_json(
msg="The variable \"%s\" was not found" % variable
)
except MySQLdb.Error as e:
module.fail_json(
msg="unable to get config.. %s" % to_native(e)
)
else:
try:
if get_config(variable, cursor):
if not check_config(variable, value, cursor):
if not module.check_mode:
result['changed'] = set_config(variable, value, cursor)
result['msg'] = \
"Set the variable to the supplied value"
result['var'] = get_config(var | iable, cursor)
manage_config(variable,
save_to_disk,
load_to_runtime,
| cursor,
|
vienin/vlaunch | tools/createvm.py | Python | gpl-2.0 | 5,196 | 0.013857 | #!/usr/bin/env python
# UFO-launcher - A multi-platform virtual machine launcher for the UFO OS
#
# Copyright (c) 2008-2009 Agorabox, Inc.
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import sys
import xml.dom.minidom as dom
import uuid as uuid_lib
import os
from optparse import OptionParser
import logging
import shutil
def set_xml_attr (xmlfile, element, attr, value):
xml = dom.parse(xmlfile)
element = xml.getElementsByTagName(element)[0]
element.setAttribute(attr, value)
open(xmlfile, 'w').write(xml.toxml())
# CREATE VM SCRIPT
usage = "%prog -v vm_name -o vbox_user_home (full path) [ -s WIN|MAC ][ -f virtual_disk_file -t virtual_disk_format (VDI|VMDK)]"
description = "Create a virtual box user home with specified vm"
version="%prog 0.1"
# Define options
parser = OptionParser(usage=usage, description=description, version=version)
parser.add_option("-o", "--vbox-user-home", dest="home", default="",
help="virtual box home directory")
parser.add_option("-v", "--virtual-machine", dest="vm", default="",
help="virtual machine name")
parser.add_option("-f", "--disk-file", dest="hd", default="",
help="virtual disk file path")
parser.add_option("-t", "--disk-format", dest="type", default="VMDK",
help="virtual disk format")
parser.add_option("-s", "--os", dest="os", default="LIN",
help="target os type")
(options, args) = parser.parse_args()
if options.vm == "" or options.home == "":
parser.error("You must specify a vbox uer home and the machine name")
# redirect vbox user home
os.environ["VBOX_USER_HOME"] = options.home
# create vm
print ("VBoxManage createvm -name " + options.vm + " -ostype Fedora -register")
os.system("VBoxManage createvm -name " + options.vm + " -ostype Fedora -register")
os.system("VBoxManage modifyvm " + options.vm + " -ioapic on -boot1 disk -boot2 none -boot3 none -vram 32 -memory 1024 -nictype1 82540EM -biosbootmenu disabled -audio pulse") # -usb on -usbehci on
# add disk
if options.hd != "":
os.system("VBoxManage openmedium disk " + options.hd)
os.system("VBoxManage modifyvm " + options.vm + " -hda " + options.hd)
# Update virtual disk path from absolute to relative
os.mkdir(os.path.join(options.home, "HardDisks"))
#shutil.copy(options.hd, os.path.join(options.home, "HardDisks"))
set_xml_attr(os.path.join(options.home, "VirtualBox.xml"), "HardDisk",
"location", os.path.join("HardDisks", os.path.basename(options.hd)))
set_xml_attr(os.path.join(options.home, "VirtualBox.xml"), "HardDisk",
"format", os.path.basename(options.type))
# setting virtual box extradatas
os.system('VBoxManage setextradata global "GUI/MaxGuestResolution" "any"')
os.system('VBoxMana | ge setextradata global "GUI/Input/AutoCapture" "true"')
os.system('VBoxManage setextradata global "GUI/SuppressMessages" ",remindAboutAutoCapture,confirmInputCapture,remindAboutMouseIntegrationOn,remindAboutMouseIntegrationOff,remindAboutInaccessibleMedia,remindAboutWrongColorDepth,confirmGoingFullscreen"')
os.system('VBoxManage setextradata global "GUI/TrayIcon/Enabled" "false"')
os.system('VBox | Manage setextradata global "GUI/UpdateCheckCount" "2"')
os.system('VBoxManage setextradata global "GUI/UpdateDate" "never"')
# setting virtual machine extradatas
os.system('VBoxManage setextradata ' + options.vm + ' "GUI/SaveMountedAtRuntime" "false"')
os.system('VBoxManage setextradata ' + options.vm + ' "GUI/Fullscreen" "on"')
os.system('VBoxManage setextradata ' + options.vm + ' "GUI/Seamless" "off"')
os.system('VBoxManage setextradata ' + options.vm + ' "GUI/LastCloseAction" "powerOff"')
os.system('VBoxManage setextradata ' + options.vm + ' "GUI/AutoresizeGuest" "on"')
# workaround until we deploy multi-platform build infrastructure
if options.os == "WIN":
set_xml_attr(os.path.join(options.home, "Machines", options.vm, options.vm + ".xml"), "AudioAdapter",
"driver", "DirectSound")
set_xml_attr(os.path.join(options.home, "Machines", options.vm, options.vm + ".xml"), "VirtualBox",
"version", "1.7-windows")
set_xml_attr(os.path.join(options.home, "VirtualBox.xml"), "VirtualBox",
"version", "1.7-windows")
if options.os == "MAC":
set_xml_attr(os.path.join(options.home, "Machines", options.vm, options.vm + ".xml"), "AudioAdapter",
"driver", "CoreAudio")
set_xml_attr(os.path.join(options.home, "Machines", options.vm, options.vm + ".xml"), "VirtualBox",
"version", "1.7-macosx")
set_xml_attr(os.path.join(options.home, "VirtualBox.xml"), "VirtualBox",
"version", "1.7-macosx")
|
tdickers/mitmproxy | mitmproxy/console/grideditor.py | Python | mit | 19,995 | 0.002001 | from __future__ import absolute_import, print_function, division
import copy
import os
import re
import urwid
from mitmproxy import filt
from mitmproxy import script
from mitmproxy import utils
from mitmproxy.console import common
from mitmproxy.console import signals
from netlib.http import cookies
from netlib.http import user_agents
FOOTER = [
('heading_key', "enter"), ":edit ",
('heading_key', "q"), ":back ",
]
FOOTER_EDITING = [
('heading_key', "esc"), ":stop editing ",
]
class TextColumn:
subeditor = None
def __init__(self, heading):
self.heading = heading
def text(self, obj):
return SEscaped(obj or "")
def blank(self):
return ""
def keypress(self, key, editor):
if key == "r":
if editor.walker.get_current_value() is not None:
signals.status_prompt_path.send(
self,
prompt = "Read file",
callback = editor.read_file
)
elif key == "R":
if editor.walker.get_current_value() is not None:
signals.status_prompt_path.send(
editor,
prompt = "Read unescaped file",
callback = editor.read_file,
args = (True,)
)
elif key == "e":
o = editor.walker.get_current_value()
if o is not None:
n = editor.master.spawn_editor(o.encode("string-escape"))
n = utils.clean_hanging_newline(n)
editor.walker.set_current_value(n, False)
editor.walker._modified()
elif key in ["enter"]:
editor.walker.start_edit()
else:
return key
class SubgridColumn:
def __init__(self, headin | g, subeditor):
self.heading = heading
self.subeditor = subeditor
def text(self, obj):
p = cookies._format_pairs(obj, sep="\n")
| return urwid.Text(p)
def blank(self):
return []
def keypress(self, key, editor):
if key in "rRe":
signals.status_message.send(
self,
message = "Press enter to edit this field.",
expire = 1000
)
return
elif key in ["enter"]:
editor.master.view_grideditor(
self.subeditor(
editor.master,
editor.walker.get_current_value(),
editor.set_subeditor_value,
editor.walker.focus,
editor.walker.focus_col
)
)
else:
return key
class SEscaped(urwid.WidgetWrap):
def __init__(self, txt):
txt = txt.encode("string-escape")
w = urwid.Text(txt, wrap="any")
urwid.WidgetWrap.__init__(self, w)
def get_text(self):
return self._w.get_text()[0]
def keypress(self, size, key):
return key
def selectable(self):
return True
class SEdit(urwid.WidgetWrap):
def __init__(self, txt):
txt = txt.encode("string-escape")
w = urwid.Edit(edit_text=txt, wrap="any", multiline=True)
w = urwid.AttrWrap(w, "editfield")
urwid.WidgetWrap.__init__(self, w)
def get_text(self):
return self._w.get_text()[0].strip()
def selectable(self):
return True
class GridRow(urwid.WidgetWrap):
def __init__(self, focused, editing, editor, values):
self.focused, self.editing, self.editor = focused, editing, editor
errors = values[1]
self.fields = []
for i, v in enumerate(values[0]):
if focused == i and editing:
self.editing = SEdit(v)
self.fields.append(self.editing)
else:
w = self.editor.columns[i].text(v)
if focused == i:
if i in errors:
w = urwid.AttrWrap(w, "focusfield_error")
else:
w = urwid.AttrWrap(w, "focusfield")
elif i in errors:
w = urwid.AttrWrap(w, "field_error")
self.fields.append(w)
fspecs = self.fields[:]
if len(self.fields) > 1:
fspecs[0] = ("fixed", self.editor.first_width + 2, fspecs[0])
w = urwid.Columns(
fspecs,
dividechars = 2
)
if focused is not None:
w.set_focus_column(focused)
urwid.WidgetWrap.__init__(self, w)
def get_edit_value(self):
return self.editing.get_text()
def keypress(self, s, k):
if self.editing:
w = self._w.column_widths(s)[self.focused]
k = self.editing.keypress((w,), k)
return k
def selectable(self):
return True
class GridWalker(urwid.ListWalker):
"""
Stores rows as a list of (rows, errors) tuples, where rows is a list
and errors is a set with an entry of each offset in rows that is an
error.
"""
def __init__(self, lst, editor):
self.lst = [(i, set([])) for i in lst]
self.editor = editor
self.focus = 0
self.focus_col = 0
self.editing = False
def _modified(self):
self.editor.show_empty_msg()
return urwid.ListWalker._modified(self)
def add_value(self, lst):
self.lst.append((lst[:], set([])))
self._modified()
def get_current_value(self):
if self.lst:
return self.lst[self.focus][0][self.focus_col]
def set_current_value(self, val, unescaped):
if not unescaped:
try:
val = val.decode("string-escape")
except ValueError:
signals.status_message.send(
self,
message = "Invalid Python-style string encoding.",
expire = 1000
)
return
errors = self.lst[self.focus][1]
emsg = self.editor.is_error(self.focus_col, val)
if emsg:
signals.status_message.send(message = emsg, expire = 1)
errors.add(self.focus_col)
else:
errors.discard(self.focus_col)
self.set_value(val, self.focus, self.focus_col, errors)
def set_value(self, val, focus, focus_col, errors=None):
if not errors:
errors = set([])
row = list(self.lst[focus][0])
row[focus_col] = val
self.lst[focus] = [tuple(row), errors]
self._modified()
def delete_focus(self):
if self.lst:
del self.lst[self.focus]
self.focus = min(len(self.lst) - 1, self.focus)
self._modified()
def _insert(self, pos):
self.focus = pos
self.lst.insert(
self.focus,
[
[c.blank() for c in self.editor.columns], set([])
]
)
self.focus_col = 0
self.start_edit()
def insert(self):
return self._insert(self.focus)
def add(self):
return self._insert(min(self.focus + 1, len(self.lst)))
def start_edit(self):
col = self.editor.columns[self.focus_col]
if self.lst and not col.subeditor:
self.editing = GridRow(
self.focus_col, True, self.editor, self.lst[self.focus]
)
self.editor.master.loop.widget.footer.update(FOOTER_EDITING)
self._modified()
def stop_edit(self):
if self.editing:
self.editor.master.loop.widget.footer.update(FOOTER)
self.set_current_value(self.editing.get_edit_value(), False)
self.editing = False
self._modified()
def left(self):
self.focus_col = max(self.focus_col - 1, 0)
self._modified()
def right(self):
self.focus_col = min(self.focus_col + 1, len(self.editor.columns) - 1)
self._modified()
def tab_next(self):
self.stop_edit()
if self.focus_col < len(self.editor.columns) - 1:
self.focus_col += 1
elif self.focus != len(self.lst) - 1: |
aakechin/BRCA-analyzer | install.py | Python | gpl-3.0 | 2,506 | 0.034318 | # This script configurates BRCA-analyzer
import argparse,os
import subprocess as sp
thisDir=os.path.dirname(os.path.realpath(__file__))+'/'
par=argparse.ArgumentParser(description='This script configures BRCA-analyzer')
par.add_argument('--bwa','-bwa',dest='bwaDir',type=str,help='destination of BWA. If it can be started with command bwa, type 0 (Default: 0)',default='0',required=True)
par.add_argument('--samtools','-sam',dest='samDir',type=str,help='destination of samtools. If it can be started with command samtools, type 0 (Default: 0)',default='0',required=True)
##par.add_argument('--bcftools','-bcf',dest='bcfDir',type=str,help='destination of bcftools. If it can be started with command bcftools, type 0 (Default: 0)',default='0',required=True)
par.add_argument('--picard','-picard',dest='picardDir',type=str,help='destination of picard.jar (version 2.0.1). For example, ~/picard-2.0.1/dist/',required=True)
par.add_argument('--gatk','-gatk',dest='gatkDir',type=str,help='destination of GenomeAnalysisTK.jar (version 3.6). For example, ~/GenomeAnalysisTK-3.6/',required=True)
par.add_argument('--snpeff','-snpeff',dest='snpeffDir',type=str,help='destination of snpEff.jar. For example, ~/snpEff/',required=True)
par.add_argument('--annovar','-annovar',dest='annovarDir',type=str,help='destination of annovar. For example, ~/annovar/',required=True)
par.add_argument('--cutprimers','-cutprimers',dest='cutprimersDir',type=str,help='destination of cutPrimers. For example, ~/cutPrimers/. It is not required. But if you are going to trim primer sequences from BAM-file, we recommend to use it',required=False)
args=par.parse_args()
if args.bwaDir=='0':
args.bwaDir=''
elif args.bwaDir[-1]!='/':
args.bwaDir+='/'
if args.samDir=='0':
args.samDir=''
elif args.samDir[-1]!='/':
args.samDir+='/'
##if args.bcfDir=='0':
## args.bcfDir=''
##elif args.bcfDir[-1]!='/':
## args.bcfDir+='/'
if args.picardDir[-1]!='/':
args.picardDir+='/'
if args.gatkDir[-1]!='/':
args.gatkDir+='/'
if args.snpeffDir[-1]!='/':
args.snpeffDir+='/'
if args.annovarDir[-1]!='/':
args.annov | arDir+='/'
if args.cutprimersDir and args.cutprimersDir[-1]!='/':
args.cutprimersDir+='/'
file=open(thisDir+'config.txt','w')
##file.write('\n'.join([args | .bwaDir,args.samDir,args.bcfDir,args.picardDir,args.gatkDir,args.snpeffDir,args.annovarDir]))
file.write('\n'.join([args.bwaDir,args.samDir,args.picardDir,args.gatkDir,args.snpeffDir,args.annovarDir,args.cutprimersDir]))
file.close()
|
schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Screens/EpgSelection.py | Python | gpl-2.0 | 58,072 | 0.026708 | from time import localtime, time, strftime, mktime
from enigma import eServiceReference, eTimer, eServiceCenter, ePoint
from Screen import Screen
from Screens.HelpMenu import HelpableScreen
from Components.About import about
from Components.ActionMap import HelpableActionMap, HelpableNumberActionMap
from Components.Button import Button
from Components.config import config, configfile, ConfigClock
from Components.EpgList import EPGList, EPGBouquetList, TimelineText, EPG_TYPE_SINGLE, EPG_TYPE_SIMILAR, EPG_TYPE_MULTI, EPG_TYPE_ENHANCED, EPG_TYPE_INFOBAR, EPG_TYPE_INFOBARGRAPH, EPG_TYPE_GRAPH, MAX_TIMELINES
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.ServiceEvent import ServiceEvent
from Components.Sources.Event import Event
from Components.UsageConfig import preferredTimerPath
from Screens.TimerEdit import TimerSanityConflict
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.PictureInPicture import PictureInPicture
from Screens.Setup import Setup
from TimeDateInput import TimeDateInput
from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT
from TimerEntry import TimerEntry, InstantRecordTimerEntry
from ServiceReference import ServiceReference
mepg_config_initialized = False
# PiPServiceRelation installed?
try:
from Plugins.SystemPlugins.PiPServiceRelation.plugin import getRelationDict
plugin_PiPServiceRelation_installed = True
except:
plugin_PiPServiceRelation_installed = False
class EPGSelection(Screen, HelpableScreen):
EMPTY = 0
ADD_TIMER = 1
REMOVE_TIMER = 2
ZAP = 1
def __init__(self, session, service = None, zapFunc = None, eventid = None, bouquetChangeCB=None, serviceChangeCB = None, EPGtype = None, StartBouquet = None, StartRef = None, bouquets = None):
Screen.__init__(self, session)
HelpableScreen.__init__(self)
self.zapFunc = zapFunc
self.serviceChangeCB = serviceChangeCB
self.bouquets = bouquets
graphic = False
if EPGtype == 'single':
self.type = EPG_TYPE_SINGLE
elif EPGtype == 'infobar':
self.type = EPG_TYPE_INFOBAR
elif EPGtype == 'enhanced':
self.type = EPG_TYPE_ENHANCED
elif EPGtype == 'graph':
self.type = EPG_TYPE_GRAPH
if config.epgselection.graph_type_mode.value == "graphics":
graphic = True
elif EPGtype == 'infobargraph':
self.type = EPG_TYPE_INFOBARGRAPH
if config.epgselection.infobar_type_mode.value == "graphics":
graphic = True
elif EPGtype == 'multi':
self.type = EPG_TYPE_MULTI
else:
self.type = EPG_TYPE_SIMILAR
if not self.type == EPG_TYPE_SINGLE:
self.StartBouquet = StartBouquet
self.StartRef = StartRef
self.servicelist = None
self.longbuttonpressed = False
self.ChoiceBoxDialog = None
self.ask_time = -1
self.closeRecursive = False
self.eventviewDialog = None
self.eventviewWasShown = False
self.currch = None
self.session.pipshown = False
self.cureventindex = None
if plugin_PiPServiceRelation_installed:
self.pipServiceRelation = getRelationDict()
else:
self.pipServiceRelation = {}
self.zapnumberstarted = False
self.NumberZapTimer = eTimer()
self.NumberZapTimer.callback.append(self.dozumberzap)
self.NumberZapField = None
self.CurrBouquet = None
self.CurrService = None
self["number"] = Label()
self["number"].hide()
self['Service'] = ServiceEvent()
self['Event'] = Event()
self['lab1'] = Label(_('Please wait while gathering data...'))
self.key_green_choice = self.EMPTY
self['key_red'] = Button(_('IMDb Search'))
self['key_green'] = Button(_('Add Timer'))
self['key_yellow'] = Button(_('EPG Search'))
self['key_blue'] = Button(_('Add AutoTimer'))
self['dialogactions'] = HelpableActionMap(self, 'WizardActions',
{
'back': (self.closeChoiceBoxDialog, _('Close dialog')),
}, -1)
self['dialogactions'].csel = self
self["dialogactions"].setEnabled(False)
self['okactions'] = HelpableActionMap(self, 'OkCancelActions',
{
'cancel': (self.closeScreen, _('Exit EPG')),
'OK': (self.OK, _('Zap to channel (setup in menu)')),
'OKLong': (self.OKLong, _('Zap to channel and close (setup in menu)'))
}, -1)
self['okactions'].csel = self
self['colouractions'] = HelpableActionMap(self, 'ColorActions',
{
'red': (self.redButtonPressed, _('IMDB search for current event')),
'redlong': (self.redlongButtonPressed, _('Sort EPG List')),
'green': (self.greenButtonPressed, _('Add/Remove timer for current event')),
'yellow': (self.yellowButtonPressed, _('Search for similar events')),
'greenlong': (self.showTimerList, _('Show Timer List')),
'blue': (self.blueButtonPressed, _('Add a auto timer for current event')),
'bluelong': (self.blueButtonPressedLong, _('Show AutoTimer List'))
}, -1)
self['colouractions'].csel = self
self['recordingactions'] = HelpableActionMap(self, 'InfobarInstantRecord',
{
'ShortRecord': (self.recButtonPressed, _('Add a record timer for current event')),
'LongRecord': (self.reclongButtonPressed, _('Add a zap timer for current event'))
}, -1)
self['recordingactions'].csel = self
if self.type == EPG_TYPE_SIMILAR:
self.currentService = service
self.eventid = eventid
self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
{
'info': (self.Info, _('Show detailed event info')),
'infolong': (self.InfoLong, _('Show single epg for current channel')),
'menu': (self.createSetup, _('Setup menu'))
}, -1)
self['epgactions'].csel = self
elif self.type == EPG_TYPE_SINGLE:
self.currentService = ServiceReference(service)
self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
{
'info': (self.Info, _('Show detailed event info')),
'epg': (self.Info, _('Show detailed event info')),
'menu': (self.createSetup, _('Setup menu'))
}, -1)
self['epgactions'].csel = self
self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
{
'left': (self.prevPage, _('Move up a page')),
'right': (self.nextPage, _('Move down a page')),
'up': (self.moveUp, _('Goto previous channel')),
'down': (self.moveDown, _('Goto next channel'))
}, -1)
self['epgcursoractions'].csel = self
elif self.type == EPG_TYPE_INFOBAR or self.type == EPG_TYPE_ENHANCED:
if self.type == EPG_TYPE_INFOBAR:
self.skinName = 'QuickEPG'
self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
{
'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
'nextService': (self.nextPage, _('Move down a page')),
'prevService': (self.prevPage, _('Move up a page')),
'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
'info': (self.Info, _('Show detailed event info')),
'infolong': (self.InfoLong, _('Show single epg for current channel')),
'menu': (self.createSetup, _('Setup menu'))
}, -1)
self['epgactions'].csel = se | lf
self['epgcursoractions'] = HelpableActionMap(self, 'DirectionActions',
{
'left': (self.prevService, _('Goto previous channel')),
'right': (self.nextService, _('Goto next channel')),
| 'up': (self.moveUp, _('Goto previous channel')),
'down': (self.moveDown, _('Goto next channel'))
}, -1)
self['epgcursoractions'].csel = self
elif self.type == EPG_TYPE_ENHANCED:
self['epgactions'] = HelpableActionMap(self, 'EPGSelectActions',
{
'nextBouquet': (self.nextBouquet, _('Goto next bouquet')),
'prevBouquet': (self.prevBouquet, _('Goto previous bouquet')),
'nextService': (self.nextService, _('Goto next channel')),
'prevService': (self.prevService, _('Goto previous channel')),
'input_date_time': (self.enterDateTime, _('Goto specific data/time')),
'info': (self.Info, _('Show detailed event info')),
'infolong': (self.InfoLong, _('Show single epg for current channel')),
'menu': (self.createSetup, _('Setup menu'))
}, -1)
self['epgactions'].csel = self
self['epgcursoractions'] = HelpableActionMap(sel |
SELO77/seloPython | 3.X/ex/timeEx.py | Python | mit | 198 | 0.030303 | from time | import time
timeShow = time()
print(timeShow)
print(type(range(0,5)))
# for item in range(0,5):
# print(item)
for index,item in enumerate(range(0,5)):
print(index)
print(item)
| |
LovingYoung/xdom-mn | xdommn/data.py | Python | mit | 1,135 | 0.000881 | class SingletonType(type):
def __call__(cls, *args, **kwargs):
try:
return cls.__instance
except AttributeError:
cls.__instance = super(SingletonType, cls).__call__(
*args, **kwargs)
return cls.__instance
class Data(object):
__metaclass__ = SingletonType
def __init__(self, | controlle | rs=None):
if controllers is not None:
self.controllers = controllers
else:
self.controllers = dict()
self.name_map = dict()
self.reverse_name_map = dict()
self.name_map_index = 1
def getNextName(self, node_name, prefix):
name = "%s%d" % (prefix, self.name_map_index)
self.name_map[node_name] = name
self.reverse_name_map[name] = node_name
self.name_map_index += 1
return name
def getBackEndName(self, node_name):
return self.name_map[node_name]
def getFrontEndName(self, base_name):
return self.reverse_name_map[base_name]
def addSameName(self, name):
self.name_map[name] = name
self.reverse_name_map[name] = name |
hasura/quickstart-docker-git | python-flask/app/conf/gunicorn_config.py | Python | mit | 275 | 0.003636 | impo | rt os
import multiprocessing
bind = "0.0.0.0:8080"
workers = (multiprocessing.cpu_count() * 2) + 1
accesslog = "-"
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
loglevel = "debug"
capture_output = True
enable_stdio_inheritan | ce = True
|
lucienfostier/gaffer | python/GafferUI/ScrolledContainer.py | Python | bsd-3-clause | 5,893 | 0.034787 | ##########################################################################
#
# Copyright (c) 2011, John Haddon. All rights reserved.
# Copyright (c) 2011-2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import GafferUI
from Qt import QtCore
from Qt import QtGui
from Qt import QtWidgets
class ScrolledContainer( GafferUI.ContainerWidget ) :
ScrollMode = GafferUI.Enums.ScrollMode
def __init__( self, horizontalMode=GafferUI.ScrollMode.Automatic, verticalMode=GafferUI.ScrollMode.Automatic, borderWidth=0, **kw ) :
GafferUI.ContainerWidget.__init__( self, _ScrollArea(), **kw )
self._qtWidget().setViewportMargins( borderWidth, borderWidth, borderWidth, borderWidth )
self._qtWidget().setWidgetResizable( True )
self.setHorizontalMode( horizontalMode )
self.setVerticalMode( verticalMode )
self.__child = None
def addChild( self, child ) :
if self.getChild() is not None :
raise Exception( "ScrolledContainer can only hold one child" )
self.setChild( child )
def removeChild( self, child ) :
assert( child is self.__child )
child._qtWidget().setParent( None )
child._applyVisibility()
self.__child = None
def setChild( self, child ) :
if self.__child :
self.removeChild( self.__child )
if child is not None :
oldParent = child.parent()
if oldParent is not None :
oldParent.removeChild( child )
self._qtWidget().setWidget( child._qtWidget() )
child._applyVisibility()
self.__child = child
def getChild( self ) :
return self.__child
def setHorizontalMode( self, mode ) :
self._qtWidget().setHorizon | talScrollBarPolicy( GafferUI.ScrollMode._toQt( mode ) )
def getHorizontalMode( self ) :
p = self._qtWidget().horizontalScrollBarPolicy()
return GafferUI.ScrollMode._fromQt( p[0] )
def setVerticalMode( self, mode ) :
self._qtWidget().setVerticalScrollBarP | olicy( GafferUI.ScrollMode._toQt( mode ) )
def getVerticalMode( self ) :
p = self._qtWidget().verticalScrollBarPolicy()
return GafferUI.ScrollMode._fromQt( p[1] )
def _revealDescendant( self, descendant ) :
self._qtWidget().ensureWidgetVisible( descendant._qtWidget() )
# Private implementation - a QScrollArea derived class which is a bit more
# forceful about claiming size - it always asks for enough to completely show
# the contained widget.
class _ScrollArea( QtWidgets.QScrollArea ) :
def __init__( self ) :
QtWidgets.QScrollArea.__init__( self )
self.__marginLeft = 0
self.__marginRight = 0
self.__marginTop = 0
self.__marginBottom = 0
def setWidget( self, widget ) :
QtWidgets.QScrollArea.setWidget( self, widget )
widget.installEventFilter( self )
def setViewportMargins( self, left, top, right, bottom ) :
QtWidgets.QScrollArea.setViewportMargins( self, left, top, right, bottom )
self.__marginLeft = left
self.__marginRight = right
self.__marginTop = top
self.__marginBottom = bottom
def sizeHint( self ) :
w = self.widget()
if not w :
return QtWidgets.QScrollArea.sizeHint( self )
wSize = w.sizeHint()
width = self.__marginLeft + self.__marginRight + wSize.width()
if self.verticalScrollBarPolicy()==QtCore.Qt.ScrollBarAlwaysOn :
width += self.verticalScrollBar().sizeHint().width()
height = self.__marginTop + self.__marginBottom + wSize.height()
if self.horizontalScrollBarPolicy()==QtCore.Qt.ScrollBarAlwaysOn :
height += self.horizontalScrollBar().sizeHint().height()
return QtCore.QSize( width, height )
def eventFilter( self, widget, event ) :
if widget is self.widget() and isinstance( event, QtGui.QResizeEvent ) :
# Ask for our geometry to be recalculated if possible. This allows
# us to expand and contract with our child.
self.updateGeometry()
# I don't know why this is necessary. If it's removed then when the
# child widget resizes and the ScrolledContainer is resized up to fit,
# the scroll bar flickers on and off briefly. This can be seen in the
# OpDialogue with any op with collapsible parameter sections. Ideally
# we would find a better fix, or at least understand this one.
while widget is not None :
if widget.layout() is not None :
widget.layout().invalidate()
widget = widget.parent()
return False
|
igemsoftware/SYSU-Software2013 | project/Python27_32/web/encrypt.py | Python | mit | 2,590 | 0.028958 | #coding=utf-8
##
# @file encrypt.py
# @brief tools for encrypting
# @author Jiexin Guo
# @version 1.0
# @date 2013-07-28
# @copyright 2013 SYSU-Software. All rights reserved.
# This project is released under MIT License.
import rsa
import os,sys
import binascii
import hashlib
base = [str(x) for x in range(10)] + [ chr(x) for x in range(ord('A'),ord('A')+6)]
# --------------------------------------------------------------------------
##
# @brief get the SHA1 digest of a string
#
# @param pwd the string need to get its SHA1
#
# @returns return the SHA1 digest of a string
#
# --------------------------------------------------------------------------
def getPasswordSHA1(pwd):
if len(pwd)!=40:
m = hashlib.sha1()
m.update(pwd)
return m.hexdigest()
else:
return pwd
# --------------------------------------------------------------------------
##
# @brief get a string of number to its hex value string
#
# @param string_num string of number
#
# @returns return the hex string
#
# --------------------------------------------------------------------------
def dec2hex(string_num):
num = int(string_num)
mid = []
while True:
if num == 0: break
num,rem = divmod(num, 16)
mid.append(base[rem])
return ''.join([str(x) for x in mid[::-1]])
# --------------------------------------------------------------------------
##
# @brief the class that can prov | ide RSA method
# ----------------------------------------------------------------------------
class Encrypt:
def __init__ (self):
(pub_key, priv_key) = rsa.newkeys(1024)
self.publicKey=pub_key
self.privateKey=priv_key
# ---------- | ----------------------------------------------------------------
##
# @brief get the public key
#
# @param self
#
# @returns return the public key
#
# --------------------------------------------------------------------------
def getPublicKey(self):
return self.publicKey
# --------------------------------------------------------------------------
##
# @brief decrypt a string
#
# @param self
# @param crypto the crypto string
#
# @returns return the original string using the privateKey
#
# --------------------------------------------------------------------------
def decrypt(self,crypto):
crypto = binascii.a2b_hex(crypto)
print crypto
return rsa.decrypt(crypto,self.privateKey)
if __name__ == "__main__":
(pub_key, priv_key) = rsa.key.newkeys(128)
print dec2hex(pub_key.n)
print len('ba424b4defb0f99797400547324c4a9b7264ef8a')
#crypto = rsa.encrypt('hello', pub_key)
#print rsa.decrypt(crypto, priv_key)
|
ahaym/eden | languages/km.py | Python | mit | 359,695 | 0.019014 | # -*- coding: utf-8 -*-
{
"A location that specifies the geographic area for this region. This can be a location from the location hierarchy, or a 'group location', or a location that has a boundary for the area.": "ទីតាំងដែលបញ្ជាក់ផ្ទៃភូមិសាស្ត្រសម្រាប់តំបន់នេះ ។ វាអាចជាទីតាំងមកពីឋានានុក្រមទីតាំង ឬ 'ក្រុមទីតាំង' ឬទីតាំងដែលមានព្រំដែនសម្រាប់តំបន់នេះ ។",
"A volunteer is defined as active if they've participated in an average of 8 or more hours of Program work or Trainings per month in the last year": 'អ្នកស្ម័គ្រចិត្តត្រូវបានកំណត់ថាជាមនុស្សសកម្ម លុះត្រាតែពួកគេបានចូលរួមនៅក្នុងការងាររបស់កម្មវិធីឲ្យបានជាមធ្យម 8 ម៉ោង និងលើសពីនេះ ឬនៅក្នុងវគ្គបណ្ដុះបណ្ដាលមួយខែម្ដងក្នុងឆ្នាំកន្លងទៅ ',
"Acronym of the organization's name, eg. IFRC.": 'អក្សរកាត់របស់ឈ្មោះអង្គការ ឧ. IFRC ។',
"Add Person's Details": 'បន្ថែមសេចក្ដីលម្អិតអំពីមនុស្សថ្មី',
"Add Person's Details": 'បន្ថែមសេចក្ដីលម្អិតអំពីមនុស្ស',
"Address of an image to use for this Layer in the Legend. This allows use of a controlled static image rather than querying the server automatically for what it provides (which won't work through GeoWebCache anyway).": 'អាសយដ្ឋានរូបភាពត្រូវបានប្រើសម្រាប់ស្រទាប់នៅក្នុងគំនូសតាងនេះ ។ វាអនុញ្ញាតឲ្យអ្នកប្រើរូបភាពឋិតិវន្ដដែលបានឆ្លងកាត់ការត្រួតពិនិត្យជាជាងការសុំឲ្យម៉ាស៊ីនបម្រើផ្ដល់នូវអ្វីដែលវាអាចផ្ដល់ដោយស្វ័យប្រវត្តិ (ទោះយ៉ាងណា វានឹងមិនដំណើរការតាមរយៈ GeoWebCache នោះទេ) ។',
"Children's Education": 'ការអប់រំកុមារ',
"Couldn't open %s!": "'មិនអាចបើក %s!",
"Create 'More Info'": "បង្កើត 'ព័ត៌មានបន្ថែម'",
"Edit 'More Info'": "កែសម្រួល 'ព័ត៌មានបន្ថែម'",
"Edit Person's Details": 'កែសម្រួលសេចក្ដីលម្អិតរបស់មនុស្សម្នាក់',
"Enter a name to search for. You may use % as wildcard. Press 'Search' without input to list all items.": "បញ្ចូលឈ្មោះសម្រាប់ស្វែងរក ។ អ្នកអាចនឹងប្រើ % ជាតួអក្សរជំនួស ។ ចុច 'ស្វែងរក' ដោយមិនចាំបាច់បញ្ចូលធាតុទាំងអស់ ។",
"Go to %(url)s, sign up & then register your application. You can put any URL in & you only need to select the 'modify the map' permission.": "ទៅកាន់ %(url)s ចុះឈ្មោះ បន្ទាប់មកចុះឈ្មោះកម្មវិធីរបស់អ្នក ។ អ្នកអាចវាយបញ្ចូល URL ណាមួយក៏បាន ដោយអ្នកគ្រាន់តែជ្រើសសិទ្ធិ 'ធ្វើការកែប្រែផែនទី' ប៉ុណ្ណោះ ។",
"If this configuration is displayed on the GIS config menu, give it a name to use in the menu. The name for a personal map configuration will be set to the user's name.": 'ប្រសិនបើការកំណត់រចនាសម្ព័ន្ធត្រូវបានបង្ហាញនៅលើម៉ឺនុយកំណត់រចនាសម្ព័ន្ធ GIS សូមផ្ដល់ឈ្មោះដល់វា ដើម្បីប្រើនៅក្នុងម៉ឺនុយ ។ ឈ្មោះសម្រាប់ការកំណត់រចនសម្ព័ន្ធផែនទីផ្ទាល់ខ្លួននឹងត្រូវបានកំណត់ទៅជាឈ្មោះរបស់អ្នកប្រើ ។',
"If this field is populated then a user who specifies this Organization when signing up will be assigned as a Staff of this Organization unless their domain doesn't match the domain field.": 'ប្រសិនបើវាលនេះត្រូវបានគណនា ពេលនោះមានតែអ្នកប្រើដែលបញ្ជាក់អំពីអង្គការនេះ ក្នុងពេលចុះឈ្មោះនឹងត្រូវបានកំណត់ជាបុគ្គលិករបស់អង្គការនេះ លុះត្រាតែដែនរបស់ពួកគេមិនដំណូចនឹងវាលដែននេះ ។',
"If you don't see the Cluster in the list, you can add a new one by clicking link 'Add New Cluster'.": "ប្រសិនបើអ្នកមិនឃើញ ក្រុមនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមបញ្ជីថ្មីដោយចុចលើតំណ 'បន្ថែមក្រុមថ្មី' ។",
"If you don't see the Organization in the list, you can add a new one by clicking link 'Create Organization'.": "ប្រសិនបើអ្នកមិនឃើញ អង្គការនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមអង្គការថ្មីដោយចុចលើតំណ 'បន្ថែមអង្គការ' ។",
"If you don't see the Sector in the list, you can add a new one by clicking link 'Create Sector'.": "ប្រសិនបើអ្នកមិនឃើញ ផ្នែកនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមផ្នែកថ្មីដោយចុចលើតំណ 'បន្ថែមផ្នែកថ្មី' ។",
"If you don't see the Type in the list, you can add a new one by clicking link 'Add Region'.": "ប្រសិនបើអ្នកមិនឃើញប្រភេទនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមប្រភេទថ្មីដោយចុចលើតំណ 'បន្ថែមតំបន់' ។",
"If you don't see the Type in the list, you can add a new one by clicking link 'Create Facility Type'.": "ប្រសិនបើអ្នកមិនឃើញ ប្រភេទនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមប្រភេទថ្មីដោយចុចលើតំណ 'បន្ថែមប្រភេទបរិក្ខារ' ។",
"If you don't see the Type in the list, you can add a new one by clicking link 'Create Office Type'.": "ប្រសិនបើអ្នកមិនឃើញប្រភេទនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមប្រភេទថ្មីដោយចុចលើតំណ 'បន្ថែមប្រភេទការិយាល័យ' ។",
"If you don't see the Type in the list, you can add a new one by clicking link 'Create Organization Type'.": "ប្រសិនបើអ្នកមិនឃើញប្រភេទនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមប្រភេទថ្មីដោយចុចលើតំណ 'បន្ថែមប្រភេទអង្គការ' ។",
"If you don't see the activity in the list, you can add a new one by clicking link 'Create Activity'.": "ប្រសិនបើអ្នកមិនឃើញសកម្មភាពនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមសកម្មភាពថ្មីដោយចុចលើតំណ 'បន្ថែមសកម្មភាព' ។",
"If you don't see the beneficiary in the list, you can add a new one by clicking link 'Add Beneficiaries'.": "ប្រសិនបើអ្នកមិនឃើញអ្នកទទួលផលនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមអ្នកទទួលផលថ្មីដោយចុចលើតំណ 'បន្ថែមអ្នកទទួលផល' ។",
"If you don't see the campaign in the list, you can add a new one by clicking link 'Create Campaign'.": "ប្រសិនបើអ្នកមិនឃើញយុទ្ធនាការនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមយុទ្ធនាការដោយចុចលើតំណ 'បន្ថែមយុទ្ធនាការ' ។",
"If you don't see the community in the list, you can add a new one by clicking link 'Create Community'.": "ប្រសិនបើអ្នកមិនឃើញសហគមន៍នៅក្នុងបញ្ជីនេះទេ អ្នកអាចប | ន្ថែមសហគមន៍ថ្មីដោយចុចលើតំណ 'បន្ថែមសហគមន៍' ។",
"If you don't see the location in the list, you can add a new one by clicking link 'Create Location'.": "ប្រសិនបើអ្នកមិនឃើញទីតាំងនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមប | ញ្ជីថ្មីដោយចុចលើតំណ 'បន្ថែមទីតាំង' ។",
"If you don't see the milestone in the list, you can add a new one by clicking link 'Create Milestone'.": "ប្រសិនបើអ្នកមិនឃើញគោលវាស់វែងនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមគោលវាស់វែងថ្មីដោយចុចលើតំណ 'បន្ថែមគោលវាស់វែង' ។",
"If you don't see the project in the list, you can add a new one by clicking link 'Create Project'.": "ប្រសិនបើអ្នកមិនឃើញគម្រោងនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមគម្រោងថ្មីដោយចុចលើតំណ 'បន្ថែមគម្រោង' ។",
"If you don't see the type in the list, you can add a new one by clicking link 'Create Activity Type'.": "ប្រសិនបើអ្នកមិនឃើញប្រភេទនៅក្នុងបញ្ជីនេះទេ អ្នកអាចបន្ថែមប្រភេទថ្មីដោយចុចលើតំណ 'បន្ថែមប្រភេទសកម្មភាព' ។",
"If you enter a foldername then the layer will appear in this folder in the Map's layer switcher. A sub-folder can be created by separating names with a '/'": "ប្រសិនបើអ្នកបញ្ចូលឈ្មោះថត ពេលនោះស្រទាប់នឹងបង្ហាញនៅលើថតនៅក្នុងកម្មវិធីប្ដូរស្រទាប់របស់ផែនទី ។ ថតរងអាចត្រូវបានបង្កើតឡើងដោយការញែកឈ្មោះដោយអក្សរ '/'",
"Last Month's Work": 'ការងារខែមុន',
"Last Week's Work": 'ការងារកាលពីសប្ដាហ៍មុន',
"Level is higher than parent's": 'កម្រិតនេះខ្ពស់ជាងកម្រិតមេ',
"List Persons' Details": 'រាយបញ្ជីសេចក្ដីលម្អិតអំពីមនុស្សម្នាក់ៗ',
"Need a 'url' argument!": "ត្រូវការអាគុយម៉ង់ 'url'!",
"No UTC offset found. Please set UTC offset in your 'User Profile' details. Example: UTC+0530": "រកមិនឃើញអុហ្វសិត UTC ទេ ។ សូមកំណត់អុហ្វសិត UTC នៅក្នុងសេចក្ដីលម្អិតអំពី 'ទម្រង់អ្នកប្រើ' របស់អ្នក ។ ឧទាហរណ៍ ៖ UTC+0530",
"Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.": "ជាជម្រើស ។ ឈ្មោះជួរឈរធរណីមាត្រ ។ នៅក្នុង PostGIS វាគឺជាលំនាំដើមសម្រាប់ 'the_geom' ។",
"Parent level should be higher than this record's level. Parent level is": 'កម្រិតមេគួរតែខ្ពស់ជាងកម្រិតរបស់កំណត់ត្រានេះ ។ កម្រិតមេគឺ',
"Person's Details added": 'សេចក្ដីលម្អិតរបស់មនុស្សត្រូវបានបន្ថែម',
"Person's Details deleted": 'សេចក្ដីលម្អិតរបស់មនុស្សត្រូវបានលុប',
"Person's Details updated": 'សេចក្ដីលម្អិតរបស់មនុស្សត្រូវបានធ្វើបច្ចុប្បន្នភាព',
"Person's Details": 'សេចក្ដីលម្អិតរបស់មនុស្ស',
"Persons' Details": 'សេចក្ដីលម្អិតរបស់មនុស្ស',
"Please provide as much detail as you can, including the URL(s) where the bug occurs or you'd like the new feature to go.": 'សូមផ្ដល់សេចក្ដីលម្អិតឲ្យបានច្រើនច្រើនតាមតែអាចធ្វើបាន ដោយរួមបញ្ចួល URL(s) ដែលជាកន្លែងដែលកំហុសកើតឡើង ឬអ្នកអាចជ្រើសលក្ខណពិសេសថ្មី ដើម្បីទៅបន្ត ។',
"Policy or |
Oisota/Breakout | setup.py | Python | gpl-3.0 | 2,098 | 0.029075 | """
Setup Module
This module is used to make a distribution of
the game using distutils.
"""
from distutils.core import setup
setup(
name = 'Breakout',
version = '1.0',
description = 'A remake of the classic video game',
author = 'Derek Morey',
author_email = 'dman6505@gmail.com',
license = 'GPL',
url = 'https://github.com/Oisota/Breakout',
download_url = 'https://github.com/Oisota/Breakout/archive/master.zip',
keywords = ['breakout', 'arcade', 'game', 'pygame', 'python',],
platforms = ['linux', 'windows'],
scripts = ['breakout.py','breakout-editor.py'],
packages = ['breakout','breakout.game','breakout.utils','breakout.editor'],
package_data = {'breakout':['assets/images/*.gif',
'assets/images/*.png',
'assets/sounds/*.wav',
'assets/levels/*.json']},
requires = ['sys', 'os', 'random', 'tkinter', 'pygame', 'json'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Framework :: | Pygame',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Appr | oved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Arcade'],
long_description =
"""
Breakout
--------
This is a remake of the classic game Breakout. I made this game for the sole
purpose of educating myself about python, pygame, and game development in general.
Feel free to use or modify my code in any way.
"""
)
|
WayneDW/Sentiment-Analysis-in-Event-Driven-Stock-Price-Movement-Prediction | archived/preprocessing_v2.py | Python | mit | 8,714 | 0.009066 | #!/usr/bin/python
import os
import en
import nltk
import json
import numpy as np
import operator
import datetime
from sklearn.feature_extraction.text import CountVectorizer
from nltk.corpus import reuters
from keras.preprocessing import sequence
from crawler_reuters import news_Reuters
# def gen_financial_top_words(maxN=40000): # generate corpus based on Reuters news
# if not os.path.isfile('./input/topWords.json'):
# wordCnt = {}
# for field in reuters.fileids():
# for word in reuters.words(field):
# word = unify_word(word)
# if word in nltk.corpus.stopwords.words('english'):
# continue
# wordCnt[word] = wordCnt.get(word, 0) + 1
# sorted_wordCnt = sorted(wordCnt.items(), key=operator.itemgetter(1), reverse=True)
# wordCnt = {} # reset wordCnt
# for i in sorted_wordCnt[:maxN]: wordCnt[i[0]] = i[1] # convert list to dict
# with open('./input/topWords.json', 'w') as fout: json.dump(wordCnt, fout, indent=4)
# else: return
def unify_word(word): # went -> go, apples -> apple, BIG -> big
try: word = en.verb.present(word) # unify tense
except: pass
try: word = en.noun.singular(word) # unify noun
except: pass
return word.lower()
def dateGenerator(numdays): # generate N days until now, eg [20151231, 20151230]
base = datetime.datetime.today()
date_list = [base - datetime.timedelta(days=x) for x in range(0, numdays)]
for i in range(len(date_list)): date_list[i] = date_list[i].strftime("%Y%m%d")
return set(date_list)
'''
The following function is a little complicated.
It consists of the following steps
1, load top words dictionary, load prices data to make correlation
2, build feature matrix for training data
2.1 tokenize sentense, check if the word belongs to the top words, unify the format of words
2.2 create word2idx/idx2word list, and a list to count the occurence of words
2.3 concatenate multi-news into a single one if they happened at the same day
2.4 limit the vocabulary size to e.g. 2000, and let the unkown words as the last one
2.5 map full dict to truncated dict, pad the sequence to the same length, done
3, project the test feature in the word2idx for the traning data
'''
def build_FeatureMatrix(max_words=60, n_vocab=2000):
# step 1, load top words dictionary, load prices data to make correlation
if not os.path.isfile('./input/topWords.json'):
gen_financial_top_words()
# with open('./input/topWords.json') as data_file:
# topWords = json.load(data_file)
with open('./input/stockPrices.json') as data_file:
priceDt = json.l | oad(data_file)
# step 2, build feature matrix for training data
loc = './input/'
input_files = [f for f in | os.listdir(loc) if f.startswith('news_reuters.csv')]
word2idx = {'START': 0, 'END': 1}
idx2word = ['START', 'END']
current_idx = 2
word_idx_cnt = {0: float('inf'), 1: float('inf')}
dp = {} # only consider one news for a company everyday
cnt = 0
testDates = dateGenerator(100)
stopWords = set(nltk.corpus.stopwords.words('english'))
for file in input_files:
for line in open(loc + file):
line = line.strip().split(',')
if len(line) != 5: continue
ticker, name, day, headline, body = line
if ticker not in priceDt: continue # skip if no corresponding company found
if day not in priceDt[ticker]: continue # skip if no corresponding date found
# # avoid repeating news
if ticker not in dp: dp[ticker] = {}
if day not in dp[ticker]: dp[ticker][day] = {'feature':[], 'label':[]}
# if ticker not in dp: dp[ticker] = set()
# if day in dp[ticker]: continue
# dp[ticker].add(day)
# 2.1 tokenize sentense, check if the word belongs to the top words, unify the format of words
tokens = nltk.word_tokenize(headline) + nltk.word_tokenize(body)
tokens = [unify_word(t) for t in tokens]
tokens = [t for t in tokens if t in stopWords]
#tokens = [t for t in tokens if t in topWords]
# 2.2 create word2idx/idx2word list, and a list to count the occurence of words
for t in tokens:
if t not in word2idx:
word2idx[t] = current_idx
idx2word.append(t)
current_idx += 1
idx = word2idx[t]
word_idx_cnt[idx] = word_idx_cnt.get(idx, 0) + 1
if day in testDates: continue # this step only considers training set
sentence_by_idx = [word2idx[t] for t in tokens]
print("training", cnt, ticker); cnt += 1
#sentences.append(sentence_by_idx)
dp[ticker][day]['feature'].append(sentence_by_idx)
dp[ticker][day]['label'] = round(priceDt[ticker][day], 6)
# 2.3 concatenate multi-news into a single one if they happened at the same day
sentences, labels, sentenceLen = [], [], []
for ticker in dp:
for day in dp[ticker]:
res = []
for i in dp[ticker][day]['feature']: res += i
sentenceLen.append(len(res))
sentences.append(res)
labels.append(dp[ticker][day]['label'])
sentenceLen = np.array(sentenceLen)
for percent in [50, 70, 80, 90, 95, 99]:
print("Sentence length %d%% percentile: %d" % (percent, np.percentile(sentenceLen, percent)))
# 2.4 limit the vocabulary size to e.g. 2000, and let the unkown words as the last one
sorted_word_idx_cnt = sorted(word_idx_cnt.items(), key=operator.itemgetter(1), reverse=True)
word2idx_small = {}
new_idx = 0
idx_new_map = {}
for idx, count in sorted_word_idx_cnt[:n_vocab]:
word = idx2word[idx]
word2idx_small[word] = new_idx
idx_new_map[idx] = new_idx
new_idx += 1
# let 'unknown' be the last token
word2idx_small['UNKNOWN'] = new_idx
unknown = new_idx
# 2.5 map full dict to truncated dict, pad the sequence to the same length, done
sentences_small = []
new_label = []
for sentence, label in zip(sentences, labels):
if len(sentence) > 1:
new_sentence = [idx_new_map[idx] if idx in idx_new_map else unknown for idx in sentence]
sentences_small.append(new_sentence)
new_label.append(label)
pad_small = np.matrix(sequence.pad_sequences(sentences_small, maxlen=max_words))
pad_small = pad_small.astype('int').astype('str')
new_label = np.matrix(new_label).astype('str')
featureMatrix = np.concatenate((pad_small, new_label.T), axis=1)
np.savetxt('./input/featureMatrix.csv', featureMatrix, fmt="%s")
# step 3, project the test feature in the word2idx for the traning data
dp = {}; cnt = 0
for file in input_files:
for line in open(loc + file):
line = line.strip().split(',')
if len(line) != 5: continue
ticker, name, day, headline, body = line
if day not in testDates: continue # this step only considers test set
if ticker not in priceDt: continue # continue if no corresponding prices information found
if day not in priceDt[ticker]: continue
# modify repeating news
if ticker not in dp: dp[ticker] = {}
if day not in dp[ticker]: dp[ticker][day] = {'feature':[], 'label':[]}
cnt += 1
tokens = nltk.word_tokenize(headline) + nltk.word_tokenize(body)
tokens = [unify_word(t) for t in tokens]
tokens = [t for t in tokens if t in stopWords]
#tokens = [t for t in tokens if t in topWords]
sentence_by_idx = [word2idx_small[t] for t in tokens if t in word2idx_small]
dp[ticker][day]['feature'].append(sentence_by_idx)
dp[ticker][day]['label'] = round(priceDt[ticker][day], 6)
print("test", cnt)
sentences_test, labels_test = [], []
for ticker in dp:
for day in dp[ticker]:
res = []
for i in dp |
lms-io/lms | src/user.py | Python | mit | 2,311 | 0.028992 | from bottle import route,request
from cassandra.cluster import Cluster
import uuid, collections, traceback, redis, thread, bcrypt, configparser, requests, sys, jsonpickle, random, os, zipfile, shutil, appcontext, permission
config = configparser.ConfigParser()
config.read('config. | ini')
def redis():
return appcontext.redis()
def db():
return appcontext.db()
def exists(username, password):
usr = appcontext.db().execute('SELECT username, password, organization_uid from user_by_organization where username=%s', (username,)) | [0]
match = usr.password == bcrypt.hashpw(password.encode('utf-8'), usr.password.encode('utf-8'))
if match:
return usr.organization_uid
return None
def create(organization_uid, username, password, firstName="", lastName=""):
ins = "INSERT INTO user_by_organization (organization_uid, username, password) VALUES (%s,%s,%s);"
db().execute(ins, (organization_uid, username, bcrypt.hashpw(password, bcrypt.gensalt())))
ins = "INSERT INTO user (organization_uid, username) VALUES (%s,%s);"
db().execute(ins, (organization_uid, username))
permission.grant(username, 'USER:UPDATE:'+username)
permission.grant(username, 'LOGIN')
return ""
def update(organization_uid, username, password, firstName="", lastName=""):
delete(organization_uid, username)
create(organization_uid, username, password, firstName, lastName)
return ""
def list(organization_uid):
rows = appcontext.db().execute('SELECT username,organization_uid from user where organization_uid=%s', (organization_uid,))
d = []
for r in rows:
d.insert(0,{'username':r.username,'organization_uid':str(r.organization_uid)})
return d
def get(organization_uid, username):
r = appcontext.db().execute('SELECT username,firstName,lastName,organization_uid from user where username=%s and organization_uid=%s', (username,organization_uid))
d = {'username':r[0].username,'firstName':r[0].firstname,'lastName':r[0].lastname, 'organization_uid':r[0].organization_uid}
return d
def delete(organization_uid, username):
ins = "delete from user where username = %s and organization_uid=%s;"
db().execute(ins, (username,organization_uid))
ins = "delete from user_by_organization where organization_uid = %s and username = %s;"
db().execute(ins, (organization_uid,username))
|
MRN-Code/pl2mind | tools/rois.py | Python | gpl-2.0 | 11,580 | 0.008031 | """
This module is designed for using afni to find regions of interest from a nifti file.
"""
__author__ = "Devon Hjelm"
__copyright__ = "Copyright 2014, Mind Research Network"
__credits__ = ["Devon Hjelm"]
__licence__ = "3-clause BSD"
__email__ = "dhjelm@mrn.org"
__maintainer__ = "Devon Hjelm"
import argparse
import itertools
import logging
import multiprocessing as mp
from nipy import load_image
from nipy import save_image
import numpy as np
import pickle
from pl2mind import logger
import pprint
import re
from scipy import (reshape, zeros, where, std, argmax, sqrt, ceil, floor, sign,
negative, linspace, double, float16)
import subprocess
from sys import stdout
logger = logger.setup_custom_logger("pl2mind", logging.DEBUG)
# These are general names of regions for use elsewhere.
singles = ["Postcentral Gyrus",
"Cingulate Gyrus",
"Thalamus",
"Superior Frontal Gyrus",
"Pyramis",
"Caudate",
"Declive",
"Cuneus",
"Ulvula",
"Medial Frontal Gyrus",
"Precuneus",
"Lingual Gyrus",
"Paracentral Lobule",
"Semi-Lunar Lobule",
"Posterior Cingulate",
"Culmen",
"Cerebellar Tonsil",
"Cingulate Gyrus",
"Middle Frontal Gyrus",
"Anterior Cingulate"
]
# Larger functional regions. Not used here, but can be referenced.
SC = ["Caudate","Putamen","Thalamus","Caudate Tail","Caudate Body","Caudate Head"]
AUD = ["Transverse Temporal Gyrus","Superior Temporal Gyrus"]
SM = ["Superior Parietal Lobule","Paracentral Lobule","Postcentral Gyrus","Precentral Gyrus"] #missing sma
VIS = ["Fusiform Gyrus","Lingual Gyrus","Middle Occipital Gyrus","Superior Occipital Gyrus","Inferior Occipital Gyrus","Cuneus","Middle Temporal Gyrus"] #missing calcarine gyrus
CC = ["Inferior Temporal Gyrus","Insula","Inferior Frontal Gyrus","Inferior Parietal Lobule","Middle Frontal Gyrus","Parahippocampal Gyrus"] #missing mcc
DM = ["Precuneus","Superior Frontal Gyrus","Posterior Cingulate","Anterior Cingulate","Angular Gyrus"]
CB = ["Cerebellar Tonsil","Pyramis"]
def lat_opposite(side):
"""
Returns the lateral opposite as defined by the keyword pair {"Right", "Left"}
"""
if side == "Right": return "Left"
elif side == "Left": return "Right"
else: raise ValueError("Lateral side error, (%s)" % side)
def check_pair(toproi, rois, lr_cm):
toproi_split = toproi.split(" ",1)
both = False
if toproi_split[0] in ["Left", "Right"]:
for roi in rois:
roi_split = roi.split(" ",1)
if (roi_split[1] == toproi_split[1]) & (roi_split[0] == lat_opposite(toproi_split[0])):
both = True
if both:
toproi = " ".join(["(L+R)",toproi_split[1]])
else:
if abs(lr_cm) < 9:
toproi = toproi.split(" ",1)[1]
return toproi
def find_clusters_from_3D(fnifti, thr):
"""
Function to use afni command line to find clusters from a 3D nifti.
TODO(dhjelm): change this to use nipy functions.
Parameters
----------
fnifti: nifti file
Nifti file to process.
thr: float
Threshold used for clusters.
Returns
-------
cluster: a list of floats
"""
cmd = ("3dclust "
"-1Dformat -quiet -nosum -2thresh -2 %.2f "
"-dxyz=1 2 80 2>/dev/null" % thr)
awk = "awk '{ print $1\"\t\"$2\"\t\"$3\"\t\"$4\"\t\"$5\"\t\"$6\"\t\"$11\"\t\"$14\"\t\"$15\"\t\"$16}'"
cmdline = cmd + " '%s'| " % fnifti + awk
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
if "#**" in out.split(): return []
cluster = float16(out.split())
return cluster
def find_clusters_from_4D(fnifti, i, thr):
"""
Function to use afni command line to find clusters from a 4D nifti.
TODO(dhjelm): change this to use nipy functions.
Parameters
----------
fnifti: nifti file
Nifti file to process.
i: integer
Index of the feature in the nifti file.
thr: float
Threshold used for clusters.
Returns
-------
clusters: list of tuples of floats
List of 3d clusters.
"""
assert isinstance(i, int)
assert isinstance(thr, (int, float))
cmd = ("3dclust "
"-1Dformat -quiet -nosum -1dindex %d -1tindex %d -2thresh -2 %.2f "
"-dxyz=1 2 80 2>/dev/null" %
(i, i, thr))
awk = "awk '{ print $1\"\t\"$2\"\t\"$3\"\t\"$4\"\t\"$5\"\t\"$6\"\t\"$11\"\t\"$14\"\t\"$15\"\t\"$16}'"
cmdline = cmd + " '%s'| " % fnifti + awk
proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
if "#**" in out.split(): return []
clusters = float16(out.split())
return clusters
def check_grey(coords):
    """Probability that MNI-space *coords* fall in grey matter.

    Queries AFNI ``whereami`` against the CA_N27_GW atlas. Other metrics
    could be used, but this feature needs to be added.

    # Arguments
        coords: length-3 sequence of MNI coordinates.

    # Returns
        float probability of grey matter at the coordinate.
    """
    assert len(coords) == 3
    atlas = "CA_N27_GW"
    # where am I command.
    waicmd = "whereami -atlas %s -space MNI %d %d %d 2>/dev/null" % ((atlas, ) + tuple(coords))
    process = subprocess.Popen(waicmd, stdout=subprocess.PIPE, shell=True)
    stdout, _ = process.communicate()
    patt = re.compile(" Focus point: grey \(p = ([0-9]\.[0-9]*)\)")
    matches = (patt.match(line) for line in stdout.split("\n"))
    prob = double([m.group(1) for m in matches if m])
    # Exactly one probability line is expected in the whereami output.
    assert len(prob) == 1
    return prob[0]
def return_region(coords, atlas):
    """Return the region(s) at *coords* as named by the given AFNI atlas.

    # Arguments
        coords: length-3 sequence of MNI coordinates.
        atlas: atlas name; one of CA_N27_ML, CA_ML_18_MNIA, TT_Daemon.

    # Returns
        list of region-name strings reported by ``whereami`` (internal
        whitespace runs collapsed to single spaces).
    """
    assert atlas in ["CA_N27_ML", "CA_ML_18_MNIA", "TT_Daemon"], "Atlas %s not supported yet." % atlas
    assert len(coords) == 3
    # where am I command.
    waicmd = "whereami -atlas %s -space MNI %d %d %d 2>/dev/null" % ((atlas, ) + tuple(coords))
    process = subprocess.Popen(waicmd, stdout=subprocess.PIPE, shell=True)
    stdout, _ = process.communicate()
    #if ROIONAME != "": rf.write(out)
    patt = re.compile(" Focus point: (.*)")
    matches = [patt.match(line) for line in stdout.split("\n")]
    # Normalize whitespace inside each matched region name.
    return [" ".join(m.group(1).split()) for m in matches if m]
def find_region_names(coords):
    """Collect region names for *coords* across several AFNI atlases.

    Note: only 3 atlases are currently supported, but more could be
    added in the future.

    # Arguments
        coords: length-3 sequence of MNI coordinates.

    # Returns
        deduplicated list of region-of-interest names.
    """
    assert len(coords) == 3
    regions = []
    for atlas in ["CA_N27_ML", "CA_ML_18_MNIA", "TT_Daemon"]:
        regions.extend(return_region(coords, atlas))
    # Deduplicate across atlases (ordering is not significant).
    return list(set(regions))
def get_cluster_info(clusters):
if len(clusters) == 0:
logger.warn("No clusters found for feature")
return {}
cluster_dict = {}
intensity_sum = 0
# Retrieve information on all the clusters.
for c in range(len(clusters) // 10):
cs = clusters[c * 10: (c+1) * 10]
intensity_sum += abs(cs[0] * cs[6]) |
cm = tuple([cs[x] for x in [1, 2, 3]])
coords = tuple([cs[x] for x in [7, 8, 9]])
rois = find_region_names(coords)
# grey_value = check_grey(coords)
| cluster_dict[c] = dict(
coords = coords,
volume = cs[0],
cm = |
Geoportail-Luxembourg/qgis-pag-plugin | resources.py | Python | mit | 78,761 | 0.000063 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: mer. 14. sept. 10:51:53 2016
# by: The Resource Compiler for PyQt (Qt v4.8.5)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x07\xea\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x56\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x33\x2d\x63\x30\x31\x31\x20\x36\x36\
\x2e\x31\x34\x35\x36\x36\x31\x2c\x20\x32\x30\x31\x32\x2f\x30\x32\
\x2f\x30\x36\x2d\x31\x34\x3a\x35\x36\x3a\x32\x37\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78 | \x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x7 | 9\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x46\x41\x41\x34\x37\x42\x41\x33\x39\x43\x33\
\x45\x43\x42\x30\x45\x31\x37\x42\x37\x35\x41\x42\x36\x45\x46\x31\
\x36\x33\x37\x45\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\
\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\
\x3a\x41\x30\x36\x41\x32\x42\x30\x34\x34\x45\x46\x33\x31\x31\x45\
\x35\x41\x39\x46\x34\x43\x45\x35\x34\x36\x42\x46\x35\x30\x36\x43\
\x32\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\
\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x41\x30\x36\
\x41\x32\x42\x30\x33\x34\x45\x46\x33\x31\x31\x45\x35\x41\x39\x46\
\x34\x43\x45\x35\x34\x36\x42\x46\x35\x30\x36\x43\x32\x22\x20\x78\
\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\
\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\
\x43\x53\x36\x20\x4d\x61\x63\x69\x6e\x74\x6f\x73\x68\x22\x3e\x20\
\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\
\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\
\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x30\x32\x38\
\x30\x31\x31\x37\x34\x30\x37\x32\x30\x36\x38\x31\x31\x38\x32\x32\
\x41\x39\x45\x32\x39\x34\x32\x44\x34\x36\x37\x46\x38\x22\x20\x73\
\x74\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\
\x22\x46\x41\x41\x34\x37\x42\x41\x33\x39\x43\x33\x45\x43\x42\x30\
\x45\x31\x37\x42\x37\x35\x41\x42\x36\x45\x46\x31\x36\x33\x37\x45\
\x31\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\
\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\
\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\
\x22\x3f\x3e\xa5\xc4\x4e\x8c\x00\x00\x04\x2a\x49\x44\x41\x54\x78\
\xda\xa4\x56\x0d\x6c\x13\x65\x18\x7e\xee\x7a\xd7\x5b\x69\xbb\xb6\
\x63\x73\x30\xb7\x6e\xe2\xa6\x64\xc0\x88\x2e\x21\x4c\x8c\x0e\xa3\
\x62\x66\xfc\x99\xd1\xb0\x18\x41\x89\xb8\x60\x08\x1a\x71\x44\x13\
\x51\x87\x99\x4a\x4c\x74\x2e\x41\x8c\x66\x21\x43\x09\x86\x80\xa8\
\x81\xe8\x26\x46\x61\x66\x4a\xe6\xb2\x01\x4a\x36\xc3\xd8\x9f\xeb\
\x3a\x3b\xb6\xfe\xdc\xfa\x73\xd7\xbb\xfb\xfc\x7a\x55\xb4\xe9\x75\
\x5b\xf0\x4b\xee\x9a\xfb\xbe\xf7\x7b\x9f\xf7\x7b\xdf\xe7\x7d\xbe\
\x32\x23\xce\x95\x0f\x38\x76\x3d\xbb\xcf\x5c\x5e\xea\xc6\x3c\x83\
\x31\xf3\x00\xcf\x01\xaa\x06\x22\xc7\x01\x4d\xcb\x68\xab\x0c\x8f\
\xcd\xa8\x13\xbe\x97\x11\x78\xeb\x03\x0f\x59\xe0\x88\x75\x9f\x27\
\x91\x8e\x4e\x12\x3d\x7d\x96\x68\x92\x3c\xaf\xfd\xec\x91\x93\x7e\
\xd6\x5c\xb1\xbc\x00\x0b\x1c\x5a\x30\x04\x7f\x63\x0b\xfc\x7b\x5a\
\x20\x75\x9f\x9b\xd7\x9e\x2b\xbe\xde\xc9\x91\xb8\x92\xbe\x42\x08\
\xd4\xc9\x29\x30\xd6\x45\x60\x6d\x56\x10\x45\x81\x7f\xf7\xbb\x88\
\xb4\xff\x00\x46\xc8\xa2\x8f\x19\x34\xb0\xd4\x2d\xe1\x28\x54\xaf\
\x0f\x5c\x69\xf1\xbf\x93\xaa\x0a\xd6\x08\x59\xea\xbd\x08\x4f\xd5\
\x43\x90\xba\x7a\x40\x54\x05\x81\xc6\xf7\x69\xce\x65\xb0\x0e\x1b\
\xa4\x9e\x5e\xc4\x4e\xff\x88\x78\xff\xe0\xd5\x60\x42\xcd\x07\x30\
\x76\xf3\xed\x10\xdb\x8e\xe9\xdf\x29\xa7\x30\x4c\xc5\xb4\x1f\x24\
\x18\x81\x29\x3f\x0f\x0c\xcf\xc3\xb6\xf9\x11\xc4\x87\xc6\xa8\xd3\
\x4b\x60\x58\xba\x45\x8b\x23\xfc\xe5\x29\xc4\x7e\xee\x83\x44\x9f\
\x04\xb8\xed\xf1\x5a\x58\x6a\xaa\x29\x13\x98\x14\x5f\x86\x27\x88\
\x9c\xfc\x1e\x6a\x28\x08\x36\xd7\x95\x34\xca\xcb\x81\x26\x8a\x90\
\x7f\xfd\x3d\xc1\x25\x0a\x22\x40\xfa\xa9\x97\xd6\x44\x84\x50\xb9\
\x12\x5c\x49\x21\xb2\xb7\x3d\x89\xac\xdb\x2a\xd3\x7c\x19\x02\x98\
\x57\x94\x51\x06\x06\x11\xda\x7f\x08\x72\xef\x6f\xf0\xde\x59\x07\
\x5f\xdd\x56\x68\xbe\x69\x1a\x20\x03\xd6\x69\x47\x6e\xeb\x5e\x10\
\x49\x86\x65\xc3\x1d\x70\xec\x78\x0a\xc1\x37\xf7\x19\xd2\xd6\x10\
\xc0\xfa\x68\x0d\xb2\x96\xdf\x82\xf0\xe1\xaf\x70\x65\xfb\xeb\x88\
\x0f\x5c\x06\xcb\x39\x68\xf0\x2c\x34\x55\x02\x77\x83\x1b\x7c\x59\
\x49\x92\x00\xd1\x18\xb8\x65\x45\x14\x34\x5b\x4f\xe3\x82\x00\xd8\
\xc5\x4e\xe4\x1d\x6c\x86\xa9\x68\x29\xe4\x0b\x03\xd0\xb3\x9a\x78\
\x09\x02\xf8\xe2\x12\x90\x98\xa4\x33\xc4\x72\xf7\x3a\x84\x8f\x7d\
\x93\xdc\x93\x4b\xd3\x38\x13\x9c\x1b\x40\x19\xfe\x03\xb3\x47\x4f\
\x40\xee\xbb\x08\x61\xcd\x6a\x38\x1a\x9e\x49\x70\x8d\x32\x49\x86\
\xa9\xb0\x40\xcf\x37\x5f\x5e\x06\xf3\xad\x2b\x00\x93\x49\xb7\x49\
\x9c\x24\xd6\xd9\x4d\x9d\x07\xc0\xe6\x38\xd3\x7b\x21\xa5\xb8\x1d\
\x9d\x98\xde\xb9\x87\x1e\x5b\x86\x50\x51\xae\x47\x0f\x86\x80\x73\
\xbb\xc1\x15\x2e\x81\x32\x31\x49\xa3\x97\xe9\x7a\x04\x53\x5b\x1a\
\xe0\x7a\xed\x79\xd8\xb7\x6e\xc4\xe4\x83\xf5\x74\x3d\x1f\x3c\x4d\
\xd5\x9c\x00\xf6\x2d\x8f\x81\xb5\x58\x20\x53\x3a\x46\xdb\x3b\x91\
\xbd\x7d\x13\x84\x75\x95\xb0\xac\xaf\xd2\xeb\x20\xb6\x1d\x45\xf4\
\xbb\x2e\x9d\xc6\x62\xdb\x21\xda\x0f\xdd\xb0\x6d\xaa\x85\x32\xea\
\xc1\xe2\xe6\xdd\x34\x1f\x06\x19\x0f\x7f\xd1\x61\x2c\x24\x9a\x66\
\x38\x3d\xf3\xea\x7b\x64\xb4\x70\x2d\x19\x36\xdd\x48\x2e\x63\x09\
\xf1\xde\xbb\x99\x68\x62\xd8\x58\xbb\xba\x7a\x08\x9b\x59\x3a\x99\
\xb4\xa9\xd0\xfe\x4f\x21\x7e\x74\x18\xdc\xd2\xeb\xa8\xaa\xf2\xd4\
\x44\x40\x7c\x70\x18\x86\x72\x33\x17\x8b\x32\x0d\xf9\xfc\x00\xe2\
\xbe\x11\x48\xf4\x97\x75\x39\xa8\x2a\x48\x90\x87\xfa\x69\x8f\xec\
\x80\x32\x32\x9e\x64\xd7\x42\x01\xb4\xd0\x6c\xaa\x98\x45\xa2\x88\
\x9e\x3a\x03\x56\xb0\xc3\xf9\xd2\x36\xe4\x9f\x68\x85\xeb\x95\x17\
\x21\xdc\xb4\x0a\xe1\x6f\xbf\xc6\x78\xc5\x5d\x08\xec\xfd\x70\xe1\
\x35\x88\x74\x9c\x21\x13\xd5\x75\x24\xf4\xf1\x67\x24\x7c\xbc\x9d\
\x78\xef\x79\x82\x0c\xc2\x49\xc4\x4f\x8e\xa7\xd8\xa9\x41\x91\x4c\
\xd6\xd6\x93\x4b\xb0\x91\x2b\xcf\x35\xa6\xd5\x20\x23\x80\x78\xf0\
\x73\x32\x5a\xb4\x86\x3a\xcd\xa1\x9b\x9d\x64\x88\x5d\x46\xa6\x77\
\xbd\x6d\x5c\xcc\x5f\x2e\xd0\x82\x17\x90\xf1\xd5\x35\x44\x8b\xc6\
\x52\x00\xb8\x4c\x29\x4a\x28\x68\x56\xf5\x5a\x88\xad\x47\xc0\x70\
\x26\x58\xee\x5f\x4f\x1b\x6d\x55\x86\x4a\x26\x33\xcd\x58\x2d\x69\
\xe4\xe0\xe6\xbc\x91\xdc\x05\x70\xbd\xf1\xc2\xbc\xc5\x97\xce\xf6\
\xd1\x7e\x9f\xd1\x2f\xa1\xc4\x65\x94\x82\x9d\x88\xee |
Mhynlo/SickRage | sickbeard/webapi.py | Python | gpl-3.0 | 114,568 | 0.002889 | # coding=utf-8
# Author: Dennis Lutter <lad1337@gmail.com>
# Author: Jonathon Saine <thezoggy@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
# TODO: break this up into separate files
# pylint: disable=line-too-long,too-many-lines,abstract-method
# pylint: disable=no-member,method-hidden,missing-docstring,invalid-name
import datetime
import io
import os
import re
import time
import traceback
import urllib
import sickbeard
from sickbeard import classes, db, helpers, image_cache, logger, network_timezones, sbdatetime, search_queue, \
ui
from sickbeard.common import ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, \
SNATCHED_PROPER, UNAIRED, UNKNOWN, WANTED, statusStrings
from sickbeard.versionChecker import CheckVersion
from sickrage.helper.common import dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, timeFormat, try_int
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import CantUpdateShowException, ShowDirectoryNotFoundException, ex
from sickrage.helper.quality import get_quality_string
from sickrage.media.ShowBanner import ShowBanner
from sickrage.media.ShowFanArt import ShowFanArt
from sickrage.media.ShowNetworkLogo import ShowNetworkLogo
from sickrage.media.ShowPoster import ShowPoster
from sickrage.show.ComingEpisodes import ComingEpisodes
from sickrage.show.History import History
from sickrage.show.Show import Show
from sickrage.system.Restart import Restart
from sickrage.system.Shutdown import Shutdown
try:
import json
except ImportError:
# pylint: disable=import-error
import simplejson as json
# pylint: disable=import-error
from tornado.web import RequestHandler
# Query-parameter names accepted as the indexer id of a show.
indexer_ids = ["indexerid", "tvdbid"]
RESULT_SUCCESS = 10  # only use inside the run methods
RESULT_FAILURE = 20  # only use inside the run methods
RESULT_TIMEOUT = 30  # not used yet :(
RESULT_ERROR = 40  # only use outside of the run methods !
RESULT_FATAL = 50  # only use in Api.default() ! this is the "we encountered an internal error" error
RESULT_DENIED = 60  # only use in Api.default() ! this is the access denied error
# Maps the numeric result codes above to the strings emitted in API responses.
result_type_map = {
    RESULT_SUCCESS: "success",
    RESULT_FAILURE: "failure",
    RESULT_TIMEOUT: "timeout",
    RESULT_ERROR: "error",
    RESULT_FATAL: "fatal",
    RESULT_DENIED: "denied",
}
# basically everything except RESULT_SUCCESS / success is bad
# noinspection PyAbstractClass
class ApiHandler(RequestHandler):
    """Tornado request handler implementing the versioned JSON API."""
    version = 5  # use an int since float-point is unpredictable
    def __init__(self, *args, **kwargs):
        # No extra state; defer entirely to the tornado RequestHandler setup.
        super(ApiHandler, self).__init__(*args, **kwargs)
    # def set_default_headers(self):
    #     self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
    def get(self, *args, **kwargs):
        """Handle an API GET request: dispatch the cmd(s), write the response.

        Flattens single-valued query arguments, optionally wraps the
        dispatcher in a profiler (``profile`` query arg), and renders the
        result either as JSON or as a raw image depending on the
        handler-supplied ``outputType``.
        """
        kwargs = self.request.arguments
        # noinspection PyCompatibility
        for arg, value in kwargs.iteritems():
            # tornado supplies every argument as a list; unwrap singletons.
            if len(value) == 1:
                kwargs[arg] = value[0]
        args = args[1:]
        # set the output callback
        # default json
        output_callback_dict = {
            'default': self._out_as_json,
            'image': self._out_as_image,
        }
        access_msg = u"API :: " + self.request.remote_ip + " - gave correct API KEY. ACCESS GRANTED"
        logger.log(access_msg, logger.DEBUG)
        # set the original call_dispatcher as the local _call_dispatcher
        _call_dispatcher = self.call_dispatcher
        # if profile was set wrap "_call_dispatcher" in the profile function
        if 'profile' in kwargs:
            from profilehooks import profile
            _call_dispatcher = profile(_call_dispatcher, immediate=True)
            del kwargs["profile"]
        try:
            out_dict = _call_dispatcher(args, kwargs)
        except Exception as e:  # real internal error oohhh nooo :(
            logger.log(u"API :: " + ex(e), logger.ERROR)
            error_data = {
                "error_msg": ex(e),
                "args": args,
                "kwargs": kwargs
            }
            out_dict = _responds(RESULT_FATAL, error_data,
                                 "SickRage encountered an internal error! Please report to the Devs")
        if 'outputType' in out_dict:
            output_callback = output_callback_dict[out_dict['outputType']]
        else:
            output_callback = output_callback_dict['default']
        # noinspection PyBroadException
        try:
            self.finish(output_callback(out_dict))
        except Exception:
            # Client may have disconnected mid-response; nothing to recover.
            pass
    def _out_as_image(self, _dict):
        """Serve the media object in ``_dict['image']`` with its own MIME type."""
        self.set_header('Content-Type', _dict['image'].get_media_type())
        return _dict['image'].get_media()
    def _out_as_json(self, _dict):
        """Serialize *_dict* as a JSON (or JSONP, when requested) response body."""
        self.set_header("Content-Type", "application/json;charset=UTF-8")
        try:
            out = json.dumps(_dict, ensure_ascii=False, sort_keys=True)
            callback = self.get_query_argument('callback', None) or self.get_query_argument('jsonp', None)
            if callback:
                out = callback + '(' + out + ');'  # wrap with JSONP call if requested
        except Exception as e:  # if we fail to generate the output fake an error
            logger.log(u"API :: " + traceback.format_exc(), logger.DEBUG)
            out = '{{"result": "{0}", "message": "error while composing output: {1}"}}'.format(result_type_map[RESULT_ERROR], ex(e))
        return out
def call_dispatcher(self, args, kwargs): # pylint:disable=too-many-branches
""" calls the appropriate CMD class
looks for a cmd in args and kwargs
or calls the TVDBShorthandWrapper when the first args element is a number
or returns an error that there is no such cmd
"""
logger.log(u"API :: all args: '" + str(args) + "'", logger.DEBUG)
logger.log(u"API :: all kwargs: '" + str(kwargs) + "'", logger.DEBUG)
commands = None
if args:
commands, args = args[0], args[1:]
commands = kwargs.pop("cmd", commands)
out_dict = {}
if commands:
commands = commands.split("|")
multi_commands = len(commands) > 1
for cmd in commands:
cur_args, cur_kwargs = self.filter_params(cmd, args, kwargs)
if len(cmd.split("_")) > 1:
cmd, cmd_index = cmd.split("_")
else:
cmd_index = None
logger.log(u"API :: " + cmd + ": cur_kwargs " + str(cur_kwargs), logger.DEBUG)
if not (cmd in ('show.getbanner', 'show.getfanart', 'show.getnetworklogo', 'show.getposter') and
multi_commands): # skip these cmd while chaining
try:
if cmd in function_mapper:
func = function_mapper.get(cmd) # map function
to_call = func(cur_args, cur_kwargs)
to_call.rh = self
cur_out_dict = to_call.run() # call function and get response
elif _is_int(cmd):
to_call = TVDBShorthandWrapper(cur_args, cur_kwargs, cmd)
to_call.rh = self
cur_out_dict = to_call.run()
else:
cur_out_dict = _responds(RESULT_ERROR, "No such cmd: '" + cmd + "'")
except ApiErr |
faush01/MBCon | default.py | Python | gpl-2.0 | 296 | 0 | # Gnu General Public License - see LICENSE.TXT
from resources.lib.simple_logging import SimpleLogging
from resources.lib.functi | ons import mainEntryPoint
# Create the add-on's logger, then hand control to the plugin dispatcher.
log = SimpleLogging('default')
log.debug("About to enter mainEntryPoint()")
mainEntryPoint()
| # clear done and exit.
# sys.modules.clear()
|
subeax/grab | test/case/spider_task.py | Python | mit | 8,629 | 0.011589 | from unittest import TestCase
#try:
# import cPickle as pickle
#except ImportError:
# import pickle
import grab.spider.base
from grab import Grab
from grab.spider import Spider, Task, Data, SpiderMisuseError, NoTaskHandler
from test.server import SERVER
class SimpleSpider(Spider):
    # Base URL the spider uses to resolve relative task URLs.
    base_url = 'http://google.com'
    def task_baz(self, grab, task):
        # Handler for 'baz' tasks: stash the fetched body for assertions.
        self.SAVED_ITEM = grab.response.body
class TestSpider(TestCase):
    def setUp(self):
        # Reset the shared test HTTP server before each test so responses
        # and recorded requests cannot leak between test methods.
        SERVER.reset()
    def test_task_priority(self):
        """Priority assignment: random mode, const mode, explicit override.

        Mutates module-level defaults in grab.spider.base, so ordering of
        the three scenarios below matters.
        """
        #SERVER.RESPONSE['get'] = 'Hello spider!'
        #SERVER.SLEEP['get'] = 0
        #sp = SimpleSpider()
        #sp.add_task(Task('baz', SERVER.BASE_URL))
        #sp.run()
        #self.assertEqual('Hello spider!', sp.SAVED_ITEM)
        # Automatic random priority: assigned from the configured range.
        grab.spider.base.RANDOM_TASK_PRIORITY_RANGE = (10, 20)
        bot = SimpleSpider(priority_mode='random')
        bot.setup_queue()
        task = Task('baz', url='xxx')
        self.assertEqual(task.priority, None)
        bot.add_task(task)
        self.assertTrue(10 <= task.priority <= 20)
        # Automatic constant priority: every task gets the default value.
        grab.spider.base.DEFAULT_TASK_PRIORITY = 33
        bot = SimpleSpider(priority_mode='const')
        bot.setup_queue()
        task = Task('baz', url='xxx')
        self.assertEqual(task.priority, None)
        bot.add_task(task)
        self.assertEqual(33, task.priority)
        # An explicitly set priority is never overridden automatically.
        grab.spider.base.DEFAULT_TASK_PRIORITY = 33
        bot = SimpleSpider(priority_mode='const')
        bot.setup_queue()
        task = Task('baz', url='xxx', priority=1)
        self.assertEqual(1, task.priority)
        bot.add_task(task)
        self.assertEqual(1, task.priority)
        self.assertRaises(SpiderMisuseError,
                          lambda: SimpleSpider(priority_mode='foo'))
def test_task_url(self):
bot = SimpleSpider()
bot.setup_queue()
task = Task('baz', url='xxx')
self.assertEqual('xxx', task.url)
bot.add_task(task)
self.assertEqual('http://google.com/xxx', task.url)
self.assertEqual(None, task.grab_config)
g = Grab(url='yyy')
task = Task('baz', grab=g)
bot.add_task(task)
self.assertEqual('http://google.com/yyy', task.url)
self.assertEqual('http://google.com/yyy', task.grab_config['url'])
def test_task_clone(self):
bot = SimpleSpider()
bot.setup_queue()
task = Task('baz', url='xxx')
bot.add_task(task.clone())
# Pass grab to clone
task = Task('baz', url='xxx')
g = Grab()
g.setup(url='zzz')
bot.add_task(task.clone(grab=g))
# Pass grab_config to clone
task = Task('baz', url='xxx')
g = Grab()
g.setup(url='zzz')
bot.add_task(task.clone(grab_config=g.config))
def test_task_clone_with_url_param(self):
task = Task('baz', url='xxx')
task.clone(url='http://yandex.ru/')
def test_task_useragent(self):
bot = SimpleSpider()
bot.setup_queue()
g = Grab()
g.setup(url=SERVER.BASE_URL)
g.setup(user_agent='Foo')
task = Task('baz', grab=g)
bot.add_task(task.clone())
bot.run()
self.assertEqual(SERVER.REQUEST['headers']['User-Agent'], 'Foo')
#def test_task_relative_url_error(self):
#class SimpleSpider(Spider):
#def task_one(self, grab, task):
#yield Task('second', '/')
#bot = SimpleSpider()
#bot.setup_queue()
#bot.add_task(Task('one', SERVER.BASE_URL))
#bot.run()
def test_task_nohandler_error(self):
class TestSpider(Spider):
pass
bot = TestSpider()
bot.setup_queue()
bot.add_task(Task('page', url=SERVER.BASE_URL))
self.assertRaises(NoTaskHandler, bot.run)
def test_task_raw(self):
class TestSpider(Spider):
def prepare(self):
self.codes = []
def task_page(self, grab, task):
self.codes.append(grab.response.code)
SERVER.RESPONSE['code'] = 502
bot = TestSpider(network_try_limit=1)
bot.setup_queue()
bot.add_task(Task('page', url=SERVER.BASE_URL))
bot.add_task(Task('page', url=SERVER.BASE_URL))
bot.run()
self.assertEqual(0, len(bot.codes))
bot = TestSpider(network_try_limit=1)
bot.setup_queue()
bot.add_task(Task('page', url=SERVER.BASE_URL, raw=True))
bot.add_task(Task('page', url=SERVER.BASE_URL, raw=True))
bot.run()
self.assertEqual(2, len(bot.codes))
def test_task_callback(self):
class TestSpider(Spider):
def task_page(self, grab, task):
self.meta['tokens'].append('0_handler')
class FuncWithState(object):
def __init__(self, tokens):
self.tokens = tokens
def __call__(self, grab, task):
self.tokens.append('1_func')
tokens = []
func = FuncWithState(tokens)
bot = TestSpider()
bot.meta['tokens'] = tokens
bot.setup_queue()
# classic handler
bot.add_task(Task('page', url=SERVER.BASE_URL))
# callback option overried classic handler
bot.add_task(Task('page', url=SERVER.BASE_URL, callback=func))
# callback and null task name
bot.add_task(Task(name=None, url=SERVER.BASE_URL, callback=func))
# callback and default task name
bot.add_task(Task(url=SE | RVER.BASE_URL, callback=func))
bot.run()
self.assertEqual(['0_handler', '1_func', '1_func', '1_func'],
sorted(tokens))
#def test_task_callback_serialization(self): |
# 8-(
# FIX: pickling the spider instance completely does not work
# 8-(
#class FuncWithState(object):
#def __init__(self, tokens):
#self.tokens = tokens
#def __call__(self, grab, task):
#self.tokens.append('func')
#tokens = []
#func = FuncWithState(tokens)
#bot = SimpleSpider()
#bot.setup_queue()
##bot.add_task(Task(url=SERVER.BASE_URL, callback=func))
#dump = pickle.dumps(bot)
#bot2 = pickle.loads(dump)
#bot.run()
#self.assertEqual(['func'], tokens)
# Deprecated
# TODO: Change to middleware then it will be ready
#def test_task_fallback(self):
#class TestSpider(Spider):
#def prepare(self):
#self.tokens = []
#def task_page(self, grab, task):
#self.tokens.append('task')
#def task_page_fallback(self, task):
#self.tokens.append('fallback')
#SERVER.RESPONSE['code'] = 403
#bot = TestSpider(network_try_limit=2)
#bot.setup_queue()
#bot.add_task(Task('page', url=SERVER.BASE_URL))
#bot.run()
#self.assertEqual(bot.tokens, ['fallback'])
# Deprecated
# TODO: Change to middleware then it will be ready
#def test_task_fallback_yields_new_task(self):
#class TestSpider(Spider):
#def prepare(self):
#self.tokens = []
#def task_page(self, grab, task):
#self.tokens.append('task')
#SERVER.RESPONSE['code'] = 403
#yield Task('page2', url=SERVER.BASE_URL)
#def task_page_fallback(self, task):
#self.tokens.append('fallback')
#SERVER.RESPONSE['code'] = 200
#self.add_task(Task('page', url=SERVER.BASE_URL))
#def task_page2(self, grab, task):
#pass
#def task_page2_fallback(self, task):
#self.tokens.append('fallback2')
#SERVER.RESPONSE['code'] = 403
#bot = TestSpider(network_try_limit=2)
#bot.setup_queue()
#bot.add_task(Task('page', url=SERVER.BASE_URL))
#bot.run()
#self.assertEqual(bot.tokens, ['fallback', 'task', 'fallback2'])
de |
gagneurlab/concise | docs/autogen.py | Python | mit | 12,495 | 0.00032 | # -*- coding: utf-8 -*-
'''
General documentation architecture:
Home
Index
- Getting started
Getting started to concise
Layers
- Preprocessing
Genomic Sequence Preprocessing
RNA Structure Preprocessing
Spline-position Preprocessing
- Data
Encode
Attract
Losses
Metrics
Eval metrics
Optimizers
Initializers
Regularizers
- effects
- Utils
fasta
model_data
pwm
splines
Contributing
'''
from __future__ import print_function
from __future__ import unicode_literals
import re
import inspect
import os
import shutil
import sys
if sys.version[0] == '2':
reload(sys)
sys.setdefaultencoding('utf8')
import concise
from concise import utils
from concise.utils import fasta
from concise.utils import helper
from concise.utils import model_data
from concise.utils import pwm, load_motif_db
from concise.utils import splines
from concise.data import encode, attract, hocomoco
from concise.preprocessing import sequence, splines, structure
from concise import constraints
from concise import eval_metrics
from concise import metrics
from concise import hyopt
from concise import initializers
from concise import layers
from concise import losses
from concise import optimizers
from concise import regularizers
from concise import effects
# Member names the doc generator must never render as API entries
# (framework plumbing inherited from base classes).
EXCLUDE = {
    'Optimizer',
    'Wrapper',
    'get_session',
    'set_session',
    'CallbackList',
    'serialize',
    'deserialize',
    'get',
}
# Declarative site map: each entry names an output markdown page and the
# functions/classes (or whole modules) whose docs are rendered onto it.
PAGES = [
    {
        'page': 'preprocessing/sequence.md',
        'functions': [
            sequence.encodeSequence,
            sequence.pad_sequences,
            sequence.encodeDNA,
            sequence.encodeRNA,
            sequence.encodeCodon,
            sequence.encodeAA,
        ]
    },
    {
        'page': 'preprocessing/splines.md',
        'classes': [
            splines.EncodeSplines,
        ],
        'functions': [
            splines.encodeSplines,
        ]
    },
    {
        'page': 'preprocessing/structure.md',
        'functions': [
            structure.encodeRNAStructure,
        ]
    },
    {
        'page': 'layers.md',
        'functions': [
            layers.InputDNA,
            layers.InputRNA,
            layers.InputRNAStructure,
            layers.InputCodon,
            layers.InputAA,
            layers.InputSplines,
        ],
        'classes': [
            layers.SplineT,
            layers.SplineWeight1D,
            layers.ConvSequence,
            layers.ConvDNA,
            layers.ConvRNA,
            layers.ConvRNAStructure,
            layers.ConvAA,
            layers.ConvCodon,
            # layers.ConvSplines,
            layers.GlobalSumPooling1D,
        ],
    },
    {
        'page': 'losses.md',
        'all_module_functions': [losses],
    },
    {
        'page': 'metrics.md',
        'all_module_functions': [metrics],
    },
    {
        'page': 'eval_metrics.md',
        'all_module_functions': [eval_metrics],
    },
    {
        'page': 'initializers.md',
        'all_module_functions': [initializers],
        'all_module_classes': [initializers],
    },
    {
        'page': 'regularizers.md',
        # 'all_module_functions': [regularizers],
        # 'all_module_classes': [regularizers],
        'classes': [
            regularizers.SplineSmoother,
        ]
    },
    {
        'page': 'optimizers.md',
        'all_module_classes': [optimizers],
        'functions': [
            optimizers.data_based_init
        ]
    },
    {
        'page': 'effects.md',
        'functions': [
            effects.effect_from_model,
            effects.gradient_pred,
            effects.dropout_pred,
            effects.ism,
        ]
    },
    {
        'page': 'utils/fasta.md',
        'all_module_functions': [utils.fasta],
    },
    {
        'page': 'utils/model_data.md',
        'all_module_functions': [utils.model_data],
    },
    {
        'page': 'utils/pwm.md',
        'classes': [utils.pwm.PWM],
        'functions': [
            load_motif_db,
        ]
    },
    {
        'page': 'utils/splines.md',
        'classes': [utils.splines.BSpline]
    },
    {
        'page': 'hyopt.md',
        'classes': [
            hyopt.CMongoTrials,
            hyopt.CompileFN,
        ],
        'functions': [
            hyopt.test_fn,
            hyopt.eval_model,
        ]
    },
    {
        'page': 'data/encode.md',
        'functions': [
            encode.get_metadata,
            encode.get_pwm_list,
        ]
    },
    {
        'page': 'data/attract.md',
        'functions': [
            attract.get_metadata,
            attract.get_pwm_list,
        ]
    },
    {
        'page': 'data/hocomoco.md',
        'functions': [
            hocomoco.get_metadata,
            hocomoco.get_pwm_list,
        ]
    },
]
# TODO: confirm this is the final documentation domain — verify before release.
ROOT = 'http://concise.io/'  # base URL prepended to every generated docs link
def get_earliest_class_that_defined_member(member, cls):
    """Return the most distant ancestor of *cls* that defines *member*.

    Falls back to *cls* itself when no ancestor provides the member.
    """
    owner = None
    # get_classes_ancestors lists nearer bases first, so the last match
    # seen while scanning is the earliest (most distant) definer.
    for candidate in get_classes_ancestors([cls]):
        if member in dir(candidate):
            owner = candidate
    return owner if owner else cls
def get_classes_ancestors(classes):
    """Recursively collect base classes of *classes*, nearest bases first.

    Classes named ``object`` are excluded from the result.
    """
    direct_bases = []
    for cls in classes:
        direct_bases.extend(cls.__bases__)
    kept = [base for base in direct_bases if base.__name__ != 'object']
    if not kept:
        return kept
    # Depth-first accumulation: this level's bases, then their ancestors.
    return kept + get_classes_ancestors(kept)
def get_function_signature(function, method=True):
    """Build a display signature string such as ``module.func(a, b=1)``.

    # Arguments
        function: the function (or unbound method) to describe.
        method: if True, the first positional argument (assumed to be
            ``self``/``cls``) is dropped from the rendered signature.

    # Returns
        The formatted ``module.name(args...)`` string.
    """
    signature = getattr(function, '_legacy_support_signature', None)
    if signature is None:
        # BUG FIX: inspect.getargspec was removed in Python 3.11; prefer
        # getfullargspec (same .args/.defaults fields) and fall back to
        # getargspec on Python 2, where getfullargspec does not exist.
        try:
            signature = inspect.getfullargspec(function)
        except AttributeError:
            signature = inspect.getargspec(function)
    defaults = signature.defaults
    if method:
        args = signature.args[1:]
    else:
        args = signature.args
    if defaults:
        # Materialize as a list: on Python 3, zip() returns a lazy iterator
        # which would be exhausted by the loop below yet remain truthy.
        kwargs = list(zip(args[-len(defaults):], defaults))
        args = args[:-len(defaults)]
    else:
        kwargs = []
    st = '%s.%s(' % (function.__module__, function.__name__)
    for a in args:
        st += str(a) + ', '
    for a, v in kwargs:
        if isinstance(v, str):
            v = '\'' + v + '\''
        st += str(a) + '=' + str(v) + ', '
    if kwargs or args:
        # Strip the trailing ", " before closing the parenthesis.
        return st[:-2] + ')'
    else:
        return st + ')'
def get_class_signature(cls):
    """Build a display signature for *cls* based on its ``__init__``.

    Classes that only inherit ``object.__init__`` (a C slot wrapper that
    older ``inspect`` introspection rejects) fall back to an empty-call
    signature.
    """
    try:
        class_signature = get_function_signature(cls.__init__)
        class_signature = class_signature.replace('__init__', cls.__name__)
    except TypeError:
        # BUG FIX: was a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt; inspect raises TypeError for
        # non-introspectable callables such as object.__init__.
        class_signature = cls.__module__ + '.' + cls.__name__ + '()'
    return class_signature
def class_to_docs_link(cls):
    """Map a ``concise.*`` class to its documentation page anchor under ROOT."""
    module_name = cls.__module__
    assert module_name[:8] == 'concise.'
    # Drop the 'concise.' prefix and turn the module path into a URL path.
    relative_path = module_name[8:].replace('.', '/')
    return ROOT + relative_path + '#' + cls.__name__.lower()
def class_to_source_link(cls):
    """Build a GitHub ``[source]`` markdown link pointing at *cls*'s definition."""
    module_name = cls.__module__
    assert module_name[:8] == 'concise.'
    path = module_name.replace('.', '/') + '.py'
    # getsourcelines returns (lines, starting_line_number).
    line = inspect.getsourcelines(cls)[-1]
    url = 'https://github.com/avsecz/concise/blob/master/' + path + '#L' + str(line)
    return '[[source]](' + url + ')'
def code_snippet(snippet):
    """Wrap *snippet* in a fenced ``python`` markdown code block."""
    return '```python\n' + snippet + '\n```\n'
def process_class_docstring(docstring):
    """Convert a Keras-style class docstring into markdown.

    Section headers written as indented ``# Section`` comments become
    bold markdown headers, and ``name: description`` argument lines
    become bulleted list items; 4-space indentation levels are then
    rewritten as tabs (or stripped entirely at the innermost level).
    """
    # Indented '# Section' lines -> '__Section__' markdown headers.
    docstring = re.sub(r'\n    # (.*)\n',
                       r'\n    __\1__\n\n',
                       docstring)
    # 'name: description' lines -> '- __name__: description' bullets.
    docstring = re.sub(r'    ([^\s\\]+):(.*)\n',
                       r'    - __\1__:\2\n',
                       docstring)
    # Translate nesting depth into tabs, deepest first so the shorter
    # prefixes do not consume parts of the longer ones.
    docstring = docstring.replace('    ' * 5, '\t\t')
    docstring = docstring.replace('    ' * 3, '\t')
    docstring = docstring.replace('    ', '')
    return docstring
| def | process_function_docstring(docstring):
docstring = re.sub(r'\n # (.*)\n',
r'\n __\1__\n\n',
docstring)
docstring = re.sub(r'\n # (.*)\n',
r'\n __\1__\n\n',
docstring)
docstring = re.sub(r' ([^\s\\]+):(.*)\n',
|
maseaver/todomaker | todomaker.py | Python | mit | 13,220 | 0.004992 | import sys
import json
from tkinter import filedialog
def getFiles(failedargs):
    """Interactively collect readable input files via a Tk open-file dialog.

    # Arguments
        failedargs: falsy when command-line args were all fine; -1 when no
            files were given at all; any other truthy value when some
            command-line argument could not be opened.

    # Returns
        (newfiles, -1): the list of filenames verified as readable, plus
        the -1 sentinel. Raises SystemExit if the user declines to
        continue after supplying no files at all.
    """
    newfiles = []
    if failedargs:
        if failedargs == -1:
            answer = input("""\
You didn't specify any files as input, would you like to continue? y/n
>""")
            if answer in ["n", ""]:
                raise SystemExit
        else:
            answer = input("""\
One or more of the command line arguments couldn't be opened, would you like to
specify additional input files? y/n
>""")
            if answer in ["n", ""]:
                return newfiles, -1
    prompt = """\
Please select the filename of one of the files you want to input to this
script, or hit enter with the filename box empty to finish."""
    filename = "foo"  # non-empty sentinel so the loop runs at least once
    while filename:
        print(prompt)
        filename = filedialog.askopenfilename()
        if filename:
            try:
                # Open purely to verify the file is readable.
                with open(filename) as f:
                    pass
            except OSError as e:
                # BUG FIX: the original referenced the undefined name
                # ``file`` and applied ``%`` a second time on an already
                # formatted string, raising NameError/TypeError instead of
                # reporting the unreadable file.
                error = """\
Input failed for filename "%s".""" % filename
                print(error)
                print(e)
            else:
                newfiles.append(filename)
    return newfiles, -1
def getFormats(files):
    """Collect one format-specifier dict per input file.

    For each file the user may load a JSON format file (Y), use the
    default just for this file but keep asking (N), build a format from
    scratch (S), or use the default for this and all remaining files
    (D).  A loaded or hand-entered format may optionally become the new
    default for the remaining files.

    Returns a list of format dicts, parallel to ``files``.

    Fixes over the previous version:
    - ``elif keepTrying:`` tested the function object (always truthy)
      instead of the user's answer, so answering "n" still retried;
    - the ``while True`` retry loop never ``break``-ed after appending
      a result, so even a successful load re-prompted forever;
    - valid JSON that is not an object (e.g. ``null`` or a list) is now
      treated as a failed load instead of crashing.
    """
    neededKeys = ["prefix", "catchall", "keywords", "sep"]
    default = {"prefix": "##",
               "catchall": "!!",
               "keywords": ["game", "program", "task", "notes"],
               "sep": ","}
    answer = "foo"
    formatList = []
    for file in files:
        if answer not in "dD":
            answer = input("""\
Would you like to select a JSON file to serve as the format specifier for:
{0}
or would you like to use the default format?
prefix: "{1[prefix]}"
catchall: "{1[catchall]}"
keywords: "{1[keywords]!s}"
(keyword) sep(arator) : "{1[sep]}"
Your options are:
Y: Yes, select a format specifier
N: No, but continue asking, I might for a later file
S: Enter from scratch
D: No, use the default format for all files
If you successfully load or enter a format, you'll have the option of setting
it as the default for the remaining files. If your load attempt is
unsuccessful, you can either try again, do it from scratch, or quit the script.
> """.format(file, default))
        if answer not in ["y", "Y", "n", "N", "d", "D", "s", "S"]:
            answer = "n"
            print("Didn't get that, so I'll assume \"no.\"")
            formatList.append(default)
        elif answer in "yY":
            while True:
                formatFilename = filedialog.askopenfilename()
                temp = None
                try:
                    with open(formatFilename) as f:
                        temp = json.load(f)
                except OSError as e:
                    print("""\
Input failed for filename "%s".""" % formatFilename)
                    print(e)
                except ValueError as e:
                    print("""\
"%s" doesn't appear to be properly formatted for JSON deserialization.\
""" % formatFilename)
                    print(e)
                if not isinstance(temp, dict):
                    # Valid JSON that is not an object is useless here;
                    # treat it exactly like a failed load.
                    temp = None
                if temp is None:
                    # Load failed: ask once and act on the actual answer.
                    retry = keepTrying(file)
                    if retry == "s":
                        temp = {key: scratch(key) for key in neededKeys}
                    elif retry:
                        continue
                    else:
                        formatList.append(default)
                        break
                else:
                    # Loaded an object: patch up missing/invalid keys.
                    for key in neededKeys:
                        if key not in temp:
                            print("Key %s is not in the object loaded."
                                  % key)
                            temp[key] = scratch(key)
                    if not isinstance(temp["keywords"], list):
                        print("Keywords not a list/JSON array.")
                        temp["keywords"] = scratch("keywords")
                if keepFormat(temp):
                    default = temp
                formatList.append(temp)
                break
        elif answer in "sS":
            temp = {key: scratch(key) for key in neededKeys}
            if keepFormat(temp):
                default = temp
            formatList.append(temp)
        else:
            formatList.append(default)
    return formatList
def keepTrying(file):
    """Ask whether to retry loading a format file for *file*.

    Returns True (try another file), "s" (build one from scratch), or
    False (fall back to the default); raises SystemExit on "q".
    """
    reply = input("""\
Try to find another format file for %s? y/n (use default)/s (make from scratch)
/q (quit script)
> """ % file)
    # Order matters: an empty reply is a substring of every option
    # string, and the original treated it as "yes".
    if reply in "yY":
        return True
    if reply in "qQ":
        raise SystemExit
    if reply in "sS":
        return "s"
    if reply not in "nN":
        print("Didn't get that, so I'll assume \"no.\"")
    return False
def scratch(key):
    """Interactively build the value for format key *key*.

    For "keywords" this collects a (possibly empty) list of strings;
    for every other key it returns a single (possibly empty) string,
    re-asking once before accepting an empty value.
    """
    # BUG FIX: this compared against the undefined name `keywords`
    # (NameError) instead of the string "keywords".
    if key == "keywords":
        value = []
        keyword = "foo"
        while keyword:
            keyword = input("""\
Enter a keyword, or the empty string if you're done. {} keyword(s) entered.
> """.format(len(value)))
            if keyword:
                value.append(keyword)
            elif not value:
                keyword = input("""\
It's fine to leave "keywords" empty, that actually won't do anything
particularly untoward, but I just want to check that that's what you want. If
not, enter something now.
> """)
                if keyword:
                    value.append(keyword)
        # BUG FIX: return the list here; previously control fell through
        # to the generic blank-value prompt below, which could return a
        # string where callers expect a list.
        return value
    value = input("""\
Enter a value for {}.
> """.format(key))
    if value:
        return value
    value = input("""\
I just want to make sure, you want to leave {} blank? If not, enter something.
> """.format(key))
    return value
def keepFormat(form):
    """Show *form* and ask whether it should become the new default.

    Returns True only on a "y"/"Y" (or empty) answer; anything else
    keeps the old default.
    """
    print(form)
    choice = input("""\
Do you want to keep the format you just made or loaded as the default? y/n
> """)
    if choice in "yY":
        return True
    if choice not in "nN":
        print("Didn't get that, so I'll assume \"no.\"")
    return False
def getTodos(file, form):
newkeyed = {"chunks" : []}
newkeyed.update(dict.fromkeys(form["keywords"], []))
with open(file) as f:
catchall = False
chunk = []
chunky = {"chunks"}
linenum = 0
for line in f:
linenum = linenum + 1
comment, catchall, keys = lineLogic( |
tchx84/social-sugar | extensions/web/twitter/twitter/twr_status.py | Python | gpl-2.0 | 7,122 | 0.009267 | #!/usr/bin/env python
#
# Copyright (c) 2013 Martin Abente Lahaye. - tch@sugarlabs.org
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import json
from gi.repository import GObject
import twr_error
from twr_object import TwrObject
class TwrStatus(GObject.GObject):
    """A single tweet (status) on Twitter's REST API v1.1.

    Every network operation is scheduled on the GLib main loop with
    GObject.idle_add and performed through TwrObject; results are
    reported asynchronously through the paired success/``*-failed``
    signals declared below, never as return values.

    A TwrStatus is either "created" (wraps an existing tweet id) or
    "not created" (no id yet; update()/update_with_media() will post it
    and capture the id from the server's reply).
    """

    # REST API v1.1 endpoints.  The %s placeholders take the string id
    # of an already-created status.
    UPDATE_URL = 'https://api.twitter.com/1.1/statuses/update.json'
    UPDATE_WITH_MEDIA_URL = 'https://api.twitter.com/1.1/statuses/'\
                            'update_with_media.json'
    SHOW_URL = 'https://api.twitter.com/1.1/statuses/show.json'
    RETWEET_URL = 'https://api.twitter.com/1.1/statuses/retweet/%s.json'
    RETWEETS_URL = 'https://api.twitter.com/1.1/statuses/retweets/%s.json'
    DESTROY_URL = 'https://api.twitter.com/1.1/statuses/destroy/%s.json'

    # One success/failed signal pair per public operation.  Success
    # signals carry the decoded JSON dict; failed signals carry an
    # error message string.
    __gsignals__ = {
        'status-updated': (GObject.SignalFlags.RUN_FIRST,
                           None, ([object])),
        'status-updated-failed': (GObject.SignalFlags.RUN_FIRST,
                                  None, ([str])),
        'status-downloaded': (GObject.SignalFlags.RUN_FIRST,
                              None, ([object])),
        'status-downloaded-failed': (GObject.SignalFlags.RUN_FIRST,
                                     None, ([str])),
        'status-destroyed': (GObject.SignalFlags.RUN_FIRST,
                             None, ([object])),
        'status-destroyed-failed': (GObject.SignalFlags.RUN_FIRST,
                                    None, ([str])),
        'retweet-created': (GObject.SignalFlags.RUN_FIRST,
                            None, ([object])),
        'retweet-created-failed': (GObject.SignalFlags.RUN_FIRST,
                                   None, ([str])),
        'retweets-downloaded': (GObject.SignalFlags.RUN_FIRST,
                                None, ([object])),
        'retweets-downloaded-failed': (GObject.SignalFlags.RUN_FIRST,
                                       None, ([str]))}

    def __init__(self, status_id=None):
        """Wrap tweet *status_id*, or an un-posted status when None."""
        GObject.GObject.__init__(self)
        self._status_id = status_id

    def update(self, status, reply_status_id=None):
        """Post *status* as a new tweet, optionally replying to
        *reply_status_id*.  Emits status-updated(-failed)."""
        self._update(self.UPDATE_URL,
                     status,
                     None,
                     reply_status_id)

    def update_with_media(self, status, filepath, reply_status_id=None):
        """Like update(), but attaches the media file at *filepath*."""
        self._update(self.UPDATE_WITH_MEDIA_URL,
                     status,
                     filepath,
                     reply_status_id)

    def _update(self, url, status, filepath=None, reply_status_id=None):
        """Shared implementation of update()/update_with_media()."""
        self._check_is_not_created()
        # NOTE: (status) is just a parenthesised value, not a tuple.
        params = [('status', (status))]
        if reply_status_id is not None:
            params += [('in_reply_to_status_id', (reply_status_id))]
        GObject.idle_add(self._post,
                         url,
                         params,
                         filepath,
                         self.__completed_cb,
                         self.__failed_cb,
                         'status-updated',
                         'status-updated-failed')

    def show(self):
        """Fetch this tweet's data.  Emits status-downloaded(-failed)."""
        self._check_is_created()
        GObject.idle_add(self._get,
                         self.SHOW_URL,
                         [('id', (self._status_id))],
                         self.__completed_cb,
                         self.__failed_cb,
                         'status-downloaded',
                         'status-downloaded-failed')

    def destroy(self):
        """Delete this tweet.  Emits status-destroyed(-failed)."""
        self._check_is_created()
        GObject.idle_add(self._post,
                         self.DESTROY_URL % self._status_id,
                         None,
                         None,
                         self.__completed_cb,
                         self.__failed_cb,
                         'status-destroyed',
                         'status-destroyed-failed')

    def retweet(self):
        """Retweet this tweet.  Emits retweet-created(-failed)."""
        self._check_is_created()
        GObject.idle_add(self._post,
                         self.RETWEET_URL % self._status_id,
                         None,
                         None,
                         self.__completed_cb,
                         self.__failed_cb,
                         'retweet-created',
                         'retweet-created-failed')

    def retweets(self):
        """Fetch retweets of this tweet.
        Emits retweets-downloaded(-failed)."""
        self._check_is_created()
        GObject.idle_add(self._get,
                         self.RETWEETS_URL % self._status_id,
                         [],
                         self.__completed_cb,
                         self.__failed_cb,
                         'retweets-downloaded',
                         'retweets-downloaded-failed')

    def _check_is_not_created(self):
        """Raise if this status already has a server-side id."""
        if self._status_id is not None:
            raise twr_error.TwrStatusAlreadyCreated('Status already created')

    def _check_is_created(self):
        """Raise if this status has not been posted yet."""
        if self._status_id is None:
            raise twr_error.TwrStatusNotCreated('Status not created')

    def _get(self, url, params,
             completed_cb, failed_cb, completed_data, failed_data):
        """Run a GET request; wire TwrObject's transfer signals to the
        given callbacks.  (`object` shadows the builtin, locally only.)"""
        object = TwrObject()
        object.connect('transfer-completed', completed_cb, completed_data)
        object.connect('transfer-failed', failed_cb, failed_data)
        object.request('GET', url, params)

    def _post(self, url, params, filepath,
              completed_cb, failed_cb, completed_data, failed_data):
        """Run a POST request (with optional media file *filepath*)."""
        object = TwrObject()
        object.connect('transfer-completed', completed_cb, completed_data)
        object.connect('transfer-failed', failed_cb, failed_data)
        object.request('POST', url, params, filepath)

    def __completed_cb(self, object, data, signal):
        """Decode the server's JSON reply and re-emit it as *signal*.

        Twitter reports API errors inside a 200 reply under 'errors';
        those are raised (and swallowed/logged below) rather than
        emitted as success.  On the first successful post, the new
        tweet's id is captured from 'id_str'.
        """
        try:
            info = json.loads(data)
            if 'errors' in info.keys():
                raise twr_error.TwrStatusError(str(info['errors']))
            if self._status_id is None and 'id_str' in info.keys():
                self._status_id = str(info['id_str'])
            self.emit(signal, info)
        except Exception, e:  # Python 2 except syntax; file is py2.
            print '__completed_cb crashed with %s' % str(e)

    def __failed_cb(self, object, message, signal):
        """Forward a transfer failure message as the *signal* argument."""
        self.emit(signal, message)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.