| repo_name<br>stringlengths 5-100 | path<br>stringlengths 4-231 | language<br>stringclasses 1 value | license<br>stringclasses 15 values | size<br>int64 6-947k | score<br>float64 0-0.34 | prefix<br>stringlengths 0-8.16k | middle<br>stringlengths 3-512 | suffix<br>stringlengths 0-8.17k |
|---|---|---|---|---|---|---|---|---|
| tgbugs/pyontutils | test/test_oboio.py | Python | mit | 6,002 | 0.002832 |
import os
import shutil
import unittest
import pytest
from pyontutils import obo_io as oio
from .common import temp_path
obo_test_string = """format-version: 1.2
ontology: uberon/core
subsetdef: cumbo "CUMBO"
treat-xrefs-as-has-subclass: EV
import: http://purl.obolibrary.org/obo/uberon/chebi_import.owl
treat-xrefs-as-reverse-genus-differentia: TGMA part_of NCBITaxon:44484
[Term]
id: UBERON:0000003
xref: SCTID:272650008
relationship: in_lateral_side_of UBERON:0000033 {gci_relation="part_of", gci_filler="NCBITaxon:7776", notes="hagfish have median nostril"} ! head
!relationship: in_lateral_side_of UBERON:0000034 {gci_filler="NCBITaxon:7776", gci_relation="part_of", notes="hagfish have median nostril"} ! can't use this due to robot non-determinism
comment: robot does reorder the gci_ so that relation always comes before filler
property_value: external_definition "One of paired external openings of the nasal chamber.[AAO]" xsd:string {date_retrieved="2012-06-20", external_class="AAO:0000311", ontology="AAO", source="AAO:EJS"}
replaced_by: GO:0045202
consider: FMA:67408
[Term]
id: UBERON:0000033
name: head
comment: needed to prevent robot from throwing a null pointer on the relationship axiom above
[Term]
id: UBERON:0000034
[Typedef]
id: in_lateral_side_of
property_value: seeAlso FMA:86003
name: in_lateral_side_of
comment: id needed to prevent robot from throwing a null pointer on the relationship axiom above
comment: apparently also have to have name strangely enough and robot doesn't roundtrip random comments
is_transitive: true
"""
class TMHelper:
parse = oio.TVPair._parse_modifiers
serialize = oio.TVPair._format_trailing_modifiers
class TestOboIo(unittest.TestCase):
@classmethod
def setUpClass(cls):
if temp_path.exists():
shutil.rmtree(temp_path)
temp_path.mkdir()
@classmethod
def tearDownClass(cls):
shutil.rmtree(temp_path)
def test_parse_trailing_modifiers(self):
thm = TMHelper()
lines = (
(('relationship: part_of UBERON:0000949 '
'{source="AAO", source="FMA", source="XAO"} ! endocrine system'),
(('source', 'AAO'), ('source', 'FMA'), ('source', 'XAO'))),
('{oh="look", a="thing!"}', (('oh', 'look'), ('a', 'thing!'))),
('some randome values {oh="look", a="thing!"} ! yay!', (('oh', 'look'), ('a', 'thing!'))),
('some rando}me values {oh="l{ook", a="t{hing!"} ! yay!', (('oh', 'l{ook'), ('a', 't{hing!'))),
('some rando}me values {oh="l{ook", a="t}hing!"} ! yay!', (('oh', 'l{ook'), ('a', 't}hing!'))),
)
bads = [(expect, actual) for line, expect in lines
for _, actual in (thm.parse(line),)
if actual != expect]
assert not bads, '\n' + '\n\n'.join(f'{e}\n{a}' for e, a in bads)
def test_construct_simple_file(self):
of = oio.OboFile()
ids_names = [['123', 'test'],
['234', 'yee'],
['345', 'haw'],
['456', 'oio']]
terms = [oio.Term(id=i, name=n) for i, n in ids_names]
of.add(*terms)
str(of)
def test_header_treat_xrefs(self):
of = oio.OboFile()
test_tag = 'treat-xrefs-as-is_a'
tags_values = [
[test_tag, 'TEMP:test1'],
[test_tag, 'TEMP:test2'],
]
tvpairs = [oio.TVPair(tag=t, value=v) for t, v in tags_values]
of.header.add(*tvpairs)
tv = of.asObo()
assert len(tv.split(test_tag)) > 2, tv
def test_property_value_bug(self):
def _test(string):
pv = oio.Property_value.parse(string)
assert pv.value() == string
tv = oio.TVPair(string)
assert str(tv) == string
return pv, tv
minimal = ('property_value: any " ! " xsd:string')
pv, tv = _test(minimal)
darn = ('property_value: external_ontology_notes "see also MA:0002165 !'
' lieno-pancreatic vein" xsd:string {external_ontology="MA"}')
pv, tv = _test(darn)
ouch = ('property_value: editor_note "TODO -'
' this string breaks the parser A:0 ! wat" xsd:string')
pv, tv = _test(ouch)
hrm = ('property_value: editor_note "TODO -'
' consider relationship to UBERON:0000091 ! bilaminar disc" xsd:string')
pv, tv = _test(hrm)
def test_robot(self):
of1 = oio.OboFile(data=obo_test_string)
obo1 = of1.asObo(stamp=False)
obor1 = of1.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
of2 = oio.OboFile(data=obo1)
obo2 = of2.asObo(stamp=False)
# can't test against obor2 because obo1 reordered the trailing qualifiers
# and since there is seemingly no rational way to predict those, we simply
# preserve the ordering that we got
obor2 = of2.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
of3 = oio.OboFile(data=obor1)
obo3 = of3.asObo(stamp=False)
obor3 = of3.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
print(obo1)
print(obo2)
print(obor1)
print(obor2)
assert obo1 == obo2 == obo3 != obor1
assert obor1 == obor3
@pytest.mark.skipif(not shutil.which('robot'), reason='robot not installed')
def test_robot_rt(self):
of = oio.OboFile(data=obo_test_string)
obor1 = of.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
rtp = temp_path / 'robot-test.obo'
robot_path = temp_path / 'robot-test.test.obo'
of.write(rtp, stamp=False, version=oio.OBO_VER_ROBOT)
cmd = f'robot convert -vvv -i {rtp.as_posix()} -o {robot_path.as_posix()}'
wat = os.system(cmd)
if wat:
raise ValueError(wat)
datas = []
for path in (rtp, robot_path):
with open(path, 'rt') as f:
datas.append(f.read())
ours, rob = datas
assert ours == rob
| heke123/chromium-crosswalk | native_client_sdk/src/build_tools/test_sdk.py | Python | bsd-3-clause | 8,061 | 0.009924 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script for a testing an existing SDK.
This script is normally run immediately after build_sdk.py.
"""
import argparse
import os
import subprocess
import sys
import buildbot_common
import build_projects
import build_sdk
import build_version
import parse_dsc
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SDK_SRC_DIR = os.path.dirname(SCRIPT_DIR)
SDK_LIBRARY_DIR = os.path.join(SDK_SRC_DIR, 'libraries')
SDK_DIR = os.path.dirname(SDK_SRC_DIR)
SRC_DIR = os.path.dirname(SDK_DIR)
OUT_DIR = os.path.join(SRC_DIR, 'out')
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
def StepBuildExamples(pepperdir):
for config in ('Debug', 'Release'):
build_sdk.BuildStepMakeAll(pepperdir, 'getting_started',
'Build Getting Started (%s)' % config,
deps=False, config=config)
build_sdk.BuildStepMakeAll(pepperdir, 'examples',
'Build Examples (%s)' % config,
deps=False, config=config)
def StepCopyTests(pepperdir, toolchains, build_experimental):
buildbot_common.BuildStep('Copy Tests')
# Update test libraries and test apps
filters = {
'DEST': ['tests']
}
if not build_experimental:
filters['EXPERIMENTAL'] = False
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
build_projects.UpdateHelpers(pepperdir, clobber=False)
build_projects.UpdateProjects(pepperdir, tree, clobber=False,
toolchains=toolchains)
def StepBuildLibraries(pepperdir, sanitizer):
for config in ('Debug', 'Release'):
title = 'Build Libs (%s)[sanitizer=%s]' % (config, sanitizer)
build_sdk.BuildStepMakeAll(pepperdir, 'src', title, config=config,
args=GetSanitizerArgs(sanitizer))
def StepBuildTests(pepperdir, sanitizer):
for config in ('Debug', 'Release'):
title = 'Build Tests (%s)' % config
if sanitizer:
title += '[sanitizer=%s]' % sanitizer
build_sdk.BuildStepMakeAll(pepperdir, 'tests', title, deps=False,
config=config, args=GetSanitizerArgs(sanitizer))
def GetSanitizerArgs(sanitizer):
if sanitizer == 'valgrind':
return ['TOOLCHAIN=linux', 'RUN_UNDER=valgrind']
elif sanitizer == 'address':
return ['TOOLCHAIN=linux', 'ASAN=1']
elif sanitizer == 'thread':
return ['TOOLCHAIN=linux', 'TSAN=1']
return []
def StepRunSelLdrTests(pepperdir, sanitizer):
filters = {
'SEL_LDR': True
}
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
def RunTest(test, toolchain, config, arch=None):
args = ['STANDALONE=1', 'TOOLCHAIN=%s' % toolchain]
args += GetSanitizerArgs(sanitizer)
if arch is not None:
args.append('NACL_ARCH=%s' % arch)
build_projects.BuildProjectsBranch(pepperdir, test, clean=False,
deps=False, config=config,
args=args + ['run'])
if getos.GetPlatform() == 'win':
# On win32 we only support running on the system
# arch
archs = (getos.GetSystemArch('win'),)
elif getos.GetPlatform() == 'mac':
# We only ship 32-bit version of sel_ldr on mac.
archs = ('x86_32',)
else:
# On linux we can run both 32 and 64-bit, and arm (via qemu)
archs = ('x86_64', 'x86_32', 'arm')
for root, projects in tree.iteritems():
for project in projects:
if sanitizer:
sanitizer_name = '[sanitizer=%s]' % sanitizer
else:
sanitizer_name = ''
title = 'standalone test%s: %s' % (sanitizer_name,
os.path.basename(project['NAME']))
location = os.path.join(root, project['NAME'])
buildbot_common.BuildStep(title)
configs = ('Debug', 'Release')
# On linux we can run the standalone tests natively using the host
# compiler.
if getos.GetPlatform() == 'linux':
if sanitizer:
configs = ('Debug',)
for config in configs:
RunTest(location, 'linux', config)
if sanitizer:
continue
for toolchain in ('clang-newlib', 'glibc', 'pnacl'):
for arch in archs:
for config in configs:
RunTest(location, toolchain, config, arch)
def StepRunBrowserTests(toolchains, experimental):
buildbot_common.BuildStep('Run Tests')
args = [
sys.executable,
os.path.join(SCRIPT_DIR, 'test_projects.py'),
'--retry-times=3',
]
if experimental:
args.append('-x')
for toolchain in toolchains:
args.extend(['-t', toolchain])
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
buildbot_common.ErrorExit('Error running tests.')
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--experimental', help='build experimental tests',
action='store_true')
parser.add_argument('--sanitizer',
help='Run sanitizer (asan/tsan/valgrind) tests',
action='store_true')
parser.add_argument('--verbose', '-v', help='Verbose output',
action='store_true')
parser.add_argument('phases', nargs="*")
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# on the build.
del os.environ['NACL_SDK_ROOT']
# To setup bash completion for this command first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete test_sdk.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
options = parser.parse_args(args)
pepper_ver = str(int(build_version.ChromeMajorVersion()))
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
toolchains = ['clang-newlib', 'glibc', 'pnacl']
toolchains.append(getos.GetPlatform())
if options.verbose:
build_projects.verbose = True
phases = [
('build_examples', StepBuildExamples, pepperdir),
('copy_tests', StepCopyTests, pepperdir, toolchains, options.experimental),
('build_tests', StepBuildTests, pepperdir, None),
]
if options.sanitizer:
if getos.GetPlatform() != 'linux':
buildbot_common.ErrorExit('sanitizer tests only run on linux.')
clang_dir = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
'Release+Asserts', 'bin')
os.environ['PATH'] = clang_dir + os.pathsep + os.environ['PATH']
phases += [
('build_libs_asan', StepBuildLibraries, pepperdir, 'address'),
('build_libs_tsan', StepBuildLibraries, pepperdir, 'thread'),
('build_tests_asan', StepBuildTests, pepperdir, 'address'),
('build_tests_tsan', StepBuildTests, pepperdir, 'thread'),
('sel_ldr_tests_asan', StepRunSelLdrTests, pepperdir, 'address'),
('sel_ldr_tests_tsan', StepRunSelLdrTests, pepperdir, 'thread'),
# TODO(sbc): get valgrind installed on the bots to enable this
# configuration
#('sel_ldr_tests_valgrind', StepRunSelLdrTests, pepperdir, 'valgrind')
]
else:
phases += [
('sel_ldr_tests', StepRunSelLdrTests, pepperdir, None),
('browser_tests', StepRunBrowserTests, toolchains, options.experimental),
]
if options.phases:
phase_names = [p[0] for p in phases]
for arg in options.phases:
if arg not in phase_names:
msg = 'Invalid argument: %s\n' % arg
msg += 'Possible arguments:\n'
for name in phase_names:
msg += ' %s\n' % name
parser.error(msg.strip())
for phase in phases:
phase_name = phase[0]
if options.phases and phase_name not in options.phases:
continue
phase_func = phase[1]
phase_args = phase[2:]
phase_func(*phase_args)
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
buildbot_common.ErrorExit('test_sdk: interrupted')
| dongyoungy/dbseer_middleware | rs-sysmon2/plugins/dstat_snooze.py | Python | apache-2.0 | 737 | 0 |
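# Note: dstat plugins run inside dstat's own namespace, which supplies the
# dstat base class, the time module, and the loop/step/op/ansi/fchg globals
# used below; that is why the plugin file carries no imports of its own.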
class dstat_plugin(dstat):
def __init__(self):
self.name = 'snooze'
self.vars = ('snooze',)
self.type = 's'
self.width = 6
self.scale = 0
self.before = time.time()
def extract(self):
now = time.time()
if loop != 0:
self.val['snooze'] = now - self.before
else:
self.val['snooze'] = self.before
if step == op.delay:
self.before = now
def show(self):
if self.val['snooze'] > step + 1:
return ansi['default'] + ' -'
color = 'white'
if step != op.delay:
color = 'gray'
snoze, c = fchg(self.val['snooze'], 6, 1000)
return ansi[color] + snoze
| gw-sd-2016/Codir | codirSublime/SocketIO/websocket/_core.py | Python | gpl-2.0 | 16,014 | 0.000812 |
"""
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
from __future__ import print_function
import six
import socket
if six.PY3:
from base64 import encodebytes as base64encode
else:
from base64 import encodestring as base64encode
import struct
import threading
# websocket modules
from ._exceptions import *
from ._abnf import *
from ._socket import *
from ._utils import *
from ._url import *
from ._logging import *
from ._http import *
from ._handshake import *
from ._ssl_compat import *
"""
websocket python client.
=========================
This version supports only hybi-13.
Please see http://tools.ietf.org/html/rfc6455 for protocol.
"""
def create_connection(url, timeout=None, **options):
"""
Connect to url and return the WebSocket object.
Passing optional timeout parameter will set the timeout on the socket.
If no timeout is supplied,
the global default timeout setting returned by getdefaulttimeout() is used.
You can customize using 'options'.
If you set "header" list object, you can set your own custom header.
>>> conn = create_connection("ws://echo.websocket.org/",
... header=["User-Agent: MyProgram",
... "x-custom: header"])
timeout: socket timeout time. This value is integer.
if you set None for this value,
it means "use default_timeout value"
options: "header" -> custom http header list or dict.
"cookie" -> cookie value.
"origin" -> custom origin url.
"host" -> custom host header string.
"http_proxy_host" - http proxy host name.
"http_proxy_port" - http proxy port. If not set, set to 80.
"http_no_proxy" - host names, which doesn't use proxy.
"http_proxy_auth" - http proxy auth infomation.
tuple of username and password.
default is None
"enable_multithread" -> enable lock for multithread.
"sockopt" -> socket options
"sslopt" -> ssl option
"subprotocols" - array of available sub protocols.
default is None.
"skip_utf8_validation" - skip utf8 validation.
"""
sockopt = options.get("sockopt", [])
sslopt = options.get("sslopt", {})
fire_cont_frame = options.get("fire_cont_frame", False)
enable_multithread = options.get("enable_multithread", False)
skip_utf8_validation = options.get("skip_utf8_validation", False)
websock = WebSocket(sockopt=sockopt, sslopt=sslopt,
fire_cont_frame=fire_cont_frame,
enable_multithread=enable_multithread,
skip_utf8_validation=skip_utf8_validation)
websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
websock.connect(url, **options)
return websock
class WebSocket(object):
"""
Low level WebSocket interface.
This class is based on
The WebSocket protocol draft-hixie-thewebsocketprotocol-76
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
We can connect to the websocket server and send/receive data.
The following example is an echo client.
>>> import websocket
>>> ws = websocket.WebSocket()
>>> ws.connect("ws://echo.websocket.org")
>>> ws.send("Hello, Server")
>>> ws.recv()
'Hello, Server'
>>> ws.close()
get_mask_key: a callable to produce new mask keys, see the set_mask_key
function's docstring for more details
sockopt: values for socket.setsockopt.
sockopt must be tuple and each element is argument of sock.setscokopt.
sslopt: dict object for ssl socket option.
fire_cont_frame: fire recv event for each cont frame. default is False
enable_multithread: if set to True, lock send method.
skip_utf8_validation: skip utf8 validation.
"""
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
fire_cont_frame=False, enable_multithread=False,
skip_utf8_validation=False):
"""
Initialize WebSocket object.
"""
self.sock_opt = sock_opt(sockopt, sslopt)
self.handshake_response = None
self.sock = None
self.connected = False
self.get_mask_key = get_mask_key
# These buffer over the build-up of a single frame.
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)
if enable_multithread:
self.lock = threading.Lock()
else:
self.lock = NoLock()
def __iter__(self):
"""
Allow iteration over websocket, implying sequential `recv` executions.
"""
while True:
yield self.recv()
def __next__(self):
return self.recv()
def next(self):
return self.__next__()
def fileno(self):
return self.sock.fileno()
def set_mask_key(self, func):
"""
Set function to create mask key. You can customize the mask key generator.
Mainly, this is for testing purposes.
func: callable object. The function must take 1 integer argument,
which is the length of the mask key.
It must return a string (byte array)
of the specified length.
"""
self.get_mask_key = func
def gettimeout(self):
"""
Get the websocket timeout (seconds).
"""
return self.sock_opt.timeout
def settimeout(self, timeout):
"""
Set the timeout to the websocket.
timeout: timeout time (seconds).
"""
self.sock_opt.timeout = timeout
if self.sock:
self.sock.settimeout(timeout)
timeout = property(gettimeout, settimeout)
def getsubprotocol(self):
"""
get subprotocol
"""
if self.handshake_response:
return self.handshake_response.subprotocol
else:
return None
subprotocol = property(getsubprotocol)
def getstatus(self):
"""
get handshake status
"""
if self.handshake_response:
return self.handshake_response.status
else:
return None
status = property(getstatus)
def getheaders(self):
"""
get handshake response header
"""
if self.handshake_response:
return self.handshake_response.headers
else:
return None
headers = property(getheaders)
def connect(self, url, **options):
"""
Connect to url. url is websocket url scheme.
ie. ws://host:port/resource
You can customize using 'options'.
If you set "header" list object, you can set your own custom header.
>>> ws = WebSocket()
>>> ws.connect("ws://echo.websocket.org/",
... header=["User-Agent: MyProgram",
... "x-custom: header"])
timeout: socket timeout time. This value is integer.
if you set None for this value,
it means "use default_timeout value"
options: "header" -> custom http header
| xpavlus/parabaramba | venv/lib/python2.7/site-packages/cherrypy/test/test_proxy.py | Python | gpl-3.0 | 5,136 | 0.000195 |
import cherrypy
from cherrypy.test import helper
script_names = ["", "/path/to/myapp"]
class ProxyTest(helper.CPWebCase):
def setup_server():
# Set up site
cherrypy.config.update({
'tools.proxy.on': True,
'tools.proxy.base': 'www.mydomain.test',
})
# Set up application
class Root:
def __init__(self, sn):
# Calculate a URL outside of any requests.
self.thisnewpage = cherrypy.url(
"/this/new/page", script_name=sn)
def pageurl(self):
return self.thisnewpage
pageurl.exposed = True
def index(self):
raise cherrypy.HTTPRedirect('dummy')
index.exposed = True
def remoteip(self):
return cherrypy.request.remote.ip
remoteip.exposed = True
def xhost(self):
raise cherrypy.HTTPRedirect('blah')
xhost.exposed = True
xhost._cp_config = {'tools.proxy.local': 'X-Host',
'tools.trailing_slash.extra': True,
}
def base(self):
return cherrypy.request.base
base.exposed = True
def ssl(self):
return cherrypy.request.base
ssl.exposed = True
ssl._cp_config = {'tools.proxy.scheme': 'X-Forwarded-Ssl'}
def newurl(self):
return ("Browse to <a href='%s'>this page</a>."
% cherrypy.url("/this/new/page"))
newurl.exposed = True
for sn in script_names:
cherrypy.tree.mount(Root(sn), sn)
setup_server = staticmethod(setup_server)
def testProxy(self):
self.getPage("/")
self.assertHeader('Location',
"%s://www.mydomain.test%s/dummy" %
(self.scheme, self.prefix()))
# Test X-Forwarded-Host (Apache 1.3.33+ and Apache 2)
self.getPage(
"/", headers=[('X-Forwarded-Host', 'http://www.example.test')])
self.assertHeader('Location', "http://www.example.test/dummy")
self.getPage("/", headers=[('X-Forwarded-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/dummy" %
self.scheme)
# Test multiple X-Forwarded-Host headers
self.getPage("/", headers=[
('X-Forwarded-Host', 'http://www.example.test, www.cherrypy.test'),
])
self.assertHeader('Location', "http://www.example.test/dummy")
# Test X-Forwarded-For (Apache2)
self.getPage("/remoteip",
headers=[('X-Forwarded-For', '192.168.0.20')])
self.assertBody("192.168.0.20")
#Fix bug #1268
self.getPage("/remoteip",
headers=[
('X-Forwarded-For', '67.15.36.43, 192.168.0.20')
])
self.assertBody("67.15.36.43")
# Test X-Host (lighttpd; see https://trac.lighttpd.net/trac/ticket/418)
self.getPage("/xhost", headers=[('X-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/blah" %
self.scheme)
# Test X-Forwarded-Proto (lighttpd)
self.getPage("/base", headers=[('X-Forwarded-Proto', 'https')])
self.assertBody("https://www.mydomain.test")
# Test X-Forwarded-Ssl (webfaction?)
self.getPage("/ssl", headers=[('X-Forwarded-Ssl', 'on')])
self.assertBody("https://www.mydomain.test")
# Test cherrypy.url()
for sn in script_names:
# Test the value inside requests
self.getPage(sn + "/newurl")
self.assertBody(
"Browse to <a href='%s://www.mydomain.test" % self.scheme
+ sn + "/this/new/page'>this page</a>.")
self.getPage(sn + "/newurl", headers=[('X-Forwarded-Host',
'http://www.example.test')])
self.assertBody("Browse to <a href='http://www.example.test"
+ sn + "/this/new/page'>this page</a>.")
# Test the value outside requests
port = ""
if self.scheme == "http" and self.PORT != 80:
port = ":%s" % self.PORT
elif self.scheme == "https" and self.PORT != 443:
port = ":%s" % self.PORT
host = self.HOST
if host in ('0.0.0.0', '::'):
import socket
host = socket.gethostname()
expected = ("%s://%s%s%s/this/new/page"
% (self.scheme, host, port, sn))
self.getPage(sn + "/pageurl")
self.assertBody(expected)
# Test trailing slash (see
# https://github.com/cherrypy/cherrypy/issues/562).
self.getPage("/xhost/", headers=[('X-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/xhost"
% self.scheme)
| hirokiky/django-websettings | runtest.py | Python | mit | 1,437 | 0.001392 |
import os
import sys
import django
def main():
"""
Standalone django model test with a 'memory-only-django-installation'.
You can play with a django model without a complete django app installation.
http://www.djangosnippets.org/snippets/1044/
"""
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
os.environ["DJANGO_SETTINGS_MODULE"] = "django.conf.global_settings"
from django.conf import global_settings
global_settings.INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.contenttypes',
'websettings',
)
global_settings.DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
global_settings.MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
global_settings.SECRET_KEY = "secret_key_for_testing"
global_settings.ROOT_URLCONF = "websettings.urls"
global_settings.WEBSETTINGS_MODULE = 'websettings.tests.settingstore'
from django.test.utils import get_runner
test_runner = get_runner(global_settings)
test_runner = test_runner()
failures = test_runner.run_tests(['websettings'])
sys.exit(failures)
if __name__ == '__main__':
main()
| YFFY/Supervisor | CountMan/monitor/queryer.py | Python | gpl-2.0 | 854 | 0.005855 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
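# The nested os.path.split calls below append the grandparent of this
# script's directory to sys.path, making the CountMan package importable.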
sys.path.append(os.path.split(os.path.split(os.path.abspath(sys.path[0]))[0])[0])
from CountMan.monitor.util import *
from CountMan.monitor.setting import *
class Queryer(object):
def __init__(self):
self.dao = DatabaseInterface()
self.dataSet = dict()
self.logger = getLogger('root')
def getData(self):
for queryKey in QUERYPARAM:
self.dataSet[queryKey] = getResponse(QUERYPARAM.get(queryKey))
@property
def set2db(self):
self.getData()
self.logger.info('get query data: {0} success'.format(self.dataSet))
self.dao.insertCollection(self.dataSet)
if __name__ == '__main__':
q = Queryer()
if ISDEBUG:
import cProfile
cProfile.run("q.set2db")
else:
q.set2db
| tingelst/pyversor | pyversor/c3d/directions.py | Python | bsd-3-clause | 1,721 | 0.000581 |
# Copyright (c) 2015, Lars Tingelstad
# All rights reserved.
#
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pyversor nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Operations on directions in 3D conformal geometric algebra."""
from __pyversor__.c3d.directions import (
DirectionVector, DirectionBivector, DirectionTrivector)
| chrisRubiano/djangoblog | blog/urls.py | Python | mit | 500 | 0.018 |
from django.conf.urls import include, url
from . import views
urlpatterns = [
url(r'^$', views.post_list), # URL for the list of all posts
url(r'^post/(?P<pk>[0-9]+)/$', views.post_detail), # URL to view a post's details
url(r'^post/new/$', views.post_new, name='post_new'), # URL to create a new post without the admin panel
url(r'^post/(?P<pk>[0-9]+)/edit/$', views.post_edit, name='post_edit'), # URL to edit posts without the admin panel
]
| instinct-vfx/rez | src/rez/cli/bundle.py | Python | apache-2.0 | 1,729 | 0.001157 |
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the Rez Project
'''
Bundle a context and its packages into a relocatable dir.
'''
from __future__ import print_function
import os
import os.path
import sys
def setup_parser(parser, completions=False):
group = parser.add_mutually_exclusive_group()
group.add_argument(
"-s", "--skip-non-relocatable", action="store_true",
help="leave non-relocatable packages non-bundled, rather than raise an error")
group.add_argument(
"-f", "--force", action="store_true",
help="bundle package even if it isn't relocatable (use at your own risk)")
group.add_argument(
"-n", "--no-lib-patch", action="store_true",
help="don't apply library patching within the bundle")
parser.add_argument(
"RXT",
help="context to bundle")
parser.add_argument(
"DEST_DIR",
help="directory to create bundle in; must not exist")
def command(opts, parser, extra_arg_groups=None):
from rez.utils.logging_ import print_error
from rez.bundle_context import bundle_context
from rez.resolved_context import ResolvedContext
rxt_filepath = os.path.abspath(os.path.expanduser(opts.RXT))
dest_dir = os.path.abspath(os.path.expanduser(opts.DEST_DIR))
# sanity checks
if not os.path.exists(rxt_filepath):
print_error("File does not exist: %s", rxt_filepath)
sys.exit(1)
context = ResolvedContext.load(rxt_filepath)
bundle_context(
context=context,
dest_dir=dest_dir,
force=opts.force,
skip_non_relocatable=opts.skip_non_relocatable,
verbose=opts.verbose,
patch_libs=(not opts.no_lib_patch)
)
| crmccreary/openerp_server | openerp/addons/account/report/account_central_journal.py | Python | agpl-3.0 | 5,320 | 0.004511 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
from common_report_header import common_report_header
#
# Use period and Journal for selection or resources
#
class journal_print(report_sxw.rml_parse, common_report_header):
def __init__(self, cr, uid, name, context=None):
if context is None:
context = {}
super(journal_print, self).__init__(cr, uid, name, context=context)
self.period_ids = []
self.journal_ids = []
self.localcontext.update({
'time': time,
'lines': self.lines,
'sum_debit': self._sum_debit,
'sum_credit': self._sum_credit,
'get_filter': self._get_filter,
'get_fiscalyear': self._get_fiscalyear,
'get_account': self._get_account,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_sortby': self._get_sortby,
'get_start_date':self._get_start_date,
'get_end_date':self._get_end_date,
'display_currency':self._display_currency,
'get_target_move': self._get_target_move,
})
def set_context(self, objects, data, ids, report_type=None):
obj_move = self.pool.get('account.move.line')
new_ids = ids
self.query_get_clause = ''
self.target_move = data['form'].get('target_move', 'all')
if (data['model'] == 'ir.ui.menu'):
new_ids = 'active_ids' in data['form'] and data['form']['active_ids'] or []
self.query_get_clause = 'AND '
self.query_get_clause += obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids)
if new_ids:
self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),))
res = self.cr.fetchall()
self.period_ids, self.journal_ids = zip(*res)
return super(journal_print, self).set_context(objects, data, ids, report_type=report_type)
def lines(self, period_id, journal_id):
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT a.currency_id, a.code, a.name, c.symbol AS currency_code, l.currency_id, l.amount_currency, SUM(debit) AS debit, SUM(credit) AS credit \
from account_move_line l \
LEFT JOIN account_move am ON (l.move_id=am.id) \
LEFT JOIN account_account a ON (l.account_id=a.id) \
LEFT JOIN res_currency c on (l.currency_id=c.id) WHERE am.state IN %s AND l.period_id=%s AND l.journal_id=%s '+self.query_get_clause+' GROUP BY a.id, a.code, a.name,l.amount_currency,c.symbol, a.currency_id,l.currency_id', (tuple(move_state), period_id, journal_id))
return self.cr.dictfetchall()
def _set_get_account_currency_code(self, account_id):
self.cr.execute("SELECT c.symbol as code "\
"FROM res_currency c,account_account as ac "\
"WHERE ac.id = %s AND ac.currency_id = c.id"%(account_id))
result = self.cr.fetchone()
if result:
self.account_currency = result[0]
else:
self.account_currency = False
def _get_account(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).company_id.name
return super(journal_print,self)._get_account(data)
def _get_fiscalyear(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).fiscalyear_id.name
return super(journal_print,self)._get_fiscalyear(data)
def _display_currency(self, data):
if data['model'] == 'account.journal.period':
return True
return data['form']['amount_currency']
report_sxw.report_sxw('report.account.central.journal', 'account.journal.period', 'addons/account/report/account_central_journal.rml', parser=journal_print, header='internal')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| DigitalCampus/django-oppia | tests/viz/management/commands/test_cartodb_update.py | Python | gpl-3.0 | 2,062 | 0 |
from io import StringIO
import re
import httpretty
from django.core.management import call_command
from oppia.test import OppiaTestCase
from settings import constants
from settings.models import SettingProperties
from tests.utils import get_file_contents
class CartoDBUpdateTest(OppiaTestCase):
fixtures = ['tests/test_user.json',
'tests/test_oppia.json',
'tests/test_quiz.json',
'tests/test_permissions.json',
'default_badges.json',
'tests/test_course_permissions.json',
'tests/test_viz.json']
cartodb_valid_response = './oppia/fixtures/tests/cartodb/200_valid.json'
cartodb_uri_regex = re.compile(
"https://[A-Za-z0-9-]+.cartodb.com/api/v2/sql??(?:&?[^=&]*=[^=&]*)*")
@httpretty.activate
def test_cartodb_output(self):
cartodb_response = get_file_contents(self.cartodb_valid_response)
httpretty.register_uri(httpretty.GET,
self.cartodb_uri_regex,
body=cartodb_response)
SettingProperties.set_string(constants.OPPIA_CARTODB_ACCOUNT,
"account")
SettingProperties.set_string(constants.OPPIA_CARTODB_KEY,
"FAKE_APIKEY")
SettingProperties.set_string(constants.OPPIA_HOSTNAME, "localhost")
out = StringIO()
call_command('cartodb_update', stdout=out)
@httpretty.activate
def test_cartodb_no_key_account(self):
cartodb_response = get_file_contents(self.cartodb_valid_response)
httpretty.register_uri(httpretty.GET,
self.cartodb_uri_regex,
body=cartodb_response)
SettingProperties.set_string(constants.OPPIA_CARTODB_ACCOUNT, None)
SettingProperties.set_string(constants.OPPIA_CARTODB_KEY, None)
SettingProperties.set_string(constants.OPPIA_HOSTNAME, None)
out = StringIO()
call_command('cartodb_update', stdout=out)
| arunkgupta/gramps | gramps/plugins/gramplet/backlinks.py | Python | gpl-2.0 | 7,865 | 0.002924 |
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011 Nick Hall
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#
from gramps.gui.listmodel import ListModel, NOSORT
from gramps.gen.utils.db import navigation_label
from gramps.gen.plug import Gramplet
from gramps.gen.ggettext import gettext as _
from gi.repository import Gtk
class Backlinks(Gramplet):
"""
Displays the back references for an object.
"""
def init(self):
self.gui.WIDGET = self.build_gui()
self.gui.get_container_widget().remove(self.gui.textview)
self.gui.get_container_widget().add_with_viewport(self.gui.WIDGET)
self.gui.WIDGET.show()
def build_gui(self):
"""
Build the GUI interface.
"""
top = Gtk.TreeView()
titles = [(_('Type'), 1, 100),
(_('Name'), 2, 100)]
self.model = ListModel(top, titles)
return top
def display_backlinks(self, active_handle):
"""
Display the back references for an object.
"""
for classname, handle in \
self.dbstate.db.find_backlink_handles(active_handle):
name = navigation_label(self.dbstate.db, classname, handle)[0]
self.model.add((_(classname), name))
self.set_has_data(self.model.count > 0)
def get_has_data(self, active_handle):
"""
Return True if the gramplet has data, else return False.
"""
if active_handle is None:
return False
for handle in self.dbstate.db.find_backlink_handles(active_handle):
return True
return False
class PersonBacklinks(Backlinks):
"""
Displays the back references for a person.
"""
def db_changed(self):
self.dbstate.db.connect('person-update', self.update)
def active_changed(self, handle):
self.update()
def update_has_data(self):
active_handle = self.get_active('Person')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Person')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class EventBacklinks(Backlinks):
"""
Displays the back references for an event.
"""
def db_changed(self):
self.dbstate.db.connect('event-update', self.update)
self.connect_signal('Event', self.update)
def update_has_data(self):
active_handle = self.get_active('Event')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Event')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class FamilyBacklinks(Backlinks):
"""
Displays the back references for a family.
"""
def db_changed(self):
self.dbstate.db.connect('family-update', self.update)
self.connect_signal('Family', self.update)
def update_has_data(self):
active_handle = self.get_active('Family')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Family')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class PlaceBacklinks(Backlinks):
"""
Displays the back references for a place.
"""
def db_changed(self):
self.dbstate.db.connect('place-update', self.update)
self.connect_signal('Place', self.update)
def update_has_data(self):
active_handle = self.get_active('Place')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Place')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class SourceBacklinks(Backlinks):
"""
Displays the back references for a source.
"""
def db_changed(self):
self.dbstate.db.connect('source-update', self.update)
self.connect_signal('Source', self.update)
def update_has_data(self):
active_handle = self.get_active('Source')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Source')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class CitationBacklinks(Backlinks):
"""
Displays the back references for a Citation.
"""
def db_changed(self):
self.dbstate.db.connect('citation-update', self.update)
self.connect_signal('Citation', self.update)
def update_has_data(self):
active_handle = self.get_active('Citation')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Citation')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class RepositoryBacklinks(Backlinks):
"""
Displays the back references for a repository.
"""
def db_changed(self):
self.dbstate.db.connect('repository-update', self.update)
self.connect_signal('Repository', self.update)
def update_has_data(self):
active_handle = self.get_active('Repository')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Repository')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class MediaBacklinks(Backlinks):
"""
Displays the back references for a media object.
"""
def db_changed(self):
self.dbstate.db.connect('media-update', self.update)
self.connect_signal('Media', self.update)
def update_has_data(self):
active_handle = self.get_active('Media')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Media')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
class NoteBacklinks(Backlinks):
"""
Displays the back references for a note.
"""
def db_changed(self):
self.dbstate.db.connect('note-update', self.update)
self.connect_signal('Note', self.update)
def update_has_data(self):
active_handle = self.get_active('Note')
self.set_has_data(self.get_has_data(active_handle))
def main(self):
active_handle = self.get_active('Note')
self.model.clear()
if active_handle:
self.display_backlinks(active_handle)
else:
self.set_has_data(False)
| dronecrew/px4tools | px4tools/mapping.py | Python | bsd-3-clause | 952 | 0 |
"""
Mapping functions
"""
# pylint: disable=invalid-name, missing-docstring, no-member
from __future__ import print_function
import pandas
from mpl_toolkits.basemap import Basemap
def create_map(lon, lat):
"""
Create a map projection.
"""
lon_center = lon[0]
lat_center = lat[0]
return Basemap(
lon_0=lon_center,
lat_0=lat_center, projection='tmerc',
width=1e-5, height=1e-5)
def project_lat_lon(df):
gps_map = Basemap(lat_0=df.GPS_Lat.values[0],
lon_0=df.GPS_Lon.values[0],
width=11e-5, height=1e-5, projection='tmerc')
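# Note: Basemap(lon, lat) returns (easting, northing); assigning them to
# (gps_y, gps_x) presumably reflects the aerospace X-north / Y-east convention.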
gps_y, gps_x = gps_map(df.GPS_Lon.values, df.GPS_Lat.values)
gps_z = df.GPS_Alt - df.GPS_Alt.values[0]
df_new = pandas.DataFrame(pandas.DataFrame({
'GPS_X': gps_x, 'GPS_Y': gps_y, 'GPS_Z': gps_z}, index=df.index))
return pandas.concat([df, df_new], axis=1)
# vim: set et fenc= ff=unix sts=0 sw=4 ts=4 :
| joepettigrew/multi-blog | main.py | Python | mit | 890 | 0 |
# My files
from handlers import MainPage
from handlers import WelcomePage
from handlers import SignUpPage
from handlers import SignIn
from handlers import SignOut
from handlers import NewPost
from handlers import EditPost
from handlers import DeletePost
from handlers import SinglePost
from handlers import LikePost
from handlers import DislikePost
from handlers import EditComment
from handlers import DeleteComment
import webapp2
app = webapp2.WSGIApplication([
('/', MainPage),
('/signup', SignUpPage),
('/welcome', WelcomePage),
('/post/([0-9]+)', SinglePost),
('/new-post', NewPost),
('/edit-post/([0-9]+)', EditPost),
('/delete-post', DeletePost),
('/like-post', LikePost),
('/dislike-post', DislikePost),
('/edit-comment', EditComment),
('/delete-comment', DeleteComment),
('/login', SignIn),
('/logout', SignOut)
], debug=True)
| concordusapps/alchemist | alchemist/tests/test_management.py | Python | mit | 489 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division
from alchemist import management
from flask import Flask
class TestManager:
def setup(self):
self.app = Flask('alchemist')
self.app.config['COMPONENTS'] = ['alchemist']
def test_discover_commands(self):
"""Should discover commands from registered components.
"""
manager = management.Manager(self.app)
assert 'run' in manager._commands
| infinity0n3/fabtotum-experiments | fabtotum/fabui/database.py | Python | gpl-3.0 | 825 | 0 |
#!/bin/env python
# -*- coding: utf-8; -*-
#
# (c) 2016 FABtotum, http://www.fabtotum.com
#
# This file is part of FABUI.
#
# FABUI is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# FABUI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FABUI. If not, see <http://www.gnu.org/licenses/>.
# Import standard python module
# Import external modules
# Import internal modules
| titos-carrasco/DaguCar | Python/TestDaguCar.py | Python | mit | 591 | 0.060914 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from rcr.robots.dagucar.DaguCar import DaguCar
def main():
car = DaguCar( "/dev/rfcomm1", 500 )
car.MoveForward( 15 )
car.Pause( 1000 )
car.MoveBackward( 15 )
car.Pause( 1000 )
car.MoveLeft( 15 )
car.Pause( 1000 )
car.MoveRight( 15 )
car.Pause( 1000 )
car.MoveForwardLeft( 15 )
car.Pause( 1000 )
car.MoveForwardRight( 15 )
car.Pause( 1000 )
car.MoveBackwardLeft( 15 )
car.Pause( 1000 )
car.MoveBackwardRight( 15 )
car.Pause( 1000 )
car.Stop()
car.Close()
###
main()
| laborautonomo/Mailpile | mailpile/__main__.py | Python | apache-2.0 | 118 | 0 |
import sys
from mailpile.app import Main
def main():
Main(sys.argv[1:])
if __name__ == "__main__":
main()
| xsuchy/tito | src/tito/distributionbuilder.py | Python | gpl-2.0 | 2,016 | 0.008433 |
import os
from tito.builder import UpstreamBuilder
from tito.common import debug, run_command
class DistributionBuilder(UpstreamBuilder):
"
|
"" This class is used for building packages for distributions.
Parent class UpstreamBuilder build one big patch from upstream and create e.g.:
Patch0: foo-1.2.13-1-t
|
o-foo-1.2.13-3-sat.patch
This class create one patch per each release. E.g.:
Patch0: foo-1.2.13-1-to-foo-1.2.13-2-sat.patch
Patch1: foo-1.2.13-2-to-foo-1.2.13-3-sat.patch
"""
def __init__(self, name=None, version=None, tag=None, build_dir=None,
pkg_config=None, global_config=None, user_config=None, options=None):
UpstreamBuilder.__init__(self, name, version, tag, build_dir, pkg_config,
global_config, user_config, options)
self.patch_files = []
def patch_upstream(self):
""" Create one patch per each release """
os.chdir(os.path.join(self.git_root, self.relative_project_dir))
debug("Running /usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
% (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
output = run_command("/usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
% (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
self.patch_files = output.split("\n")
for p_file in self.patch_files:
run_command("cp %s/%s %s" % (self.rpmbuild_gitcopy, p_file, self.rpmbuild_sourcedir))
(patch_number, patch_insert_index, patch_apply_index, lines) = self._patch_upstream()
for patch in self.patch_files:
lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number, patch))
lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
patch_number += 1
patch_insert_index += 1
patch_apply_index += 2
self._write_spec(lines)
| malemburg/epcon | microblog/feeds.py | Python | bsd-2-clause | 2,027 | 0.00296 |
# -*- coding: UTF-8 -*-
from django.conf import settings as dsettings
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.core.urlresolvers import reverse
from microblog import models
from microblog import settings
import os.path
class FeedsDict(dict):
"""
Custom dict that raises FeedDoesNotExist instead of a KeyError.
"""
def __getitem__(self, k):
try:
return super(FeedsDict, self).__getitem__(k)
except KeyError:
raise FeedDoesNotExist()
languages = FeedsDict((l, l) for l, n in dsettings.LANGUAGES)
languages[None] = settings.MICROBLOG_DEFAULT_LANGUAGE
class LatestPosts(Feed):
def get_object(self, request, lang_code=None):
return languages[lang_code]
def link(self, obj):
try:
path = reverse('microblog-feeds-latest')
except:
path = reverse('microblog-feeds-latest', kwargs={'lang_code': obj})
return os.path.join(dsettings.DEFAULT_URL_PREFIX, path)
title = settings.MICROBLOG_TITLE
description = settings.MICROBLOG_DESCRIPTION
description_template = 'microblog/feeds/item_description.html'
author_name = settings.MICROBLOG_AUTHOR_NAME
author_email = settings.MICROBLOG_AUTHOR_EMAIL
author_link = settings.MICROBLOG_AUTHOR_LINK
def items(self, obj):
return models.PostContent.objects\
.all()\
.filter(language=obj, post__status='P')\
.exclude(headline='')\
.select_related('post', 'post__author')\
.order_by('-post__date')[:10]
def item_title(self, item):
return item.headline
def item_description(self, item):
return item.body
def item_pubdate(self, item):
return item.post.date
def item_categories(self, item):
return [ x.name for x in item.post.tags.all()]
def item_author_name(self, item):
user = item.post.author
return '%s %s' % (user.first_name, user.last_name)
| carloscadena/django-imager | imagersite/imager_api/tests.py | Python | mit | 2,994 | 0 |
"""Test modile for API."""
from django.contrib.auth.models import User
from django.core.files.uploadedfile import SimpleUploadedFile
from django.urls import reverse
import factory
from imager_images.models import Album
from imager_images.models import Photo
from imagersite.settings import MEDIA_ROOT
import os
from rest_framework.test import APITestCase
class UserFactory(factory.django.DjangoModelFactory):
"""Setting up users for tests."""
class Meta(object):
"""Meta."""
model = User
username = factory.Sequence(lambda n: "user{}".format(n))
email = factory.Sequence(
lambda n: "user{}@example.com".format(n)
)
class PhotoFactory(factory.django.DjangoModelFactory):
"""Create photos for testing."""
class Meta(object):
"""Meta."""
model = Photo
title = factory.Sequence(lambda n: "photo{}".format(n))
image = SimpleUploadedFile(
name="testing.png",
content=open(MEDIA_ROOT + '/test/testing.png', 'rb').read(),
content_type="image/png"
)
class AlbumFactory(factory.django.DjangoModelFactory):
"""Create albums for testing."""
class Meta(object):
"""Meta."""
model = Album
title = factory.Sequence(lambda n: "album{}".format(n))
class ApiTests(APITestCase):
"""Tests for the Api."""
def setUp(self):
"""Set up for testing."""
user = UserFactory.create()
user.set_password('caaarlos')
user.save()
self.user = user
photos = [PhotoFactory.create(profile=user.profile) for i in range(20)]
album = AlbumFactory.build()
album.profile = user.profile
album.save()
for photo in photos:
album.photos.add(photo)
album.cover_photo = photos[0]
album.save()
def tearDown(self):
"""Teardown when tests complete."""
to_delete = os.path.join(MEDIA_ROOT, 'photos', 'testing*.png')
os.system('rm -rf ' + to_delete)
def test_get_route_status_200(self):
"""Status 200."""
response = self.client.get(reverse('api'))
self.assertEqual(response.status_code, 200)
def test_get_route_sends_photos(self):
"""Sends Json Photos."""
response = self.client.get(reverse('api'))
self.assertEqual(
len(response.json()),
Photo.objects.count()
)
def test_get_route_photos_have_meta_info(self):
"""Meta info on photos from api."""
response = self.client.get(reverse('api'))
image_meta = response.json()[0]
self.assertTrue('title' in image_meta)
self.assertTrue('description' in image_meta)
self.assertTrue('profile' in image_meta)
self.assertTrue('image' in image_meta)
self.assertTrue('date_uploaded' in image_meta)
self.assertTrue('date_modified' in image_meta)
self.assertTrue('date_published' in image_meta)
self.assertTrue('published' in image_meta)
| Architektor/PySnip | venv/lib/python2.7/site-packages/twisted/test/test_monkey.py | Python | gpl-3.0 | 5,637 | 0.001951 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.monkey}.
"""
from __future__ import division, absolute_import
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher
class TestObj:
def __init__(self):
self.foo = 'foo value'
self.bar = 'bar value'
self.baz = 'baz value'
class MonkeyPatcherTests(unittest.SynchronousTestCase):
"""
Tests for L{MonkeyPatcher} monkey-patching class.
"""
def setUp(self):
self.testObject = TestObj()
self.originalObject = TestObj()
self.monkeyPatcher = MonkeyPatcher()
def test_empty(self):
"""
A monkey patcher without patches shouldn't change a thing.
"""
self.monkeyPatcher.patch()
# We can't assert that all state is unchanged, but at least we can
# check our test object.
self.assertEqual(self.originalObject.foo, self.testObject.foo)
self.assertEqual(self.originalObject.bar, self.testObject.bar)
self.assertEqual(self.originalObject.baz, self.testObject.baz)
def test_constructWithPatches(self):
"""
Constructing a L{MonkeyPatcher} with patches should add all of the
given patches to the patch list.
"""
patcher = MonkeyPatcher((self.testObject, 'foo', 'haha'),
(self.testObject, 'bar', 'hehe'))
patcher.patch()
self.assertEqual('haha', self.testObject.foo)
self.assertEqual('hehe', self.testObject.bar)
self.assertEqual(self.originalObject.baz, self.testObject.baz)
def test_patchExisting(self):
"""
Patching an attribute that exists sets it to the value defined in the
patch.
"""
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
self.monkeyPatcher.patch()
self.assertEqual(self.testObject.foo, 'haha')
def test_patchNonExisting(self):
"""
Patching a non-existing attribute fails with an C{AttributeError}.
"""
self.monkeyPatcher.addPatch(self.testObject, 'nowhere',
'blow up please')
self.assertRaises(AttributeError, self.monkeyPatcher.patch)
def test_patchAlreadyPatched(self):
"""
Adding a patch for an object and attribute that already have a patch
overrides the existing patch.
"""
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'blah')
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'BLAH')
self.monkeyPatcher.patch()
self.assertEqual(self.testObject.foo, 'BLAH')
self.monkeyPatcher.restore()
self.assertEqual(self.testObject.foo, self.originalObject.foo)
def test_restoreTwiceIsANoOp(self):
"""
Restoring an already-restored monkey patch is a no-op.
"""
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'blah')
self.monkeyPatcher.patch()
self.monkeyPatcher.restore()
self.assertEqual(self.testObject.foo, self.originalObject.foo)
self.monkeyPatcher.restore()
self.assertEqual(self.testObject.foo, self.originalObject.foo)
def test_runWithPatchesDecoration(self):
"""
runWithPatches should run the given callable, passing in all arguments
and keyword arguments, and return the return value of the callable.
"""
log = []
def f(a, b, c=None):
log.append((a, b, c))
return 'foo'
result = self.monkeyPatcher.runWithPatches(f, 1, 2, c=10)
self.assertEqual('foo', result)
self.assertEqual([(1, 2, 10)], log)
def test_repeatedRunWithPatches(self):
"""
We should be able to call the same function with runWithPatches more
than once. All patches should apply for each call.
"""
def f():
return (self.testObject.foo, self.testObject.bar,
self.testObject.baz)
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
result = self.monkeyPatcher.runWithPatches(f)
self.assertEqual(
('haha', self.originalObject.bar, self.originalObject.baz), result)
result = self.monkeyPatcher.runWithPatches(f)
self.assertEqual(
('haha', self.originalObject.bar, self.originalObject.baz),
result)
def test_runWithPatchesRestores(self):
"""
C{runWithPatches} should restore the original values after the function
has executed.
"""
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
self.assertEqual(self.originalObject.foo, self.testObject.foo)
self.monkeyPatcher.runWithPatches(lambda: None)
self.assertEqual(self.originalObject.foo, self.testObject.foo)
def test_runWithPatchesRestoresOnException(self):
"""
Test runWithPatches restores the original values even when the function
raises an exception.
"""
def _():
self.assertEqual(self.testObject.foo, 'haha')
self.assertEqual(self.testObject.bar, 'blahblah')
raise RuntimeError("Something went wrong!")
self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
self.monkeyPatcher.addPatch(self.testObject, 'bar', 'blahblah')
self.assertRaises(RuntimeError, self.monkeyPatcher.runWithPatches, _)
self.assertEqual(self.testObject.foo, self.originalObject.foo)
self.assertEqual(self.testObject.bar, self.originalObject.bar)
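# A minimal usage sketch (not part of the original file) of the patch/restore
# cycle the tests above exercise; some_object and do_work are hypothetical.
#
#     patcher = MonkeyPatcher()
#     patcher.addPatch(some_object, 'attribute', 'temporary value')
#     try:
#         patcher.patch()       # apply every registered patch
#         do_work(some_object)  # code that relies on the patched value
#     finally:
#         patcher.restore()     # always put the originals back
#
# runWithPatches(f, *args, **kwargs) wraps a single call in the same cycle and
# restores even when f raises, as test_runWithPatchesRestoresOnException shows.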
EICT/C-BAS | src/vendor/schedule/scheduleexceptions.py | Python | bsd-3-clause | 857 | 0.009335
from eisoil.core.exception import CoreException
|
class ScheduleException(CoreException):
    def __init__(self, desc):
self._desc = desc
def __str__(self):
return "Schedule: %s" % (self._desc,)
class ScheduleOverbookingError(ScheduleException):
def __init__(self, schedule_subject, resource_id, start_time, end_time):
"""All parameters should be strings or be able to str(...) itself."""
super(ScheduleOverbookingError, self).__init__("There are already reservations for %s during [%s - %s] in the %s schedule." % (str(resource_id), str(start_time), str(end_time), str(schedule_subject)))
class ScheduleNoSuchReservationError(ScheduleException):
def __init__(self, reservation_id):
super(ScheduleNoSuchReservationError, self).__init__("Could not find reservation with id %d." % (reservation_id))
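# A minimal usage sketch (hypothetical values) showing how these exceptions
# render through ScheduleException.__str__:
#
#     try:
#         raise ScheduleOverbookingError('lease', 'res-1', '10:00', '11:00')
#     except ScheduleException as e:
#         print(e)
#     # -> Schedule: There are already reservations for res-1 during
#     #    [10:00 - 11:00] in the lease schedule.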
alxgu/ansible | lib/ansible/module_utils/storage/emc/emc_vnx.py | Python | gpl-3.0 | 1,915 | 0.005744
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2018 Luca 'remix_tj' Lorenzetto
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
emc_vnx_argument_spec = {
'sp_address': dict(type='str', required=True),
'sp_user': dict(type='str', required=False, default='sysadmin'),
'sp_password': dict(type='str', required=False, default='sysadmin',
no_log=True),
}
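# A minimal sketch (not from this file) of the intended use: a module merges
# this shared spec into its own before constructing AnsibleModule. The extra
# 'state' option is hypothetical.
#
#     from ansible.module_utils.basic import AnsibleModule
#
#     argument_spec = dict(state=dict(type='str', default='present'))
#     argument_spec.update(emc_vnx_argument_spec)
#     module = AnsibleModule(argument_spec=argument_spec)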
amimoto/walky | walky/client/common.py | Python | mit | 1,360 | 0.011029
from __future__ import absolute_import
import weakref
import threading
import asyncore
import socket
from walky.objects import *
from walky.port import *
from walky.engine import *
class Client(object):
engine = None
settings = None
connection = None
port = None
    engine_class = Engine
    object_class = ObjectStub
    port_class = Port  # assumed: referenced in __init__ below but absent from this dump
def __init__( self,
**settings ):
settings.setdefault('engine_class',self.engine_class)
        settings.setdefault('port_class',self.port_class)
settings.setdefault('object_class',self.object_class)
self.port = settings.get('port')
self.settings = settings
self.reset()
def reset(self):
if self.engine: self.engine.shutdown()
self.engine = self.settings['engine_class']()
def connect(self,*args,**kwargs):
""" Start the engine and the asyncore
"""
        self.engine.start()
self.connection = self.engine.connection_new(*args,**kwargs)
def run(self):
pass
def on_readline(self,line):
try:
pass
except Exception as ex:
pass
def sendline(self,line):
self.port().sendline(line)
def object_get(self,reg_obj_id):
return self.object_class(self.connection,reg_obj_id)
def close(self):
self.engine.shutdown()
jluukvg/text-classifier | python/randomize_tweet_order.py | Python | mit | 957 | 0
# -*- coding: utf-8 -*-
import os
import codecs
import random
# this is the path of the folder that will contain the tweet files
tweets_folder = os.path.join("D:", os.sep, "Documents", "PycharmProjects",
"easy_group_classifier", "text_files")
# checks if the previous path exists; if not, it creates it
if not os.path.isdir(tweets_folder):
os.makedirs(tweets_folder)
# the name of the file with clean tweets to scramble
filename = "technology"
tweets_file = os.path.join(tweets_folder, "%s.txt" % filename)
shuffled_file = os.path.join(tweets_folder, "%s_shuffled.txt" % filename)
tweet_list = []
with codecs.open(tweets_file, "rb", encoding="utf-8") as f:
for line in f:
tweet = line.strip()
        tweet_list.append(tweet)
random.shuffle(tweet_list)
with codecs.open(shuffled_file, "wb", encoding="utf-8") as f:
for tweet in tweet_list:
f.write("%s\n" % tweet)
UnbDroid/robomagellan | Codigos/Raspberry/desenvolvimentoRos/build/rosserial/rosserial_arduino/catkin_generated/rosserial_arduino-extras.cmake.installspace.context.cmake.py | Python | gpl-3.0 | 1,578 | 0.003169
# generated from catkin/cmake/template/cfg-extras.context.py.in
DEVELSPACE = 'FALSE' == 'TRUE'
INSTALLSPACE = 'TRUE' == 'TRUE'
CATKIN_DEVEL_PREFIX = '/home/pi/Documents/desenvolvimentoRos/devel'
CATKIN_GLOBAL_BIN_DESTINATION = 'bin'
CATKIN_GLOBAL_ETC_DESTINATION = 'etc'
CATKIN_GLOBAL_INCLUDE_DESTINATION = 'include'
CATKIN_GLOBAL_LIB_DESTINATION = 'lib'
CATKIN_GLOBAL_LIBEXEC_DESTINATION = 'lib'
CATKIN_GLOBAL_PYTHON_DESTINATION = 'lib/python2.7/dist-packages'
CATKIN_GLOBAL_SHARE_DESTINATION = 'share'
CATKIN_PACKAGE_BIN_DESTINATION = 'lib/rosserial_arduino'
CATKIN_PACKAGE_ETC_DESTINATION = 'etc/rosserial_arduino'
CATKIN_PACKAGE_INCLUDE_DESTINATION = 'include/rosserial_arduino'
CATKIN_PACKAGE_LIB_DESTINATION = 'lib'
CATKIN_PACKAGE_LIBEXEC_DESTINATION = ''
CATKIN_PACKAGE_PYTHON_DESTINATION = 'lib/python2.7/dist-packages/rosserial_arduino'
CATKIN_PACKAGE_SHARE_DESTINATION = 'share/rosserial_arduino'
CMAKE_BINARY_DIR = '/home/pi/Documents/desenvolvimentoRos/build'
CMAKE_CURRENT_BINARY_DIR = '/home/pi/Documents/desenvolvimentoRos/build/rosserial/rosserial_arduino'
CMAKE_CURRENT_SOURCE_DIR = '/home/pi/Documents/desenvolvimentoRos/src/rosserial/rosserial_arduino'
CMAKE_INSTALL_PREFIX = '/home/pi/Documents/desenvolvimentoRos/install'
CMAKE_SOURCE_DIR = '/home/pi/Documents/desenvolvimentoRos/src'
PKG_CMAKE_DIR = '${rosserial_arduino_DIR}'
PROJECT_NAME = 'rosserial_arduino'
PROJECT_BINARY_DIR = '/home/pi/Documents/desenvolvimentoRos/build/rosserial/rosserial_arduino'
PROJECT_SOURCE_DIR = '/home/pi/Documents/desenvolvimentoRos/src/rosserial/rosserial_arduino'
HewlettPackard/oneview-ansible | test/test_oneview_ethernet_network_facts.py | Python | apache-2.0 | 4,198 | 0.001906
#!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2019) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
import pytest
from mock import mock
from hpe_test_utils import OneViewBaseFactsTest
from oneview_module_loader import EthernetNetworkFactsModule
ERROR_MSG = 'Fake message error'
PARAMS_GET_ALL = dict(
config='config.json',
name=None
)
PARAMS_GET_BY_NAME = dict(
config='config.json',
name="Test Ethernet Network",
options=[]
)
PARAMS_GET_BY_NAME_WITH_OPTIONS = dict(
config='config.json',
name="Test Ethernet Network",
    options=['associatedProfiles', 'associatedUplinkGroups']
)
PRESENT_ENETS = [{
"name": "Test Ethernet
|
Network",
"uri": "/rest/ethernet-networks/d34dcf5e-0d8e-441c-b00d-e1dd6a067188"
}]
ENET_ASSOCIATED_UPLINK_GROUP_URIS = [
"/rest/uplink-sets/c6bf9af9-48e7-4236-b08a-77684dc258a5",
"/rest/uplink-sets/e2f0031b-52bd-4223-9ac1-d91cb519d548"
]
ENET_ASSOCIATED_PROFILE_URIS = [
"/rest/server-profiles/83e2e117-59dc-4e33-9f24-462af951cbbe",
"/rest/server-profiles/57d3af2a-b6d2-4446-8645-f38dd808ea4d"
]
ENET_ASSOCIATED_UPLINK_GROUPS = [dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[0], name='Uplink Set 1'),
dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[1], name='Uplink Set 2')]
ENET_ASSOCIATED_PROFILES = [dict(uri=ENET_ASSOCIATED_PROFILE_URIS[0], name='Server Profile 1'),
dict(uri=ENET_ASSOCIATED_PROFILE_URIS[1], name='Server Profile 2')]
@pytest.mark.resource(TestEthernetNetworkFactsModule='ethernet_networks')
class TestEthernetNetworkFactsModule(OneViewBaseFactsTest):
def test_should_get_all_enets(self):
self.resource.get_all.return_value = PRESENT_ENETS
self.mock_ansible_module.params = PARAMS_GET_ALL
EthernetNetworkFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_networks=(PRESENT_ENETS))
)
def test_should_get_enet_by_name(self):
self.resource.data = PRESENT_ENETS
self.mock_ansible_module.params = PARAMS_GET_BY_NAME
EthernetNetworkFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_networks=(PRESENT_ENETS))
)
def test_should_get_enet_by_name_with_options(self):
self.resource.data = PRESENT_ENETS
self.resource.get_associated_profiles.return_value = ENET_ASSOCIATED_PROFILE_URIS
self.resource.get_associated_uplink_groups.return_value = ENET_ASSOCIATED_UPLINK_GROUP_URIS
profiles = []
for data in ENET_ASSOCIATED_PROFILES:
obj = mock.Mock()
obj.data = data
profiles.append(obj)
uplinks = []
for data in ENET_ASSOCIATED_UPLINK_GROUPS:
obj = mock.Mock()
obj.data = data
uplinks.append(obj)
self.mock_ov_client.server_profiles.get_by_uri.side_effect = profiles
self.mock_ov_client.uplink_sets.get_by_uri.side_effect = uplinks
self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITH_OPTIONS
EthernetNetworkFactsModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_networks=PRESENT_ENETS,
enet_associated_profiles=ENET_ASSOCIATED_PROFILES,
enet_associated_uplink_groups=ENET_ASSOCIATED_UPLINK_GROUPS)
)
if __name__ == '__main__':
pytest.main([__file__])
illicitonion/givabit | lib/sdks/google_appengine_1.7.1/google_appengine/google/appengine/ext/appstats/datamodel_pb.py | Python | apache-2.0 | 81,798 | 0.01879
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.datastore.entity_pb import *
import google.appengine.datastore.entity_pb
class AggregateRpcStatsProto(ProtocolBuffer.ProtocolMessage):
has_service_call_name_ = 0
service_call_name_ = ""
has_total_amount_of_calls_ = 0
total_amount_of_calls_ = 0
has_total_cost_of_calls_microdollars_ = 0
total_cost_of_calls_microdollars_ = 0
def __init__(self, contents=None):
self.total_billed_ops_ = []
if contents is not None: self.MergeFromString(contents)
def service_call_name(self): return self.service_call_name_
def set_service_call_name(self, x):
self.has_service_call_name_ = 1
self.service_call_name_ = x
def clear_service_call_name(self):
if self.has_service_call_name_:
self.has_service_call_name_ = 0
self.service_call_name_ = ""
def has_service_call_name(self): return self.has_service_call_name_
def total_amount_of_calls(self): return self.total_amount_of_calls_
def set_total_amount_of_calls(self, x):
self.has_total_amount_of_calls_ = 1
self.total_amount_of_calls_ = x
def clear_total_amount_of_calls(self):
if self.has_total_amount_of_calls_:
self.has_total_amount_of_calls_ = 0
self.total_amount_of_calls_ = 0
def has_total_amount_of_calls(self): return self.has_total_amount_of_calls_
def total_cost_of_calls_microdollars(self): return self.total_cost_of_calls_microdollars_
def set_total_cost_of_calls_microdollars(self, x):
self.has_total_cost_of_calls_microdollars_ = 1
self.total_cost_of_calls_microdollars_ = x
def clear_total_cost_of_calls_microdollars(self):
if self.has_total_cost_of_calls_microdollars_:
self.has_total_cost_of_calls_microdollars_ = 0
self.total_cost_of_calls_microdollars_ = 0
def has_total_cost_of_calls_microdollars(self): return self.has_total_cost_of_calls_microdollars_
def total_billed_ops_size(self): return len(self.total_billed_ops_)
def total_billed_ops_list(self): return self.total_billed_ops_
def total_billed_ops(self, i):
return self.total_billed_ops_[i]
def mutable_total_billed_ops(self, i):
return self.total_billed_ops_[i]
def add_total_billed_ops(self):
x = BilledOpProto()
self.total_billed_ops_.append(x)
return x
def clear_total_billed_ops(self):
self.total_billed_ops_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_service_call_name()): self.set_service_call_name(x.service_call_name())
if (x.has_total_amount_of_calls()): self.set_total_amount_of_calls(x.total_amount_of_calls())
if (x.has_total_cost_of_calls_microdollars()): self.set_total_cost_of_calls_microdollars(x.total_cost_of_calls_microdollars())
for i in xrange(x.total_billed_ops_size()): self.add_total_billed_ops().CopyFrom(x.total_billed_ops(i))
def Equals(self, x):
if x is self: return 1
if self.has_service_call_name_ != x.has_service_call_name_: return 0
if self.has_service_call_name_ and self.service_call_name_ != x.service_call_name_: return 0
if self.has_total_amount_of_calls_ != x.has_total_amount_of_calls_: return 0
if self.has_total_amount_of_calls_ and self.total_amount_of_calls_ != x.total_amount_of_calls_: return 0
if self.has_total_cost_of_calls_microdollars_ != x.has_total_cost_of_calls_microdollars_: return 0
if self.has_total_cost_of_calls_microdollars_ and self.total_cost_of_calls_microdollars_ != x.total_cost_of_calls_microdollars_: return 0
if len(self.total_billed_ops_) != len(x.total_billed_ops_): return 0
for e1, e2 in zip(self.total_billed_ops_, x.total_billed_ops_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_service_call_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: service_call_name not set.')
if (not self.has_total_amount_of_calls_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: total_amount_of_calls not set.')
for p in self.total_billed_ops_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.service_call_name_))
n += self.lengthVarInt64(self.total_amount_of_calls_)
if (self.has_total_cost_of_calls_microdollars_): n += 1 + self.lengthVarInt64(self.total_cost_of_calls_microdollars_)
n += 1 * len(self.total_billed_ops_)
for i in xrange(len(self.total_billed_ops_)): n += self.lengthString(self.total_billed_ops_[i].ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_service_call_name_):
n += 1
n += self.lengthString(len(self.service_call_name_))
if (self.has_total_amount_of_calls_):
n += 1
n += self.lengthVarInt64(self.total_amount_of_calls_)
if (self.has_total_cost_of_calls_microdollars_): n += 1 + self.lengthVarInt64(self.total_cost_of_calls_microdollars_)
n += 1 * len(self.total_billed_ops_)
for i in xrange(len(self.total_billed_ops_)): n += self.lengthString(self.total_billed_ops_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_service_call_name()
self.clear_total_amount_of_calls()
self.clear_total_cost_of_calls_microdollars()
self.clear_total_billed_ops()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.service_call_name_)
out.putVarInt32(24)
out.putVarInt64(self.total_amount_of_calls_)
if (self.has_total_cost_of_calls_microdollars_):
out.putVarInt32(32)
out.putVarInt64(self.total_cost_of_calls_microdollars_)
for i in xrange(len(self.total_billed_ops_)):
out.putVarInt32(42)
out.putVarInt32(self.total_billed_ops_[i].ByteSize())
self.total_billed_ops_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_service_call_name_):
out.putVarInt32(10)
out.putPrefixedString(self.service_call_name_)
if (self.has_total_amount_of_calls_):
out.putVarInt32(24)
out.putVarInt64(self.total_amount_of_calls_)
if (self.has_total_cost_of_calls_microdollars_):
out.putVarInt32(32)
out.putVarInt64(self.total_cost_of_calls_microdollars_)
for i in xrange(len(self.total_billed_ops_)):
out.putVarInt32(42)
out.putVarInt32(self.total_billed_ops_[i].ByteSizePartial())
self.total_billed_ops_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_service_call_name(d.getPrefixedString())
continue
      if tt == 24:
self.set_total_amount_of_calls(d.getVarInt64())
continue
if tt == 32:
self.set_total_cost_of_calls_microdollars(d.getVarInt64())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_total_billed_ops().TryMerge(tmp)
continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
AlessandroZ/LaZagne | Linux/lazagne/softwares/wifi/wifi.py | Python | lgpl-3.0 | 1,246 | 0.001605
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from lazagne.config.module_info import ModuleInfo
try:
from ConfigParser import RawConfigParser # Python 2.7
except ImportError:
from configparser import RawConfigParser # Python 3
from collections import OrderedDict
class Wifi(ModuleInfo):
def __init__(self):
ModuleInfo.__init__(self, 'wifi', 'wifi')
def run(self):
pwd_found = []
directory = u'/etc/NetworkManager/system-connections'
if os.path.exists(directory):
if os.getuid() == 0:
wireless_ssid = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
for w in wireless_ssid:
cp = RawConfigParser()
cp.read(os.path.join(directory, w))
values = OrderedDict()
try:
values['SSID'] = cp.get('wifi', 'ssid')
values['Password'] = cp.get('wifi-security', 'psk')
pwd_found.append(values)
except Exception:
pass
else:
self.info('You need sudo privileges')
return pwd_found
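# For reference, a simplified (hypothetical) NetworkManager keyfile under
# /etc/NetworkManager/system-connections; only the two keys queried above
# matter here:
#
#     [wifi]
#     ssid=HomeNetwork
#
#     [wifi-security]
#     psk=secretpassphrase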
richli/dame | setup.py | Python | mit | 1,201 | 0.001665
import sys
import os.path
import subprocess
PY3 = sys.version >= '3'
from setuptools import setup, find_packages
# http://blogs.nopcode.org/brainstorm/2013/05/20/pragmatic-python-versioning-via-setuptools-and-git-tags/
# Fetch version from git tags, and write to version.py.
# Also, when git is not available (PyPi package), use stored version.py.
version_py = os.path.join(os.path.dirname(__file__), 'dame', 'version.py')
try:
version_git = subprocess.check_output(
["git", "describe", "--always"]).rstrip()
# Convert bytes to str for Python3
if PY3:
version_git = version_git.decode()
except:
with open(version_py, 'r') as fh:
version_git = fh.read().strip().split('=')[-1].replace('"', '')
version_msg = ("# Do not edit this file, "
"pipeline versioning is governed by git tags")
with open(version_py, 'w') as fh:
fh.write(version_msg + os.linesep +
"__version__='{}'\n".format(version_git))
setup(
name="dame",
author="Richard Lindsley",
version=version_git,
packages=find_packages(),
license="MIT",
entry_points={
'gui_scripts': [
'dame = dame.dame:main'
]
},
)
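# After either branch above, dame/version.py holds the warning comment plus a
# single assignment such as (hypothetical tag) __version__='v0.3-5-gabc1234',
# so installs without git still report a version.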
sserrot/champion_relationships | venv/Lib/site-packages/setuptools/command/sdist.py | Python | mit | 8,092 | 0.000124
from distutils import log
import distutils.command.sdist as orig
import os
import sys
import io
import contextlib
from setuptools.extern import six, ordered_set
from .py36compat import sdist_add_defaults
import pkg_resources
_default_revctrl = list
def walk_revctrl(dirname=''):
"""Find all files under revision control"""
for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
for item in ep.load()(dirname):
yield item
class sdist(sdist_add_defaults, orig.sdist):
"""Smart sdist that finds anything supported by revision control"""
user_options = [
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
]
negative_opt = {}
README_EXTENSIONS = ['', '.rst', '.txt', '.md']
READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
def run(self):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
self.filelist = ei_cmd.filelist
self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
self.check_readme()
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
self.make_distribution()
dist_files = getattr(self.distribution, 'dist_files', [])
for file in self.archive_files:
data = ('sdist', '', file)
if data not in dist_files:
dist_files.append(data)
def initialize_options(self):
orig.sdist.initialize_options(self)
self._default_to_gztar()
def _default_to_gztar(self):
# only needed on Python prior to 3.6.
if sys.version_info >= (3, 6, 0, 'beta', 1):
return
self.formats = ['gztar']
def make_distribution(self):
"""
Workaround for #516
"""
with self._remove_os_link():
orig.sdist.make_distribution(self)
@staticmethod
@contextlib.contextmanager
def _remove_os_link():
"""
In a context, remove and restore os.link if it exists
"""
class NoValue:
pass
orig_val = getattr(os, 'link', NoValue)
try:
del os.link
except Exception:
pass
try:
yield
finally:
if orig_val is not NoValue:
setattr(os, 'link', orig_val)
def __read_template_hack(self):
# This grody hack closes the template file (MANIFEST.in) if an
# exception occurs during read_template.
# Doing so prevents an error when easy_install attempts to delete the
# file.
try:
orig.sdist.read_template(self)
except Exception:
_, _, tb = sys.exc_info()
tb.tb_next.tb_frame.f_locals['template'].close()
raise
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
# has been fixed, so only override the method if we're using an earlier
# Python.
has_leaky_handle = (
sys.version_info < (2, 7, 2)
or (3, 0) <= sys.version_info < (3, 1, 4)
or (3, 2) <= sys.version_info < (3, 2, 1)
)
if has_leaky_handle:
read_template = __read_template_hack
def _add_defaults_optional(self):
if six.PY2:
sdist_add_defaults._add_defaults_optional(self)
else:
super()._add_defaults_optional()
if os.path.isfile('pyproject.toml'):
self.filelist.append('pyproject.toml')
def _add_defaults_python(self):
"""getting python files"""
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files())
self._add_data_files(self._safe_data_files(build_py))
def _safe_data_files(self, build_py):
"""
Extracting data_files from build_py is known to cause
infinite recursion errors when `include_package_data`
is enabled, so suppress it in that case.
"""
if self.distribution.include_package_data:
return ()
return build_py.data_files
def _add_data_files(self, data_files):
"""
Add data files as found in build_py.data_files.
"""
self.filelist.extend(
os.path.join(src_dir, name)
for _, src_dir, _, filenames in data_files
for name in filenames
)
def _add_defaults_data_files(self):
try:
if six.PY2:
sdist_add_defaults._add_defaults_data_files(self)
else:
super()._add_defaults_data_files()
except TypeError:
log.warn("data_files contains unexpected objects")
def check_readme(self):
for f in self.READMES:
if os.path.exists(f):
return
else:
self.warn(
"standard file not found: should have one of " +
', '.join(self.READMES)
)
def make_release_tree(self, base_dir, files):
orig.sdist.make_release_tree(self, base_dir, files)
# Save any egg_info command line options used to create this sdist
dest = os.path.join(base_dir, 'setup.cfg')
if hasattr(os, 'link') and os.path.exists(dest):
# unlink and re-copy, since it might be hard-linked, and
# we don't want to change the source version
os.unlink(dest)
self.copy_file('setup.cfg', dest)
self.get_finalized_command('egg_info').save_version_info(dest)
def _manifest_is_not_generated(self):
# check for special comment used in 2.7.1 and higher
        if not os.path.isfile(self.manifest):
return False
with io.open(self.manifest, 'rb') as fp:
first_line = fp.readline()
return (first_line !=
'# file GENERATED by distutils, do NOT edit\n'.encode())
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest, 'rb')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if not six.PY2:
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:
log.warn("%r not UTF-8 decodable -- skipping" % line)
continue
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
continue
self.filelist.append(line)
manifest.close()
def check_license(self):
"""Checks if license_file' or 'license_files' is configured and adds any
valid paths to 'self.filelist'.
"""
files = ordered_set.OrderedSet()
opts = self.distribution.get_option_dict('metadata')
# ignore the source of the value
_, license_file = opts.get('license_file', (None, None))
if license_file is None:
log.debug("'license_file' option was not specified")
else:
files.add(license_file)
try:
files.update(self.distribution.metadata.license_files)
except TypeError:
log.warn("warning: 'license_files' option is malformed")
for f in files:
if not os.path.exists(f):
log.warn(
"warning: Failed to find the configured license file '%s'",
f)
files.remove(f)
self.filelist.extend(files)
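# A minimal sketch of the metadata check_license() consumes; file names are
# hypothetical. In setup.cfg:
#
#     [metadata]
#     license_file = LICENSE
#     license_files =
#         AUTHORS
#         NOTICE
#
# Paths that exist are appended to self.filelist; missing ones are logged and
# dropped.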
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/operations/_virtual_machine_images_operations.py | Python | mit | 21,016 | 0.00452
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
location: str,
publisher_name: str,
offer: str,
skus: str,
version: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"offer": _SERIALIZER.url("offer", offer, 'str'),
"skus": _SERIALIZER.url("skus", skus, 'str'),
"version": _SERIALIZER.url("version", version, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
location: str,
publisher_name: str,
offer: str,
skus: str,
subscription_id: str,
*,
expand: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"offer": _SERIALIZER.url("offer", offer, 'str'),
"skus": _SERIALIZER.url("skus", skus, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if expand is not None:
query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
if orderby is not None:
query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_offers_request(
location: str,
publisher_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_publishers_request(
location: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_skus_request(
location: str,
publisher_name: str,
offer: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"offer": _SERI
|
ALIZER.url("offer", offer, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
woozyking/techies | techies/stasistrap.py | Python | mit | 1,129 | 0.000886
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Techies' Stasis Trap
:copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
import sys
from logging import Handler, NOTSET
_ref_attributes = [
'%(levelname)s',
'%(name)s',
'%(pathname)s',
'%(module)s',
'%(funcName)s',
'%(lineno)d',
    '%(message)s'
]
'''
Reference log format, best used with UniQueue or CountQueue
'''
REF_LOG_FORMAT = ':'.join(_ref_attributes)
class QueueHandler(Handler):
'''
Queue Logging Handler
Inherits standard logging.Handler that emits to any standard Queue
compatible implementations. Including the ones in techies.landmines module
'''
def __init__(self, q, level=NOTSET):
if sys.version_info[:2] > (2, 6):
            super(QueueHandler, self).__init__(level)
else:
Handler.__init__(self, level)
self.q = q
def emit(self, record):
try:
self.q.put(self.format(record))
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
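# A minimal usage sketch; the standard-library Queue stands in for the techies
# queue classes this handler is designed around.
#
#     import logging
#     try:
#         from Queue import Queue   # Python 2
#     except ImportError:
#         from queue import Queue   # Python 3
#
#     q = Queue()
#     handler = QueueHandler(q)
#     handler.setFormatter(logging.Formatter(REF_LOG_FORMAT))
#     logger = logging.getLogger('demo')
#     logger.addHandler(handler)
#     logger.error('boom')
#     print(q.get())   # one colon-joined record per REF_LOG_FORMAT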
amyxchen/openhtf | test/phase_info_test.py | Python | apache-2.0 | 2,220 | 0.013964
# Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
import openhtf
from openhtf import plugs
def PlainFunc():
"""Plain Docstring"""
pass
def NormalTestPhase(test):
return 'return value'
def ExtraArgFunc(input=None):
return input
class TestPhaseInfo(unittest.TestCase):
  def setUp(self):
self._phase_data = mock.Mock(plug_manager=plugs.PlugManager())
def testBasics(self):
phase = openhtf.PhaseInfo.WrapOrCopy(PlainFunc)
self.assertIs(phase.func, PlainFunc)
    self.assertEqual(0, len(phase.plugs))
self.assertEqual('PlainFunc', phase.name)
self.assertEqual('Plain Docstring', phase.doc)
phase(self._phase_data)
test_phase = openhtf.PhaseInfo.WrapOrCopy(NormalTestPhase)
self.assertEqual('NormalTestPhase', test_phase.name)
self.assertEqual('return value', test_phase(self._phase_data))
def testMultiplePhases(self):
phase = openhtf.PhaseInfo.WrapOrCopy(PlainFunc)
second_phase = openhtf.PhaseInfo.WrapOrCopy(phase)
for attr in type(phase).all_attribute_names:
if attr == 'func': continue
self.assertIsNot(getattr(phase, attr), getattr(second_phase, attr))
def testWithArgs(self):
phase = openhtf.PhaseInfo.WrapOrCopy(ExtraArgFunc)
phase = phase.WithArgs(input='input arg')
result = phase(self._phase_data)
self.assertEqual('input arg', result)
second_phase = phase.WithArgs(input='second input')
first_result = phase(self._phase_data)
second_result = second_phase(self._phase_data)
self.assertEqual('input arg', first_result)
self.assertEqual('second input', second_result)
LLNL/spack | lib/spack/spack/test/cmd/python.py | Python | lgpl-2.1 | 1,190 | 0
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
import sys
import pytest
import spack
from spack.main import SpackCommand
python = SpackCommand('python')
def test_python():
out = python('-c', 'import spack; print(spack.spack_version)')
    assert out.strip() == spack.spack_version
def test_python_interpreter_path():
out = python('--path')
assert out.strip() == sys.executable
def test_python_version():
out = python('-V')
assert platform.python_version() in out
def test_python_with_module():
# pytest rewrites a lot of modules, which interferes with runpy, so
# it's hard to test this. Trying to import a module like sys, that
# has no code associated with it, raises an error reliably in python
# 2 and 3, which indicates we successfully ran runpy.run_module.
with pytest.raises(ImportError, match="No code object"):
python('-m', 'sys')
def test_python_raises():
out = python('--foobar', fail_on_error=False)
assert "Error: Unknown arguments" in out
maikhoepfel/daphne | daphne/cli.py | Python | bsd-3-clause | 7,303 | 0.001917
import sys
import argparse
import logging
import importlib
from .server import Server, build_endpoint_description_strings
from .access import AccessLogGenerator
logger = logging.getLogger(__name__)
DEFAULT_HOST = '127.0.0.1'
DEFAULT_PORT = 8000
class CommandLineInterface(object):
"""
Acts as the main CLI entry point for running the server.
"""
description = "Django HTTP/WebSocket server"
def __init__(self):
self.parser = argparse.ArgumentParser(
description=self.description,
)
self.parser.add_argument(
'-p',
'--port',
type=int,
help='Port number to listen on',
default=None,
)
self.parser.add_argument(
'-b',
'--bind',
dest='host',
help='The host/address to bind to',
default=None,
)
self.parser.add_argument(
'--websocket_timeout',
type=int,
            help='Max time a websocket stays connected. -1 for infinite.',
default=None,
)
self.parser.add_argument(
'--websocket_connect_timeout',
type=int,
            help='Max time to refuse establishing a connection. -1 for infinite.',
default=None,
)
self.parser.add_argument(
'-u',
'--unix-socket',
dest='unix_socket',
help='Bind to a UNIX socket rather than a TCP host/port',
default=None,
)
self.parser.add_argument(
'--fd',
type=int,
dest='file_descriptor',
help='Bind to a file descriptor rather than a TCP host/port or named unix socket',
default=None,
)
self.parser.add_argument(
'-e',
'--endpoint',
dest='socket_strings',
action='append',
help='Use raw server strings passed directly to twisted',
default=[],
)
self.parser.add_argument(
'-v',
'--verbosity',
type=int,
help='How verbose to make the output',
default=1,
)
self.parser.add_argument(
'-t',
'--http-timeout',
type=int,
help='How long to wait for worker server before timing out HTTP connections',
default=120,
)
self.parser.add_argument(
'--access-log',
help='Where to write the access log (- for stdout, the default for verbosity=1)',
default=None,
)
self.parser.add_argument(
'--ping-interval',
type=int,
help='The number of seconds a WebSocket must be idle before a keepalive ping is sent',
default=20,
)
self.parser.add_argument(
'--ping-timeout',
type=int,
            help='The number of seconds before a WebSocket is closed if no response to a keepalive ping',
default=30,
)
self.parser.add_argument(
'--ws-protocol',
nargs='*',
dest='ws_protocols',
help='The WebSocket protocols you wish to support',
default=None,
)
self.parser.add_argument(
'--root-path',
dest='root_path',
help='The setting for the ASGI root_path variable',
default="",
)
self.parser.add_argument(
'--proxy-headers',
dest='proxy_headers',
help='Enable parsing and using of X-Forwarded-For and X-Forwarded-Port headers and using that as the '
'client address',
default=False,
action='store_true',
)
self.parser.add_argument(
'--force-sync',
dest='force_sync',
action='store_true',
help='Force the server to use synchronous mode on its ASGI channel layer',
default=False,
)
self.parser.add_argument(
'channel_layer',
help='The ASGI channel layer instance to use as path.to.module:instance.path',
)
self.server = None
@classmethod
def entrypoint(cls):
"""
Main entrypoint for external starts.
"""
cls().run(sys.argv[1:])
def run(self, args):
"""
Pass in raw argument list and it will decode them
and run the server.
"""
# Decode args
args = self.parser.parse_args(args)
# Set up logging
logging.basicConfig(
level={
0: logging.WARN,
1: logging.INFO,
2: logging.DEBUG,
}[args.verbosity],
format="%(asctime)-15s %(levelname)-8s %(message)s",
)
# If verbosity is 1 or greater, or they told us explicitly, set up access log
access_log_stream = None
if args.access_log:
if args.access_log == "-":
access_log_stream = sys.stdout
else:
access_log_stream = open(args.access_log, "a", 1)
elif args.verbosity >= 1:
access_log_stream = sys.stdout
# Import channel layer
sys.path.insert(0, ".")
module_path, object_path = args.channel_layer.split(":", 1)
channel_layer = importlib.import_module(module_path)
for bit in object_path.split("."):
channel_layer = getattr(channel_layer, bit)
if not any([args.host, args.port, args.unix_socket, args.file_descriptor, args.socket_strings]):
# no advanced binding options passed, patch in defaults
args.host = DEFAULT_HOST
args.port = DEFAULT_PORT
elif args.host and not args.port:
args.port = DEFAULT_PORT
elif args.port and not args.host:
args.host = DEFAULT_HOST
# build endpoint description strings from (optional) cli arguments
endpoints = build_endpoint_description_strings(
host=args.host,
port=args.port,
unix_socket=args.unix_socket,
file_descriptor=args.file_descriptor
)
endpoints = sorted(
args.socket_strings + endpoints
)
logger.info(
'Starting server at %s, channel layer %s.' %
(', '.join(endpoints), args.channel_layer)
)
self.server = Server(
channel_layer=channel_layer,
endpoints=endpoints,
http_timeout=args.http_timeout,
ping_interval=args.ping_interval,
ping_timeout=args.ping_timeout,
websocket_timeout=args.websocket_timeout,
websocket_connect_timeout=args.websocket_connect_timeout,
action_logger=AccessLogGenerator(access_log_stream) if access_log_stream else None,
ws_protocols=args.ws_protocols,
root_path=args.root_path,
verbosity=args.verbosity,
proxy_forwarded_address_header='X-Forwarded-For' if args.proxy_headers else None,
proxy_forwarded_port_header='X-Forwarded-Port' if args.proxy_headers else None,
force_sync=args.force_sync,
)
self.server.run()
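# Example invocation (module path is hypothetical); the positional argument
# follows the path.to.module:instance.path form parsed in run() above:
#
#     daphne -b 0.0.0.0 -p 8001 myproject.asgi:channel_layer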
cc13ny/Allin | jiuzhang/Nine Chapters/2 Binary Search & Sorted Array/py/SearchforaRange.py | Python | mit | 719 | 0.006954
class Solution:
"""
@param A : a list of integers
@param target : an integer to be searched
@return : a list of length 2, [index1, index2]
"""
def searchRange(self, A, target):
# write your code here
res = []
l, r = 0, len(A) - 1
while l <= r:
m = (l + r) / 2
if A[m] >= target:
                r = m - 1
else:
l = m + 1
res.append(l)
l, r = 0, len(A) - 1
while l <= r:
m = (l + r) / 2
if A[m] > target:
r = m - 1
            else:
                l = m + 1
res.append(r)
if res[0] > res[1]: return [-1, -1]
return res
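# Worked example: the two loops are lower- and upper-bound scans.
#
#     A = [5, 7, 7, 8, 8, 10], target = 8
#     first loop  -> l = 3, the index of the first 8
#     second loop -> r = 4, the index of the last 8
#     result: [3, 4]; if target is absent, l ends up > r, giving [-1, -1]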
lupyuen/RaspberryPiImage | usr/share/pyshared/ajenti/usersync/base.py | Python | apache-2.0 | 277 | 0.00722
from ajenti.api import *
@interface
class UserSyncProvider (BasePlugin):
allows_renaming = False
syncs_root = False
def test(self):
return False
def check_password(self, username, password):
return False
    def sync(self):
pass
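# A minimal sketch of a concrete provider (names and values hypothetical;
# ajenti's plugin-registration decorator omitted):
#
#     class StaticSyncProvider (UserSyncProvider):
#         def test(self):
#             return True
#         def check_password(self, username, password):
#             return (username, password) == ('admin', 'secret')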
Capitains/MyCapytain | tests/retrievers/test_cts5.py | Python | mpl-2.0 | 10,090 | 0.000892
import unittest
import requests
import responses
from mock import patch, MagicMock
from MyCapytain.retrievers.cts5 import *
class TestEndpointsCts5(unittest.TestCase):
""" Test Cts5 Endpoint request making """
def setUp(self):
self.cts = HttpCtsRetriever("http://domainname.com/rest/cts")
def test_request_Cts_getCapabilities_arguments(self):
""" Tests that methods getCapabilities maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getCapabilities(inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetCapabilities"
}
)
def test_request_Cts_getValidReff_arguments(self):
""" Tests that methods getValidReff maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getValidReff(urn="urn", inventory="inventory", level=1)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetValidReff",
"level": "1",
"urn": "urn"
}
)
with patch('requests.get') as patched_get:
self.cts.getValidReff(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetValidReff",
"urn": "urn"
}
)
def test_request_Cts_getPassage_arguments(self):
""" Tests that methods getPassage maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getPassage(urn="urn", inventory="inventory", context=1)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassage",
"context": "1",
"urn": "urn"
}
)
with patch('requests.get') as patched_get:
self.cts.getPassage(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassage",
"urn": "urn"
}
)
def test_call_with_default(self):
inv = HttpCtsRetriever("http://domainname.com/rest/cts", inventory="annotsrc")
with patch('requests.get') as patched_get:
inv.getPassage(urn="urn")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "annotsrc",
"request": "GetPassage",
"urn": "urn"
}
)
def test_request_Cts_getPassagePlus_arguments(self):
""" Tests that methods getPassagePlus maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getPassagePlus(
urn="urn", inventory="inventory", context=1)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassagePlus",
"context": "1",
"urn": "urn"
}
)
with patch('requests.get') as patched_get:
self.cts.getPassagePlus(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassagePlus",
"urn": "urn"
}
)
def test_request_Cts_getFirstUrn_arguments(self):
""" Tests that methods getFirstUrn maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getFirstUrn(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetFirstUrn",
"urn": "urn"
}
)
def test_request_Cts_getPrevNextUrn_arguments(self):
""" Tests that methods getPrevNextUrn maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getPrevNextUrn(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventor
|
y",
"request": "GetPrevNextUrn",
"urn": "urn"
}
)
def test_request_Cts_getLabel_arguments(self):
""" Tests that methods g
|
etLabel maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getLabel(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetLabel",
"urn": "urn"
}
)
def test_get_siblings(self):
""" Ensure Citable CtsTextMetadata Service getMetadata is correctly routed """
with patch('requests.get') as patched_get:
self.cts.getSiblings("urn:cts:latinLit:phi1294.phi002.perseus-lat2", "1.1")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetPrevNextUrn",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2:1.1"
}
)
def test_get_children(self):
""" Ensure Citable CtsTextMetadata Service getMetadata is correctly routed """
with patch('requests.get') as patched_get:
self.cts.getReffs("urn:cts:latinLit:phi1294.phi002.perseus-lat2")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetValidReff",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2",
"level": "1"
}
)
with patch('requests.get') as patched_get:
self.cts.getReffs("urn:cts:latinLit:phi1294.phi002.perseus-lat2", subreference="1.1")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetValidReff",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2:1.1",
"level": "3"
}
)
with patch('requests.get') as patched_get:
self.cts.getReffs("urn:cts:latinLit:phi1294.phi002.perseus-lat2", subreference="1", level=2)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetValidReff",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2:1",
"level": "3"
}
)
def test_get_metadata(self):
""" Ensure Citable CtsTextMetadata Service getMetadata is correctly routed """
with patch('requests.get') as patched_get:
self.cts.getMetadata()
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetCapabilities"
}
)
with patch('requests.get') as patched_get:
self.cts.getMetadata(objectId="urn")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
kate-v-stepanova/genomics-status | status/projects.py | Python | mit | 36,140 | 0.006364
""" Handlers for sequencing project information.
"""
import json
import string
import traceback
import tornado.web
import dateutil.parser
import datetime
import requests
import re
import paramiko
import base64
import urllib
import os
import logging
from itertools import ifilter
from collections import defaultdict
from collections import OrderedDict
from status.util import dthandler, SafeHandler
from genologics import lims
from genologics.entities import Project
from genologics.entities import Sample
from genologics.entities import Process
from genologics.entities import Artifact
from genologics.entities import Protocol
from genologics.config import BASEURI, USERNAME, PASSWORD
from zendesk import Zendesk, ZendeskError, get_id_from_url
lims = lims.Lims(BASEURI, USERNAME, PASSWORD)
application_log=logging.getLogger("tornado.application")
class PresetsHandler(SafeHandler):
"""Handler to GET and POST/PUT personalized and default set of presets in
project view.
"""
def get(self):
"""Get preset choices of columns from StatusDB
It will return a JSON with two lists of presets, the default ones and the user defined
presets.
"""
presets_list = self.get_argument('presets_list', 'pv_presets')
self.set_header("Content-type", "application/json")
presets = {
"default": self.application.genstat_defaults.get(presets_list),
"user": {}
}
#Get user presets
user_id = ''
user = self.get_secure_cookie('email')
for u in self.application.gs_users_db.view('authorized/users'):
if u.get('key') == user:
user_id = u.get('value')
break
presets['user'] = self.application.gs_users_db.get(user_id).get(presets_list, {})
self.write(json.dumps(presets))
class ProjectsBaseDataHandler(SafeHandler):
def keys_to_names(self, columns):
d = {}
for column_category, column_tuples in columns.iteritems():
for key, value in column_tuples.iteritems():
d[value] = key
return d
def project_summary_data(self, row):
# the details key gives values containing multiple udfs on project level
# and project_summary key gives 'temporary' udfs that will move to 'details'.
# Include these as normal key:value pairs
if 'project_summary' in row.value:
for summary_key, summary_value in row.value['project_summary'].iteritems():
row.value[summary_key] = summary_value
row.value.pop("project_summary", None)
# If key is in both project_summary and details, details has precedence
if 'details' in row.value:
for detail_key, detail_value in row.value['details'].iteritems():
row.value[detail_key] = detail_value
row.value.pop("details", None)
# Handle the pending reviews:
if 'pending_reviews' in row.value:
links = ','.join(['<a href="https://genologics.scilifelab.se/clarity/work-complete/{0}">Review </a>'.format(rid) for rid in row.value['pending_reviews']])
row.value['pending_reviews'] = links
# Find the latest running note, return it as a separate field
if 'running_notes' in row.value:
try:
notes = json.loads(row.value['running_notes'])
# note_dates = {datetime obj: time string, ...}
note_dates = dict(zip(map(dateutil.parser.parse, notes.keys()), notes.keys()))
latest_date = note_dates[max(note_dates.keys())]
row.value['latest_running_note'] = json.dumps({latest_date: notes[latest_date]})
except ValueError:
pass
if row.key[0] == 'open' and 'queued' in row.value:
#Add days in production field
now = datetime.datetime.now()
queued = row.value['queued']
diff = now - dateutil.parser.parse(queued)
row.value['days_in_production'] = diff.days
if row.key[0] == 'open' and 'open_date' in row.value:
end_date = datetime.datetime.now()
if 'queued' in row.value:
end_date = dateutil.parser.parse(row.value['queued'])
diff = (end_date - dateutil.parser.parse(row.value['open_date'])).days
if 'queued' not in row.value and diff > 14:
row.value['days_in_reception_control'] = '<b class="text-error">{}</b>'.format(diff)
else:
row.value['days_in_reception_control'] = diff
return row
def list_projects(self, filter_projects='all', oldest_date='2012-01-01', youngest_date=datetime.datetime.now().strftime("%Y-%m-%d")):
projects = OrderedDict()
oldest_open_date=self.get_argument('oldest_open_date', oldest_date)
youngest_open_date=self.get_argument('youngest_open_date', youngest_date)
oldest_close_date=self.get_argument('oldest_close_date', oldest_date)
youngest_close_date=self.get_argument('youngest_close_date', youngest_date)
oldest_queue_date=self.get_argument('oldest_queue_date', oldest_date)
youngest_queue_date=self.get_argument('youngest_queue_date', youngest_date)
summary_view = self.application.projects_db.view("project/summary", descending=True)
if filter_projects == 'closed':
summary_view = summary_view[["closed",'Z']:["closed",'']]
elif filter_projects not in ['all', 'aborted'] and filter_projects[:1] != 'P':
summary_view = summary_view[["open",'Z']:["open",'']]
for row in summary_view:
row = self.project_summary_data(row)
projects[row.key[1]] = row.value
filtered_projects = OrderedDict()
# Specific list of projects given
if filter_projects[:1] == 'P':
fprojs = filter_projects.split(',')
for p_id, p_info in projects.iteritems():
if p_id in fprojs:
filtered_projects[p_id] = p_info
# Filter aborted projects if not All projects requested: Aborted date has
# priority over everything else.
elif filter_projects != 'all':
prefiltered_projects = OrderedDict()
for p_id, p_info in projects.iteritems():
if 'aborted' not in p_info:
prefiltered_projects[p_id] = p_info
else:
if filter_projects == 'aborted':
filtered_projects[p_id] = p_info
else:
filtered_projects = projects
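# Descriptive summary of the filter branches below (each keyword maps onto a
# project state, based on which date fields are present):
#   pending           -> no 'open_date' yet
#   open              -> has an 'open_date'
#   reception_control -> has an 'open_date' but is not yet 'queued'
#   ongoing           -> 'queued' but no 'close_date'
#   closed            -> has a 'close_date'
#   pending_review    -> has 'pending_reviews'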
if filter_projects == 'pending':
for p_id, p_info in prefiltered_projects.iteritems():
if 'open_date' not in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'open':
for p_id, p_info in prefiltered_projects.iteritems():
if 'open_date' in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'reception_control':
for p_id, p_info in prefiltered_projects.iteritems():
if 'open_date' in p_info and 'queued' not in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'ongoing':
for p_id, p_info in prefiltered_projects.iteritems():
if 'queued' in p_info and 'close_date' not in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'closed':
for p_id, p_info in prefiltered_projects.iteritems():
if 'close_date' in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == "pending_review":
for p_id, p_info in prefiltered_projects.iteritems():
if 'pending_reviews' in p_info:
filtered_projects[p_id] = p_info
final_projects = self.filter_per_date(filtered_projects, youngest_open_date, oldest_open_date, youngest_queue_date, oldest_queue_date, youngest_close_date, oldest_close_date)
# Include dates for each project:
f
|
psicobyte/ejemplos-python
|
cap5/p62.py
|
Python
|
gpl-3.0
| 112
| 0
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
corto = 10
largo = long(corto)
print type(corto)
print type(largo)
|
ininex/geofire-python
|
resource/lib/python2.7/site-packages/gcloud/bigtable/test_instance.py
|
Python
|
mit
| 31,982
| 0
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import unittest2
class TestOperation(unittest2.TestCase):
OP_TYPE = 'fake-op'
OP_ID = 8915
BEGIN = datetime.datetime(2015, 10, 22, 1, 1)
LOCATION_ID = 'loc-id'
def _getTargetClass(self):
from gcloud.bigtable.instance import Operation
return Operation
def _makeOne(self, *args, **kwargs):
return self._getTargetClass()(*args, **kwargs)
def _constructor_test_helper(self, instance=None):
operation = self._makeOne(
self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID,
instance=instance)
self.assertEqual(operation.op_type, self.OP_TYPE)
self.assertEqual(operation.op_id, self.OP_ID)
self.assertEqual(operation.begin, self.BEGIN)
self.assertEqual(operation.location_id, self.LOCATION_ID)
self.assertEqual(operation._instance, instance)
self.assertFalse(operation._complete)
def test_constructor_defaults(self):
self._constructor_test_helper()
def test_constructor_explicit_instance(self):
instance = object()
self._constructor_test_helper(instance=instance)
def test___eq__(self):
instance = object()
operation1 = self._makeOne(
self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID,
instance=instance)
operation2 = self._makeOne(
self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID,
instance=instance)
self.assertEqual(operation1, operation2)
def test___eq__type_differ(self):
operation1 = self._makeOne('foo', 123, None, self.LOCATION_ID)
operation2 = object()
self.assertNotEqual(operation1, operation2)
def test___ne__same_value(self):
instance = object()
operation1 = self._makeOne(
self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID,
instance=instance)
operation2 = self._makeOne(
self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID,
instance=instance)
comparison_val = (operation1 != operation2)
self.assertFalse(comparison_val)
def test___ne__(self):
operation1 = self._makeOne('foo', 123, None, self.LOCATION_ID)
operation2 = self._makeOne('bar', 456, None, self.LOCATION_ID)
self.assertNotEqual(operation1, operation2)
def test_finished_without_operation(self):
operation = self._makeOne(None, None, None, None)
operation._complete = True
with self.assertRaises(ValueError):
operation.finished()
def _finished_helper(self, done):
from google.longrunning import operations_pb2
from gcloud.bigtable._testing import _FakeStub
from gcloud.bigtable.instance import Instance
PROJECT = 'PROJECT'
INSTANCE_ID = 'instance-id'
TIMEOUT_SECONDS = 1
client = _Client(PROJECT, timeout_seconds=TIMEOUT_SECONDS)
instance = Instance(INSTANCE_ID, client, self.LOCATION_ID)
operation = self._makeOne(
self.OP_TYPE, self.OP_ID, self.BEGIN, self.LOCATION_ID,
instance=instance)
# Create request_pb
op_name = ('operations/projects/' + PROJECT +
'/instances/' + INSTANCE_ID +
'/locations/' + self.LOCATION_ID +
'/operations/%d' % (self.OP_ID,))
request_pb = operations_pb2.GetOperationRequest(name=op_name)
# Create response_pb
response_pb = operations_pb2.Operation(done=done)
# Patch the stub used by the API method.
client._operations_stub = stub = _FakeStub(response_pb)
# Create expected_result.
expected_result = done
# Perform the method and check the result.
result = operation.finished()
self.assertEqual(result, expected_result)
self.assertEqual(stub.method_calls, [(
'GetOperation',
(request_pb, TIMEOUT_SECONDS),
{},
)])
if done:
self.assertTrue(operation._complete)
else:
self.assertFalse(operation._complete)
def test_finished(self):
self._finished_helper(done=True)
def test_finished_not_done(self):
self._finished_helper(done=False)
class TestInstance(unittest2.TestCase):
PROJECT = 'project'
INSTANCE_ID = 'instance-id'
INSTANCE_NAME = 'projects/' + PROJECT + '/instances/' + INSTANCE_ID
LOCATION_ID = 'locname'
LOCATION = 'projects/' + PROJECT + '/locations/' + LOCATION_ID
DISPLAY_NAME = 'display_name'
OP_ID = 8915
OP_NAME = ('operations/projects/%s/instances/%soperations/%d' %
(PROJECT, INSTANCE_ID, OP_ID))
TABLE_ID = 'table_id'
TABLE_NAME = INSTANCE_NAME + '/tables/' + TABLE_ID
TIMEOUT_SECONDS = 1
def _getTargetClass(self):
from gcloud.bigtable.instance import Instance
return Instance
def _makeOne(self, *args, **kwargs):
return self._getTargetClass()(*args, **kwargs)
def test_constructor_defaults(self):
from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES
client = object()
instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
self.assertEqual(instance.instance_id, self.INSTANCE_ID)
self.assertEqual(instance.display_name, self.INSTANCE_ID)
self.assertTrue(instance._client is client)
self.assertEqual(instance._cluster_location_id, self.LOCATION_ID)
self.assertEqual(instance._cluster_serve_nodes, DEFAULT_SERVE_NODES)
def test_constructor_non_default(self):
display_name = 'display_name'
client = object()
instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID,
display_name=display_name)
self.assertEqual(instance.instance_id, self.INSTANCE_ID)
self.assertEqual(instance.display_name, display_name)
self.assertTrue(instance._client is client)
def test_copy(self):
display_name = 'display_name'
client = _Client(self.PROJECT)
instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID,
                                 display_name=display_name)
new_instance = instance.copy()
# Make sure the client copy succeeded.
self.assertFalse(new_instance._client is client)
self.assertEqual(new_instance._client, client)
# Make sure the client got copied to a new instance.
self.assertFalse(instance is new_instance)
self.assertEqual(instance, new_instance)
def test_table_factory(self):
from gcloud.bigtable.table import Table
instance = self._makeOne(self.INSTANCE_ID, None, self.LOCATION_ID)
table = instance.table(self.TABLE_ID)
self.assertTrue(isinstance(table, Table))
self.assertEqual(table.table_id, self.TABLE_ID)
self.assertEqual(table._instance, instance)
def test__update_from_pb_success(self):
from gcloud.bigtable._generated_v2 import (
instance_pb2 as data_v2_pb2)
display_name = 'display_name'
instance_pb = data_v2_pb2.Instance(
display_name=display_name,
)
instance = self._makeOne(None, None, None, None)
self.assertEqual(instance.display_name, None)
instance._update_from_pb(instance_pb)
self.assertEqual(instance.display_name, display_name)
def test__update_from_pb_no_display_name(self):
from gcloud.bigtable._generated_v2 import (
instance_pb2 as data_v2_pb2)
|
sander76/home-assistant
|
tests/components/awair/test_sensor.py
|
Python
|
apache-2.0
| 9,940
| 0.000101
|
"""Tests for the Awair sensor platform."""
from unittest.mock import patch
from homeassistant.components.awair.const import (
API_CO2,
API_HUMID,
API_LUX,
API_PM10,
API_PM25,
API_SCORE,
API_SPL_A,
API_TEMP,
API_VOC,
ATTR_UNIQUE_ID,
DOMAIN,
SENSOR_TYPES,
)
from homeassistant.const import (
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
LIGHT_LUX,
PERCENTAGE,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
)
from homeassistant.helpers import entity_registry as er
from .const import (
AWAIR_UUID,
CONFIG,
DEVICES_FIXTURE,
GEN1_DATA_FIXTURE,
GEN2_DATA_FIXTURE,
GLOW_DATA_FIXTURE,
MINT_DATA_FIXTURE,
OFFLINE_FIXTURE,
OMNI_DATA_FIXTURE,
UNIQUE_ID,
USER_FIXTURE,
)
from tests.common import MockConfigEntry
async def setup_awair(hass, fixtures):
"""Add Awair devices to hass, using specified fixtures for data."""
entry = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG)
with patch("python_awair.AwairClient.query", side_effect=fixtures):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
def assert_expected_properties(
hass, registry, name, unique_id, state_value, attributes
):
"""Assert expected properties from a dict."""
entry = registry.async_get(name)
assert entry.unique_id == unique_id
state = hass.states.get(name)
assert state
assert state.state == state_value
for attr, value in attributes.items():
assert state.attributes.get(attr) == value
async def test_awair_gen1_sensors(hass):
"""Test expected sensors on a 1st gen Awair."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN1_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = er.async_get(hass)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"88",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_temperature",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_TEMP][ATTR_UNIQUE_ID]}",
"21.8",
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, "awair_index": 1.0},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_humidity",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_HUMID][ATTR_UNIQUE_ID]}",
"41.59",
{ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE, "awair_index": 0.0},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_carbon_dioxide",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_CO2][ATTR_UNIQUE_ID]}",
"654.0",
{
ATTR_ICON: "mdi:cloud",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_MILLION,
"awair_index": 0.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_volatile_organic_compounds",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_VOC][ATTR_UNIQUE_ID]}",
"366",
{
ATTR_ICON: "mdi:cloud",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_BILLION,
"awair_index": 1.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm2_5",
# gen1 unique_id should be awair_12345-DUST, which matches old integration behavior
f"{AWAIR_UUID}_DUST",
"14.3",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 1.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm10",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM10][ATTR_UNIQUE_ID]}",
"14.3",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 1.0,
},
)
# We should not have a dust sensor; it's aliased as pm2.5
# and pm10 sensors.
assert hass.states.get("sensor.living_room_dust") is None
# We should not have sound or lux sensors.
assert hass.states.get("sensor.living_room_sound_level") is None
assert hass.states.get("sensor.living_room_illuminance") is None
async def test_awair_gen2_sensors(hass):
"""Test expected sensors on a 2nd gen Awair."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN2_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = er.async_get(hass)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"97",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm2_5",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}",
"2.0",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 0.0,
},
)
# The Awair 2nd gen reports specifically a pm2.5 sensor,
# and so we don't alias anything. Make sure we didn't do that.
assert hass.states.get("sensor.living_room_pm10") is None
async def test_awair_mint_sensors(hass):
"""Test expected sensors on an
|
Awair mint."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, MINT_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = er.async_get(hass)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"98",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_pm2_5",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}",
"1.0",
{
ATTR_ICON: "mdi:blur",
ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
"awair_index": 0.0,
},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_illuminance",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}",
"441.7",
{ATTR_UNIT_OF_MEASUREMENT: LIGHT_LUX},
)
# The Mint does not have a CO2 sensor.
assert hass.states.get("sensor.living_room_carbon_dioxide") is None
async def test_awair_glow_sensors(hass):
"""Test expected sensors on an Awair glow."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GLOW_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = er.async_get(hass)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"93",
{ATTR_ICON: "mdi:blur"},
)
# The glow does not have a particle sensor
assert hass.states.get("sensor.living_room_pm2_5") is None
async def test_awair_omni_sensors(hass):
"""Test expected sensors on an Awair omni."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OMNI_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = er.async_get(hass)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"99",
{ATTR_ICON: "mdi:blur"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_sound_level",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SPL_A][ATTR_UNIQUE_ID]}",
"47.0",
{ATTR_ICON: "mdi:ear-hearing", ATTR_UNIT_OF_MEASUREMENT: "dBa"},
)
assert_expected_properties(
hass,
registry,
"sensor.living_room_illuminance",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATT
|
5y/flask
|
flask/testsuite/basic.py
|
Python
|
bsd-3-clause
| 48,486
| 0.002311
|
# -*- coding: utf-8 -*-
"""
flask.testsuite.basic
~~~~~~~~~~~~~~~~~~~~~
The basic functionality.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import re
import uuid
import time
import flask
import pickle
import unittest
from datetime import datetime
from threading import Thread
from flask.testsuite import FlaskTestCase, emits_module_deprecation_warning
from flask._compat import text_type
from werkzeug.exceptions import BadRequest, NotFound, Forbidden
from werkzeug.http import parse_date
from werkzeug.routing import BuildError
class BasicFunctionalityTestCase(FlaskTestCase):
def test_options_work(self):
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST'])
self.assert_equal(rv.data, b'')
def test_options_on_multiple_rules(self):
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return 'Hello World'
@app.route('/', methods=['PUT'])
def index_put():
return 'Aha!'
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST', 'PUT'])
def test_options_handling_disabled(self):
app = flask.Flask(__name__)
def index():
return 'Hello World!'
index.provide_automatic_options = False
app.route('/')(index)
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(rv.status_code, 405)
app = flask.Flask(__name__)
def index2():
return 'Hello World!'
index2.provide_automatic_options = True
app.route('/', methods=['OPTIONS'])(index2)
rv = app.test_client().open('/', method='OPTIONS')
self.assert_equal(sorted(rv.allow), ['OPTIONS'])
def test_request_dispatching(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.request.method
@app.route('/more', methods=['GET', 'POST'])
def more():
return flask.request.method
c = app.test_client()
self.assert_equal(c.get('/').data, b'GET')
rv = c.post('/')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS'])
rv = c.head('/')
self.assert_equal(rv.status_code, 200)
self.assert_false(rv.data) # head truncates
self.assert_equal(c.post('/more').data, b'POST')
self.assert_equal(c.get('/more').data, b'GET')
rv = c.delete('/more')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST'])
def test_disallow_string_for_allowed_methods(self):
app = flask.Flask(__name__)
with self.assert_raises(TypeError):
@app.route('/', methods='GET POST')
def index():
return "Hey"
def test_url_mapping(self):
app = flask.Flask(__name__)
def index():
return flask.request.method
def more():
return flask.request.method
app.add_url_rule('/', 'index', index)
app.add_url_rule('/more', 'more', more, methods=['GET', 'POST'])
c = app.test_client()
self.assert_equal(c.get('/').data, b'GET')
rv = c.post('/')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS'])
rv = c.head('/')
self.assert_equal(rv.status_code, 200)
self.assert_false(rv.data) # head truncates
self.assert_equal(c.post('/more').data, b'POST')
self.assert_equal(c.get('/more').data, b'GET')
rv = c.delete('/more')
self.assert_equal(rv.status_code, 405)
self.assert_equal(sorted(rv.allow), ['GET', 'HEAD', 'OPTIONS', 'POST'])
def test_werkzeug_routing(self):
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
def bar():
return 'bar'
def index():
return 'index'
app.view_functions['bar'] = bar
app.view_functions['index'] = index
c = app.test_client()
self.assert_equal(c.get('/foo/').data, b'index')
self.assert_equal(c.get('/foo/bar').data, b'bar')
def test_endpoint_decorator(self):
from werkzeug.routing import Submount, Rule
app = flask.Flask(__name__)
app.url_map.add(Submount('/foo', [
Rule('/bar', endpoint='bar'),
Rule('/', endpoint='index')
]))
@app.endpoint('bar')
def bar():
return 'bar'
@app.endpoint('index')
def index():
return 'index'
c = app.test_client()
self.assert_equal(c.get('/foo/').data, b'index')
self.assert_equal(c.get('/foo/bar').data, b'bar')
def test_session(self):
app = flask.Flask(__name__)
app.secret_key = 'testkey'
@app.route('/set', methods=['POST'])
def set():
flask.session['value'] = flask.request.form['value']
return 'value set'
@app.route('/get')
def get():
return flask.session['value']
c = app.test_client()
self.assert_equal(c.post('/set', data={'value': '42'}).data, b'value set')
self.assert_equal(c.get('/get').data, b'42')
def test_session_using_server_name(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com/')
self.assert_in('domain=.example.com', rv.headers['set-cookie'].lower())
self.assert_in('httponly', rv.headers['set-cookie'].lower())
def test_session_using_server_name_and_port(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/')
self.assert_in('domain=.example.com', rv.headers['set-cookie'].lower())
self.assert_in('httponly', rv.headers['set-cookie'].lower())
def test_session_using_server_name_port_and_path(self):
app = flask.Flask(__name__)
app.config.update(
SECRET_KEY='foo',
SERVER_NAME='example.com:8080',
APPLICATION_ROOT='/foo'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
rv = app.test_client().get('/', 'http://example.com:8080/foo')
self.assert_in('domain=example.com', rv.headers['set-cookie'].lower())
self.assert_in('path=/foo', rv.headers['set-cookie'].lower())
self.assert_in('httponly', rv.headers['set-cookie'].lower())
def test_session_using_application_root(self):
class PrefixPathMiddleware(object):
def __init__(self, app, prefix):
self.app = app
self.prefix = prefix
def __call__(self, environ, start_response):
environ['SCRIPT_NAME'] = self.prefix
return self.app(environ, start_response)
app = flask.Flask(__name__)
app.wsgi_app = PrefixPathMiddleware(app.wsgi_app, '/bar')
app.config.update(
SECRET_KEY='foo',
APPLICATION_ROOT='/bar'
)
@app.route('/')
def index():
flask.session['testing'] = 42
return 'Hello World'
|
pannkotsky/groupmate
|
backend/wsgi.py
|
Python
|
mit
| 384
| 0
|
"""
WSGI config for project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")
application = get_wsgi_application()
|
PoisonBOx/PyGames
|
2.Pie/drawLine.py
|
Python
|
gpl-2.0
| 453
| 0.002208
|
import sys
import pygame
from pygame.locals import *
pygame.init()
screen = pygame.display.set_mode((600, 500))
pygame.display.set_caption("Drawing Lines")
screen.fill((0, 80, 0))
# draw the line
color = 100, 255, 200
width = 8
pygame.draw.line(screen, color, (100, 100), (500, 400), width)
pygame.display.update()
while True:
for event in pygame.event.get():
if event.type in (QUIT, KEYDOWN):
sys.exit()
|
duaneg/django-hookbox
|
djhookbox/views.py
|
Python
|
bsd-3-clause
| 5,848
| 0.007182
|
# Part of django-hookbox
# Copyright 2011, Duane Griffin <duaneg@dghda.com>
from django.conf import settings
from django.dispatch import Signal
from django.http import HttpResponse
from django.template import loader, RequestContext
from django.views.decorators.csrf import csrf_exempt
import json
import logging
import random
import string
logger = logging.getLogger('djhookbox')
secret = getattr(settings, 'HOOKBOX_WEBHOOK_SECRET', None)
_callbacks = []
def _call_callbacks(op, *args, **kwargs):
result = None
for callback in [cb for (cbop, cb) in _callbacks if cbop is None or cbop == op]:
oneresult = callback(op, *args, **kwargs)
if result is None:
result = oneresult
elif oneresult is not None:
logger.warn("multiple results returned from %s callback", op)
return result
def whcallback(arg):
'''
Decorator for functions which handle webhook callbacks.
All functions are called with the operation type and user as the first two
arguments. Operations on a channel (i.e. not connect/disconnect) will be
called with a channel name as the third argument, and publish will be
called with the payload as the fourth argument.
If a string argument is given the function will only be called for
matching webhooks. If no argument is given it will be called for all
webhooks.
Webhooks may optionally return a result, handling of which is dependent on
the operation type:
- The connect/disconnect operations ignore any results.
- Create callbacks should return either a dict containing the channel
options or, if they want to disallow channel creation, a failure
message (string).
If no create callback returns a response the operation is deemed to have
*failed*.
- Other callbacks may return a failure message (string), a
dictionary (which will be returned as a successful response), or a
properly formatted hookbox response.
If no callback returns a response the operation will be deemed to have
*succeeded* and an empty success response will be returned.
In all cases, including connect/disconnect, if more than one callback
returns a result the first will be used and a warning will be logged.
'''
# Called without op arg: register the callback for all operations
if callable(arg):
_callbacks.append((None, arg))
return arg
# Otherwise only register the callback for the specified operation
def decorator(method):
    # register the callback and hand the function back unchanged
    _callbacks.append((arg, method))
    return method
return decorator
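# A minimal usage sketch (hypothetical callback, not part of this module),
# following the contract documented above: a 'create' callback returns a dict
# of channel options to allow creation, or a string to refuse it.
#
#     @whcallback('create')
#     def allow_chat_channels(op, user, channel):
#         if channel.startswith('chat-'):
#             return {'history_size': 10}    # dict => channel options
#         return 'channel creation refused'  # string => failure message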
# TODO: Not sure these are necessary any more, the callbacks provide a super-set
# of their functionality.
signals = {
'connect': Signal(),
'disconnect': Signal(),
'subscribe': Signal(providing_args = ['channel']),
'unsubscribe': Signal(providing_args = ['channel']),
}
def webhook(method):
'''
Decorator which:
- checks a WebHook's secret key is correct
- exempts the view from CSRF checks
- massages the return result into the format expected by hookbox
Returns 403 if the secret is required and not present/incorrect.
'''
@csrf_exempt
def wrapper(*args, **kwargs):
# Process the request
request = args[0]
if secret is None or ('secret' in request.POST and request.POST['secret'] == secret):
try:
data = method(*args, **kwargs)
if data is None:
result = [True, {}]
elif isinstance(data, dict):
result = [True, data]
elif isinstance(data, str):
result = [False, {'msg': data}]
else:
assert isinstance(data, list)
assert len(data) == 2
result = data
except Exception as err:
result = [False, {'msg': str(err)}]
else:
result = [False, {'msg': 'webhook secret verification failed'}]
# Log the result
if result[0]:
logger.info("webhook succeeded: %s (%s): %s", method.__name__, request.user.username, str(result[1]))
else:
logger.warn("webhook failed: %s (%s): %s", method.__name__, request.user.username, result[1]['msg'])
return HttpResponse(json.dumps(result), mimetype = 'application/json')
return wrapper
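# For reference, the wire format produced by the wrapper above is always a
# two-element JSON list: e.g. [true, {"name": "alice"}] on success, or
# [false, {"msg": "webhook secret verification failed"}] on failure.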
@webhook
def connect(request):
signals['connect'].send(request.user)
_call_callbacks('connect', request.user)
if request.user.is_authenticated():
username = request.user.username
else:
username = ' _' + ''.join(random.choice(string.letters + string.digits) for i in xrange(10))
return {
'name': username
}
@webhook
def disconnect(request):
signals['disconnect'].send_robust(request.user)
_call_callbacks('disconnect', request.user)
@webhook
def create_channel(request):
result = _call_callbacks('create', request.user, request.POST['channel_name'])
return result or [False, {'msg': 'unrecognized channel: %s' % request.POST['channel_name']}]
@webhook
def publish(request):
return _call_callbacks('publish', request.user, request.POST['channel_name'], request.POST['payload'])
@webhook
def destroy_channel(request):
return _call_callbacks('destroy', request.user, channel = request.POST['channel_name'])
@webhook
def subscribe(request):
signals['subscribe'].send(request.user, channel = request.POST['channel_name'])
return _call_callbacks('subscribe', request.user, channel = request.POST['channel_name'])
@webhook
def unsubscribe(request):
signals['unsubscribe'].send_robust(request.user, channel = request.POST['channel_name'])
return _call_callbacks('unsubscribe', request.user, channel = request.POST['channel_name'])
|
dontnod/weblate
|
openshift/wsgi_install.py
|
Python
|
gpl-3.0
| 3,281
| 0
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright © 2014 Daniel Tschan <tschan@puzzle.ch>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import os
from string import Template
VIRTUALENV = os.path.join(
os.environ['OPENSHIFT_PYTHON_DIR'], 'virtenv', 'bin', 'activate_this.py'
)
with open(VIRTUALENV) as handle:
code = compile(handle.read(), 'activate_this.py', 'exec')
# pylint: disable=exec-used
exec(code, dict(__file__=VIRTUALENV)) # noqa
def application(environ, start_response):
ctype = 'text/html'
response_body = Template('''<!doctype html>
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta charset="utf-8">
<title>Installing Weblate</title>
<style>
html {
background: #f5f5f5;
height: 100%;
}
body {
color: #404040;
font-family: "Helvetica Neue",Helvetica,"Liberation Sans",Arial,sans-serif;
font-size: 14px;
line-height: 1.4;
}
h1 {
color: #000;
line-height: 1.38em;
margin: .4em 0 .5em;
font-size: 25px;
font-weight: 300;
border-bottom: 1px solid #fff;
}
h1:after {
content: "";
display: block;
height: 1px;
background-color: #ddd;
}
p {
margin: 0 0 2em;
}
pre {
padding: 13.333px 20px;
margin: 0 0 20px;
font-size: 13px;
line-height: 1.4;
background-color: #fff;
border-left: 2px solid rgba(120,120,120,0.35);
font-family: Menlo,Monaco,"Liberation Mono",Consolas,monospace !important;
}
.content {
display: table;
margin-left: -15px;
margin-right: -15px;
position: relative;
min-height: 1px;
padding-left: 30px;
padding-right: 30px;
}
</style>
</head>
<body>
<div class="content">
<h1>$action1 Weblate</h1>
<p>
Weblate is being $action2.
Please wait a few minutes and refresh this page.
</p>
$log
</div>
</body>
</html>''')
context = {}
if os.path.exists(os.environ['OPENSHIFT_DATA_DIR'] + '/.installed'):
context['action1'] = 'Updating'
context['action2'] = 'updated'
context['log'] = ''
else:
context['action1'] = 'Installing'
context['action2'] = 'installed'
log_msg = os.popen(
r"cat ${OPENSHIFT_PYTHON_LOG_DIR}/install.log |"
r" grep '^[^ ]\|setup.py install' |"
r" sed 's,/var/lib/openshift/[a-z0-9]\{24\},~,g'"
).read()
context['log'] = '<pre>' + log_msg + '</pre>'
response_body = response_body.substitute(context)
status = '200 OK'
response_headers = [
('Content-Type', ctype),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]
|
tdoughty1/mongodb_M101P
|
Week1/Lecture/hello_world/hello_world.py
|
Python
|
gpl-2.0
| 237
| 0.025316
|
import bottle
@bottle.route('/')
def home_page():
mythings = ['apple','orange','banana','peach']
return bottle.template('hello_world', username='Todd', things=mythings)
bottle.debug(True)
bottle.run(host='localhost', port=8082)
|
plotly/python-api
|
packages/python/plotly/plotly/tools.py
|
Python
|
mit
| 25,137
| 0.000517
|
# -*- coding: utf-8 -*-
"""
tools
=====
Functions that USERS will possibly want access to.
"""
from __future__ import absolute_import
import json
import warnings
import six
import re
import os
from plotly import exceptions, optional_imports
from plotly.files import PLOTLY_DIR
DEFAULT_PLOTLY_COLORS = [
"rgb(31, 119, 180)",
"rgb(255, 127, 14)",
"rgb(44, 160, 44)",
"rgb(214, 39, 40)",
"rgb(148, 103, 189)",
"rgb(140, 86, 75)",
"rgb(227, 119, 194)",
"rgb(127, 127, 127)",
"rgb(188, 189, 34)",
"rgb(23, 190, 207)",
]
REQUIRED_GANTT_KEYS = ["Task", "Start", "Finish"]
PLOTLY_SCALES = {
"Greys": ["rgb(0,0,0)", "rgb(255,255,255)"],
"YlGnBu": ["rgb(8,29,88)", "rgb(255,255,217)"],
"Greens": ["rgb(0,68,27)", "rgb(247,252,245)"],
"YlOrRd": ["rgb(128,0,38)", "rgb(255,255,204)"],
"Bluered": ["rgb(0,0,255)", "rgb(255,0,0)"],
"RdBu": ["rgb(5,10,172)", "rgb(178,10,28)"],
"Reds": ["rgb(220,220,220)", "rgb(178,10,28)"],
"Blues": ["rgb(5,10,172)", "rgb(220,220,220)"],
"Picnic": ["rgb(0,0,255)", "rgb(255,0,0)"],
"Rainbow": ["rgb(150,0,90)", "rgb(255,0,0)"],
"Portland": ["rgb(12,51,131)", "rgb(217,30,30)"],
"Jet": ["rgb(0,0,131)", "rgb(128,0,0)"],
"Hot": ["rgb(0,0,0)", "rgb(255,255,255)"],
"Blackbody": ["rgb(0,0,0)", "rgb(160,200,255)"],
"Earth": ["rgb(0,0,130)", "rgb(255,255,255)"],
"Electric": ["rgb(0,0,0)", "rgb(255,250,220)"],
"Viridis": ["rgb(68,1,84)", "rgb(253,231,37)"],
}
# color constants for violin plot
DEFAULT_FILLCOLOR = "#1f77b4"
DEFAULT_HISTNORM = "probability density"
ALTERNATIVE_HISTNORM = "probability"
# Warning format
def warning_on_one_line(message, category, filename, lineno, file=None, line=None):
return "%s:%s: %s:\n\n%s\n\n" % (filename, lineno, category.__name__, message)
warnings.formatwarning = warning_on_one_line
ipython_core_display = optional_imports.get_module("IPython.core.display")
sage_salvus = optional_imports.get_module("sage_salvus")
### mpl-related tools ###
def mpl_to_plotly(fig, resize=False, strip_style=False, verbose=False):
"""Convert a matplotlib figure to plotly dictionary and send.
All available information about a matplotlib visualization is stored
within a matplotlib.figure.Figure object. You can create a plot in python
using matplotlib, store the figure object, and then pass this object to
the fig_to_plotly function. In the background, mplexporter is used to
crawl through the mpl figure object for appropriate information. This
information is then systematically sent to the PlotlyRenderer which
creates the JSON structure used to make plotly visualizations. Finally,
these dictionaries are sent to plotly and your browser should open up a
new tab for viewing! Optionally, if you're working in IPython, you can
set notebook=True and the PlotlyRenderer will call plotly.iplot instead
of plotly.plot to have the graph appear directly in the IPython notebook.
Note, this function gives the user access to a simple, one-line way to
render an mpl figure in plotly. If you need to troubleshoot, you can do
this step manually by NOT running this function and entering the following:
===========================================================================
from plotly.matplotlylib import mplexporter, PlotlyRenderer
# create an mpl figure and store it under a variable 'fig'
renderer = PlotlyRenderer()
exporter = mplexporter.Exporter(renderer)
exporter.run(fig)
===========================================================================
You can then inspect the JSON structures by accessing these:
renderer.layout -- a plotly layout dictionary
renderer.data -- a list of plotly data dictionaries
"""
matplotlylib = optional_imports.get_module("plotly.matplotlylib")
if matplotlylib:
renderer = matplotlylib.PlotlyRenderer()
matplotlylib.Exporter(renderer).run(fig)
if resize:
renderer.resize()
if strip_style:
renderer.strip_style()
if verbose:
print(renderer.msg)
return renderer.plotly_fig
else:
warnings.warn(
"To use Plotly's matplotlylib functionality, you'll need to have "
"matplotlib successfully installed with all of its dependencies. "
"You're getting this error because matplotlib or one of its "
"dependencies doesn't seem to be installed correctly."
)
### graph_objs related tools ###
def get_subplots(rows=1, columns=1, print_grid=False, **kwargs):
"""Return a dictionary instance with the subplots set in 'layout'.
Example 1:
# stack two subplots vertically
fig = tools.get_subplots(rows=2)
fig['data'] += [Scatter(x=[1,2,3], y=[2,1,2], xaxis='x1', yaxis='y1')]
fig['data'] += [Scatter(x=[1,2,3], y=[2,1,2], xaxis='x2', yaxis='y2')]
Example 2:
# print out string showing the subplot grid you've put in the layout
fig = tools.get_subplots(rows=3, columns=2, print_grid=True)
Keywords arguments with constant defaults:
rows (kwarg, int greater than 0, default=1):
Number of rows, evenly spaced vertically on the figure.
columns (kwarg, int greater than 0, default=1):
Number of columns, evenly spaced horizontally on the figure.
horizontal_spacing (kwarg, float in [0,1], default=0.1):
Space between subplot columns. Applied to all columns.
vertical_spacing (kwarg, float in [0,1], default=0.05):
Space between subplot rows. Applied to all rows.
print_grid (kwarg, True | False, default=False):
If True, prints a tab-delimited string representation
of your plot grid.
Keyword arguments with variable defaults:
horizontal_spacing (kwarg, float in [0,1], default=0.2 / columns):
Space between subplot columns.
vertical_spacing (kwarg, float in [0,1], default=0.3 / rows):
Space between subplot rows.
"""
# TODO: protected until #282
from plotly.graph_objs import graph_objs
warnings.warn(
"tools.get_subplots is depreciated. " "Please use tools.make_subplots instead."
)
# Throw exception for non-integer rows and columns
if not isinstance(rows, int) or rows <= 0:
raise Exception("Keyword argument 'rows' " "must be an int greater than 0")
if not isinstance(columns, int) or columns <= 0:
raise Exception("Keyword argument 'columns' " "must be an int greater than 0")
# Throw exception if non-valid kwarg is sent
VALID_KWARGS = ["horizontal_spacing", "vertical_spacing"]
for key in kwargs.keys():
if key not in VALID_KWARGS:
raise Exception("Invalid keyword argument: '{0}'".format(key))
# Set 'horizontal_spacing' / 'vertical_spacing' w.r.t. rows / columns
try:
horizontal_spacing = float(kwargs["horizontal_spacing"])
except KeyError:
horizontal_spacing = 0.2 / columns
try:
vertical_spacing = float(kwargs["vertical_spacing"])
except KeyError:
vertical_spacing = 0.3 / rows
fig = dict(layout=graph_objs.Layout()) # will return this at the end
plot_width = (1 - horizontal_spacing * (columns - 1)) / columns
plot_height = (1 - vertical_spacing * (rows - 1)) / rows
plot_num = 0
for rrr in range(rows):
for ccc in range(columns):
xaxis_name = "xaxis{0}".format(plot_num + 1)
x_anchor = "y{0}".format(plot_num + 1)
x_start = (plot_width + horizontal_spacing) * ccc
x_end = x_start + plot_width
yaxis_name = "yaxis{0}".format(plot_num + 1)
y_anchor = "x{0}".format(plot_num + 1)
y_start = (plot_height + vertical_spacing) * rrr
y_end = y_start + plot_height
xaxis = dict(domain=[x_start, x_end], anchor=x_anchor)
fig["layout"][xaxis_name] = xaxis
yaxis = dict(domain=[y_start, y_end], anchor=y_anchor)
fig["layout"][yaxis_name] = yaxis
plot_num += 1
if print_grid:
print("This is the format of your plot grid
|
ttreeagency/PootleTypo3Org
|
pootle/apps/pootle_app/views/index/index.py
|
Python
|
gpl-2.0
| 4,828
| 0.000414
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2012 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import locale
from django.conf import settings
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext as _
from pootle.i18n.gettext import tr_lang
from pootle_app.models import Directory
from pootle_app.models.permissions import (get_matching_permissions,
check_permission)
from pootle_app.views.top_stats import gentopstats_root
from pootle_language.models import Language
from pootle_misc.browser import get_table_headings
from pootle_misc.stats import get_raw_stats
from pootle_profile.models import get_profile
from pootle_project.models import Project
from pootle_statistics.models import Submission
def get_items(request, model, get_last_action, name_func):
items = []
if not check_permission('view', request):
return items
for item in model.objects.iterator():
stats = get_raw_stats(item)
translated_percentage = stats['translated']['percentage']
items.append({
'code': item.code,
'name': name_func(item.fullname),
'lastactivity': get_last_action(item),
'stats': stats,
'completed_title': _("%(percentage)d%% complete",
{'percentage': translated_percentage}),
})
items.sort(lambda x, y: locale.strcoll(x['name'], y['name']))
return items
def getlanguages(request):
def get_last_action(item):
try:
return Submission.objects.filter(
translation_project__language=item).latest().as_html()
except Submission.DoesNotExist:
return ''
return get_items(request, Language, get_last_action, tr_lang)
def getprojects(request):
def get_last_action(item):
try:
return Submission.objects.filter(
translation_project__project=item).latest().as_html()
except Submission.DoesNotExist:
return ''
return get_items(request, Project, get_last_action, lambda name: name)
def view(request):
request.permissions = get_matching_permissions(get_profile(request.user),
Directory.objects.root)
can_edit = request.user.is_superuser
languages = getlanguages(request)
languages_table_fields = ['language', 'progress', 'activity']
languages_table = {
'id': 'index-languages',
'proportional': False,
'fields': languages_table_fields,
'headings': get_table_headings(languages_table_fields),
'items': filter(lambda x: x['stats']['total']['words'] != 0, languages),
}
projects = getprojects(request)
projects_table_fields = ['project', 'progress', 'activity']
projects_table = {
'id': 'index-projects',
'proportional': False,
'fields': projects_table_fields,
'headings': get_table_headings(projects_table_fields),
'items': projects,
}
templatevars = {
'description': _(settings.DESCRIPTION),
'keywords': [
'Pootle',
'translate',
'translation',
'localisation',
'localization',
'l10n',
'traduction',
'traduire',
],
'topstats': gentopstats_root(),
'permissions': request.permissions,
'can_edit': can_edit,
'languages_table': languages_table,
'projects_table': projects_table,
}
visible_langs = [l for l in languages if l['stats']['total']['words'] != 0]
templatevars['moreprojects'] = (len(projects) > len(visible_langs))
if can_edit:
from pootle_misc.siteconfig import load_site_config
from pootle_app.forms import GeneralSettingsForm
siteconfig = load_site_config()
setting_form = GeneralSettingsForm(siteconfig)
templatevars['form'] = setting_form
return render_to_response('index/index.html', templatevars,
RequestContext(request))
|
zlpmichelle/crackingtensorflow
|
wide_deep/wide_deep.py
|
Python
|
apache-2.0
| 8,315
| 0.007216
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example code for TensorFlow Wide & Deep Tutorial using TF.Learn API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import shutil
import sys
import tensorflow as tf
_CSV_COLUMNS = [
'age', 'workclass', 'fnlwgt', 'education', 'education_num',
'marital_status', 'occupation', 'relationship', 'race', 'gender',
'capital_gain', 'capital_loss', 'hours_per_week', 'native_country',
'income_bracket'
]
_CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''],
[0], [0], [0], [''], ['']]
parser = argparse.ArgumentParser()
parser.add_argument(
'--model_dir', type=str, default='/tmp/census_model',
help='Base directory for the model.')
parser.add_argument(
'--model_type', type=str, default='wide_deep',
help="Valid model types: {'wide', 'deep', 'wide_deep'}.")
parser.add_argument(
'--train_epochs', type=int, default=20, help='Number of training epochs.')
parser.add_argument(
'--epochs_per_eval', type=int, default=2,
help='The number of training epochs to run between evaluations.')
parser.add_argument(
'--batch_size', type=int, default=40, help='Number of examples per batch.')
parser.add_argument(
'--train_data', type=str, default='/tmp/census_data/adult.data',
help='Path to the training data.')
parser.add_argument(
'--test_data', type=str, default='/tmp/census_data/adult.test',
help='Path to the test data.')
def build_model_columns():
"""Builds a set of wide and deep feature columns."""
# Continuous columns
age = tf.feature_column.numeric_column('age')
education_num = tf.feature_column.numeric_column('education_num')
capital_gain = tf.feature_column.numeric_column('capital_gain')
capital_loss = tf.feature_column.numeric_column('capital_loss')
hours_per_week = tf.feature_column.numeric_column('hours_per_week')
education = tf.feature_column.categorical_column_with_vocabulary_list(
'education', [
'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college',
'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school',
'5th-6th', '10th', '1st-4th', 'Preschool', '12th'])
marital_status = tf.feature_column.categorical_column_with_vocabulary_list(
'marital_status', [
'Married-civ-spouse', 'Divorced', 'Married-spouse-absent',
'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed'])
relationship = tf.feature_column.categorical_column_with_vocabulary_list(
'relationship', [
'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried',
'Other-relative'])
workclass = tf.feature_column.categorical_column_with_vocabulary_list(
'workclass', [
'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov',
'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked'])
# To show an example of hashing:
occupation = tf.feature_column.categorical_column_with_hash_bucket(
'occupation', hash_bucket_size=1000)
# Transformations.
age_buckets = tf.feature_column.bucketized_column(
age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])
# Wide columns and deep columns.
base_columns = [
education, marital_status, relationship, workclass, occupation,
age_buckets,
]
crossed_columns = [
tf.feature_column.crossed_column(
['education', 'occupation'], hash_bucket_size=1000),
tf.feature_column.crossed_column(
[age_buckets, 'education', 'occupation'], hash_bucket_size=1000),
]
wide_columns = base_columns + crossed_columns
deep_columns = [
age,
education_num,
capital_gain,
capital_loss,
hours_per_week,
tf.feature_column.indicator_column(workclass),
tf.feature_column.indicator_column(education),
tf.feature_column.indicator_column(marital_status),
tf.feature_column.indicator_column(relationship),
# To show an example of embedding
tf.feature_column.embedding_column(occupation, dimension=8),
]
return wide_columns, deep_columns
def build_estimator(model_dir, model_type):
"""Build an estimator appropriate for the given model type."""
wide_columns, deep_columns = build_model_columns()
hidden_units = [100, 75, 50, 25]
# Create a tf.estimator.RunConfig to ensure the model is run on CPU, which
# trains faster than GPU for this model.
run_config = tf.estimator.RunConfig().replace(
session_config=tf.ConfigProto(device_count={'GPU': 0}))
if model_type == 'wide':
return tf.estimator.LinearClassifier(
model_dir=model_dir,
feature_columns=wide_columns,
config=run_config)
elif model_type == 'deep':
return tf.estimator.DNNClassifier(
model_dir=model_dir,
feature_columns=deep_columns,
hidden_units=hidden_units,
config=run_config)
else:
return tf.estimator.DNNLinearCombinedClassifier(
model_dir=model_dir,
linear_feature_columns=wide_columns,
dnn_feature_columns=deep_columns,
dnn_hidden_units=hidden_units,
config=run_config)
def input_fn(data_file, num_epochs, shuffle, batch_size):
"""Generate an input function for the Estimator."""
assert tf.gfile.Exists(data_file), (
'%s not found. Please make sure you have either run data_download.py or '
'set both arguments --train_data and --test_data.' % data_file)
def parse_csv(value):
print('Parsing', data_file)
columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS)
features = dict(zip(_CSV_COLUMNS, columns))
labels = features.pop('income_bracket')
return features, tf.equal(labels, '>50K')
# Extract lines from input files using the Dataset API.
dataset = tf.contrib.data.TextLineDataset(data_file)
dataset = dataset.map(parse_csv, num_threads=5)
# Apply transformations to the Dataset
dataset = dataset.batch(batch_size)
dataset = dataset.repeat(num_epochs)
# Input function that is called by the Estimator
def _input_fn():
if shuffle:
# Apply shuffle transformation to re-shuffle the dataset in each call.
shuffled_dataset = dataset.shuffle(buffer_size=100000)
iterator = shuffled_dataset.make_one_shot_iterator()
else:
iterator = dataset.make_one_shot_iterator()
features, labels = iterator.get_next()
return features, labels
return _input_fn
def main(unused_argv):
# Clean up the model directory if present
shutil.rmtree(FLAGS.model_dir, ignore_errors=True)
model = build_estimator(FLAGS.model_dir, FLAGS.model_type)
# Set up input function generators for the train and test data files.
train_input_fn = input_fn(
data_file=FLAGS.train_data,
num_epochs=FLAGS.epochs_per_eval,
shuffle=True,
batch_size=FLAGS.batch_size)
eval_input_fn = input_fn(
data_file=FLAGS.test_data,
num_epochs=1,
shuffle=False,
batch_size=FLAGS.batch_size)
# Train and evaluate the model every `FLAGS.epochs_per_eval` epochs.
for n in range(FLAGS.train_epochs // FLAGS.epochs_per_eval):
model.train(input_fn=train_input_fn)
results = model.evaluate(input_fn=eval_input_fn)
# Display evaluation metrics
print('Results at epoch', (n + 1) * FLAGS.epochs_per_eval)
print('-' * 30)
for key in sorted(results):
print('%s: %s' % (key, results[key]))
if __name__ == '__main__':
  tf.logging.set_verbosity(tf.logging.INFO)
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
tschinz/iPython_Workspace
|
settings/profile_wpwiki/ipython_notebook_config.py
|
Python
|
gpl-2.0
| 24,145
| 0.003686
|
# Configuration file for ipython-notebook.
c = get_config()
#------------------------------------------------------------------------------
# NotebookApp configuration
#------------------------------------------------------------------------------
# NotebookApp will inherit config from: BaseIPythonApplication, Application
# The url for MathJax.js.
# c.NotebookApp.mathjax_url = ''
# Supply extra arguments that will be passed to Jinja environment.
# c.NotebookApp.jinja_environment_options = {}
# The IP address the notebook server will listen on.
# c.NotebookApp.ip = u''
# DEPRECATED use base_url
# c.NotebookApp.base_project_url = '/'
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.NotebookApp.verbose_crash = False
# The random bytes used to secure cookies. By default this is a new random
# number every time you start the Notebook. Set it to a value in a config file
# to enable logins to persist across server sessions.
#
# Note: Cookie secrets should be kept private, do not share config files with
# cookie_secret stored in plaintext (you can read the value from a file).
# c.NotebookApp.cookie_secret = ''
# The number of additional ports to try if the specified port is not available.
# c.NotebookApp.port_retries = 50
# Whether to open in a browser after starting. The specific browser used is
# platform dependent and determined by the python standard library `webbrowser`
# module, unless it is overridden using the --browser (NotebookApp.browser)
# configuration option.
# c.NotebookApp.open_browser = True
# The notebook manager class to use.
# c.NotebookApp.notebook_manager_class = 'IPython.html.services.notebooks.filenbmanager.FileNotebookManager'
# The date format used by logging formatters for %(asctime)s
# c.NotebookApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# The port the notebook server will listen on.
# c.NotebookApp.port = 8888
# Whether to overwrite existing config files when copying
# c.NotebookApp.overwrite = False
# Whether to enable MathJax for typesetting math/TeX
#
# MathJax is the javascript library IPython uses to render math/LaTeX. It is
# very large, so you may want to disable it if you have a slow internet
# connection, or for offline use of the notebook.
#
# When disabled, equations etc. will appear as their untransformed TeX source.
c.NotebookApp.enable_mathjax = True
# The full path to an SSL/TLS certificate file.
# c.NotebookApp.certfile = u''
# The base URL for the notebook server.
#
# Leading and trailing slashes can be omitted, and will automatically be added.
# c.NotebookApp.base_url = '/'
# The directory to use for notebooks and kernels.
# c.NotebookApp.notebook_dir = u'C:\\Program Files\\WinPython-64bit-2.7.6.4\\python-2.7.6.amd64'
#
# c.NotebookApp.file_to_run = ''
# The IPython profile to use.
# c.NotebookApp.profile = u'default'
# paths for Javascript extensions. By default, this is just
# IPYTHONDIR/nbextensions
# c.NotebookApp.nbextensions_path = []
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHONDIR.
# c.NotebookApp.ipython_dir = u''
# Set the log level by value or name.
# c.NotebookApp.log_level = 30
# Hashed password to use for web authentication.
#
# To generate, type in a python/IPython shell:
#
# from IPython.lib import passwd; passwd()
#
# The string should be of the form type:salt:hashed-password.
c.NotebookApp.password = u'sha1:b39a13a9217a:b7796191b9cdfbb20f3b29b57ad34a32dafd6ba8'
# The Logging format template
# c.NotebookApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.NotebookApp.extra_config_file = u''
# Extra paths to search for serving static files.
#
# This allows adding javascript/css to be available from the notebook server
# machine, or overriding individual files in the IPython
# c.NotebookApp.extra_static_paths = []
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
# c.NotebookApp.trust_xheaders = False
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.NotebookApp.copy_config_files = False
# The full path to a private key file for usage with SSL/TLS.
# c.NotebookApp.keyfile = u''
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
# c.NotebookApp.webapp_settings = {}
# Specify what command to use to invoke a web browser when opening the notebook.
# If not specified, the default browser will be determined by the `webbrowser`
# standard library module, which allows setting of the BROWSER environment
# variable to override it.
# c.NotebookApp.browser = u''
#------------------------------------------------------------------------------
# IPKernelApp configuration
#------------------------------------------------------------------------------
# IPython: an enhanced interactive Python shell.
# IPKernelApp will inherit config from: BaseIPythonApplication, Application,
# InteractiveShellApp
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.IPKernelApp.exec_PYTHONSTARTUP = True
# The importstring for the DisplayHook factory
# c.IPKernelApp.displayhook_class = 'IPython.kernel.zmq.displayhook.ZMQDisplayHook'
# Set the IP or interface on which the kernel will listen.
# c.IPKernelApp.ip = u''
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
c.IPKernelApp.pylab = 'inline'
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.IPKernelApp.verbose_crash = False
# The Kernel subclass to be used.
#
# This should allow easy re-use of the IPKernelApp entry point to configure and
# launch kernels other than IPython's own.
# c.IPKernelApp.kernel_class = 'IPython.kernel.zmq.ipkernel.Kernel'
# Run the module as a script.
# c.IPKernelApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# set the shell (ROUTER) port [default: random]
# c.IPKernelApp.shell_port = 0
# set the control (ROUTER) port [default: random]
# c.IPKernelApp.control_port = 0
# Whether to overwrite existing config files when copying
# c.IPKernelApp.overwrite = False
# Execute the given command string.
# c.IPKernelApp.code_to_run = ''
# set the stdin (ROUTER) port [default: random]
# c.IPKernelApp.stdin_port = 0
# Set the log level by value or name.
# c.IPKernelApp.log_level = 30
# lines of code to run at IPython startup.
# c.IPKernelApp.exec_lines = []
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.IPKernelApp.extra_config_file = u''
# The importstring for the OutStream factory
# c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream'
# Whether to create profile dir if it doesn't exist
# c.IPKernelApp.auto_create = False
# set the heartbeat port [default: random]
# c.IPKernelApp.hb_port = 0
#
# c.IPKernelApp.transport = 'tcp'
# redirect stdout to the null device
# c.IPKernelApp.no_stdout = False
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.IPKernelApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.IPKernelApp.extra_extension = ''
# A file to be run
# c.IPKernelApp.file_to_run = ''
# The IPython profile to use.
# c.IPKernelApp.profile = u'default'
#
# c.IPKernelApp.parent_a
|
daniellinye/HRINFG3
|
euromast/components/stateManagment.py
|
Python
|
mit
| 1,431
| 0.002096
|
import pygame as pg
from i18n import i18n
class BaseScene(object):
"""
Parent class for individual game states to inherit from.
"""
def __init__(self, helpers=None):
self.done = False
self.quit = False
self.sounds = helpers['sounds'] if helpers else None
self.assets = helpers['assets'] if helpers else None
self.vars = helpers['vars'] if helpers else None
self.game = helpers['vars']['pygame'] if helpers else None
self.i18n = i18n.Localize()
self.next_state = None
self.wait = None
self.screen_rect = pg.display.get_surface().get_rect()
self.persist = {}
self.player = None
self.players = None
self.font = pg.font.SysFont("Arial", 24)
def startup(self, persistent):
"""
Called when a state resumes being active.
Allows information to be passed between states.
persistent: a dict passed from state to state
"""
self.persist = persistent
def get_event(self, event):
"""
Handle a single event passed by the Game object.
"""
pass
def update(self, dt):
"""
Update the state. Called by the Game object once
per frame.
dt: time since last frame
"""
pass
def draw(self, surface):
"""
        Draw everything to the screen.
"""
pass
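# Minimal usage sketch (not from this repo; "TitleScene" and its drawing are
# made up) showing how a concrete scene subclasses BaseScene:
#
# class TitleScene(BaseScene):
#     def draw(self, surface):
#         surface.fill((0, 0, 0))
#         surface.blit(self.font.render("Title", True, (255, 255, 255)), (10, 10))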
|
julio77nz/MusicSea
|
musicsea/features/steps/register_album.py
|
Python
|
gpl-3.0
| 2,839
| 0.003875
|
from behave import *
import operator
from functools import reduce  # not a builtin on Python 3; available from functools on 2.6+
from django.db.models import Q
use_step_matcher("parse")
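# Illustrative Gherkin these parse-style steps match (the group, user and
# table values below are made up):
#
#   Given Exists album at group "Rock" by "alice"
#     | title     | year |
#     | Nevermind | 1991 |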
@given('Exists album at group "{group_name}" by "{username}"')
def step_impl(context, group_name, username):
from django.contrib.auth.models import User
user = User.objects.get(username=username)
from musicseaapp.models import Group
group = Group.objects.get(name=group_name)
from musicseaapp.models import Album
for row in context.table:
album = Album(group=group, user=user)
for heading in row.headings:
setattr(album, heading, row[heading])
album.save()
@given('Exists album registered by "{username}"')
def step_impl(context, username):
from django.contrib.auth.models import User
user = User.objects.get(username=username)
from musicseaapp.models import Album
for row in context.table:
album = Album(user=user)
for heading in row.headings:
setattr(album, heading, row[heading])
album.save()
@when('I register album at group "{group_name}"')
def step_impl(context, group_name):
from musicseaapp.models import Group
group = Group.objects.get(name=group_name)
for row in context.table:
context.browser.visit(context.get_url('musicseaapp:albums_create', group.pk))
        if context.browser.url == context.get_url('musicseaapp:albums_create', group.pk):
form = context.browser.find_by_tag('form').first
for heading in row.headings:
context.browser.fill(heading, row[heading])
form.find_by_value('Submit').first.click()
@then('I\'m viewing the details page for album at group "{group_name}" by "{username}"')
def step_impl(context, group_name, username):
q_list = [Q((attribute, context.table.rows[0][attribute])) for attribute in context.table.headings]
from django.contrib.auth.models import User
q_list.append(Q(('user', User.objects.get(username=username))))
from musicseaapp.models import Group
q_list.append(Q(('group', Group.objects.get(name=group_name))))
from musicseaapp.models import Album
album = Album.objects.filter(reduce(operator.and_, q_list)).get()
assert context.browser.url == context.get_url(album)
@then('There are {count:n} albums')
def step_impl(context, count):
from musicseaapp.models import Album
assert count == Album.objects.count()
@when('I edit the current album')
def step_impl(context):
context.browser.find_link_by_text('edit').click()
# TODO: Test also using direct edit view link
# context.browser.visit(context.get_url('musicseaapp:album_edit', album.pk))
form = context.browser.find_by_tag('form').first
for heading in context.table.headings:
context.browser.fill(heading, context.table[0][heading])
form.find_by_value('Submit').first.click()
|
andersx/cs-proteins
|
scripts/visualize_noe.py
|
Python
|
unlicense
| 1,599
| 0.003752
|
import glob
import sys
import string
import Bio.PDB
from pymol import cmd  # `cmd` is only implicit inside the PyMOL GUI; import it explicitly
def parse_noe(filename):
f = open(filename, 'r')
noe_pairs = []
for line in f.readlines():
res_a = int(string.split(line)[2])
res_b = int(string.split(line)[7])
noe_pair = [res_a, res_b]
if noe_pair not in noe_pairs:
noe_pairs.append(noe_pair)
f.close()
print len(noe_pairs), "CA lines"
return noe_pairs
def count_restraints(filename):
f = open(filename, 'r')
noe_pairs = []
for line in f.readlines():
# print line
res_a = int(string.split(line)[2])
res_b = int(string.split(line)[7])
        name_a = string.split(line)[5].rstrip(")")[:-1]
name_b = string.split(line)[10].rstrip(")")[:-1]
noe_pair = [res_a, res_b, name_a, name_b]
if [res_a, res_b, name_a, name_b] not in noe_pairs and \
[res_b, res_a, name_b, name_a] not in noe_pairs:
noe_pairs.append(noe_pair)
f.close()
print len(noe_pairs), "NOE contacts"
return len(noe_pairs)
native_pdb = sys.argv[1]
noe = False
noe_file = ""
if len(sys.argv) == 3:
noe = True
noe_file = sys.argv[2]
count_restraints(noe_file)
cmd.load(native_pdb, "native")
cmd.hide("all")
cmd.show("cartoon", "native")
if noe:
for pair in parse_noe(noe_file):
cmd.distance("noe", "native and resi %i and name ca" % (pair[0]),
"native and resi %i and name ca" % (pair[1]))
cmd.hide("labels", "noe")
cmd.color("grey", "native")
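# Usage sketch (inferred from the argv handling above, not from the repo):
# sys.argv[1] is the native PDB and the optional sys.argv[2] an NOE restraint
# file; the script must run where PyMOL's `cmd` session is live.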
|
H-uru/libhsplasma
|
Python/scripts/prp-checkfiles.py
|
Python
|
gpl-3.0
| 5,456
| 0.004032
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of HSPlasma.
#
# HSPlasma is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HSPlasma is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HSPlasma. If not, see <http://www.gnu.org/licenses/>.
# This script reads a bunch of prp and age files to check whether libHSPlasma shows any error messages or other problems.
# Call "./prp-checkfiles.py --help" for a list of options.
# by Diafero
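# Example invocation (illustrative; the flags are defined below):
#   ./prp-checkfiles.py -v -c -p /path/to/pages/*.prp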
import sys, glob, os
from optparse import OptionParser
import PyHSPlasma
width = 80
kTmpFile = "tmpcomparefile.prp"
def readObjects(location):
data = []
# read all the objects
for type in rm.getTypes(location):
while len(data) <= type:
data.append({}) # fill array
for key in rm.getKeys(location, type):
if key.exists() and key.isLoaded():
data[type][key.name] = key.object.stub.getData()
return data
def checkObjectsEqual(objs1, objs2, ignorePhysics):
if len(objs1) != len(objs2):
raise Exception('Number of types changed')
for type in range(0, len(objs1)):
typeName = PyHSPlasma.plFactory.ClassName(type, rm.getVer())
# compare the objects of this type
for name in objs1[type].keys():
if not name in objs2[type].keys():
print('Type [%04X]%s, object %s missing' % (type, typeName, name))
if ignorePhysics and type == PyHSPlasma.plFactory.kGenericPhysical: continue
obj1 = objs1[type][name]
obj2 = objs2[type][name]
if len(obj1) != len(obj2):
                print('Type [%04X]%s, object %s changed size (%d => %d)' % (type, typeName, name, len(obj1), len(obj2)))
if obj1 != obj2:
                print('Type [%04X]%s, object %s changed but stayed the same size' % (type, typeName, name))
# check if something got added
for name in objs2[type].keys():
if not name in objs1[type].keys():
print('Type [%04X]%s, object %s added' % (type, typeName, name))
def compareFiles(file1, file2, ignorePhysics):
# read old objects
location = rm.ReadPage(file1, True).location
oldObjects = readObjects(location)
rm.UnloadPage(location)
# read new objects
location = rm.ReadPage(file2, True).location
newObjects = readObjects(location)
rm.UnloadPage(location)
# now compare the objects
checkObjectsEqual(oldObjects, newObjects, ignorePhysics)
def overprint(text):
sys.stdout.write("\r"+text+(" "*(width-len(text))))
sys.stdout.flush()
### Main app
parser = OptionParser()
parser.add_option("-v", "--verbose",
action="count", dest="verbose", default=0,
help="If set one time, warnings are printed. If set two or more times, all debug messages are printed.")
parser.add_option("-c", "--check-repack",
action="store_true", dest="checkrepack", default=False,
help="Re-pack the prp files and compare to the original file (does nothing for age files)")
parser.add_option("-k", "--keep-repacked",
action="store_true", d
|
est="keeprepack", default=False,
help="Do not remove the temporary repacked file (h
|
as no effect if --check-repack is not given)")
parser.add_option("-p", "--ignore-physics",
action="store_true", dest="ignorephysics", default=False,
help="Do not compare re-packed physicals (has no effect if --check-repack is not given)")
(options, args) = parser.parse_args()
# set verbose level
if options.verbose >= 2:
PyHSPlasma.plDebug.Init(PyHSPlasma.plDebug.kDLAll)
elif options.verbose == 1:
PyHSPlasma.plDebug.Init(PyHSPlasma.plDebug.kDLWarning)
else: # options.verbose == 0
PyHSPlasma.plDebug.Init(PyHSPlasma.plDebug.kDLError)
# read files
rm = PyHSPlasma.plResManager()
for files in args:
for file in glob.iglob(files): # do the globbing on Windows, too
overprint("Reading "+file+"...")
if file.lower().endswith(".prp"):
page = rm.ReadPage(file)
if options.checkrepack:
overprint("Writing "+file+"...")
rm.WritePage(kTmpFile, page)
rm.UnloadPage(page.location)
if options.checkrepack:
overprint("Comparing "+file+"...")
compareFiles(file, kTmpFile, options.ignorephysics)
if not options.keeprepack: os.remove(kTmpFile)
elif file.lower().endswith(".age"):
age = rm.ReadAge(file, True) # readPages=True
for pageNum in range(0, age.getNumPages()):
loc = age.getPageLoc(pageNum, rm.getVer())
page = rm.FindPage(loc)
                if page is None:
raise Exception("Unable to completely load age "+age.name+": Can't find page "+str(loc))
rm.UnloadAge(age.name)
else:
print("Error: Unknown file type!")
overprint("Done!")
sys.stdout.write("\n")
|
arielalmendral/ert
|
python/python/ert_gui/shell/libshell/shell_collection.py
|
Python
|
gpl-3.0
| 5,721
| 0.002972
|
import shlex
import textwrap
from ert_gui.shell.libshell import autoCompleteList, ShellFunction, ShellProperty, widthAsPercentageOfConsoleWidth, getTerminalSize
class ShellCollection(object):
command_help_message = "The command: '%s' supports the following keywords:"
def __init__(self, name, parent=None, description="No description available"):
super(ShellCollection, self).__init__()
self.__name = name
self.__parent = None
self.__description = description
if parent is not None:
self.setParent(parent)
parent.addChild(self)
self.__collection = {}
self.__model_tracker = {}
self.__children = []
def setParent(self, parent):
if not hasattr(parent, "shellContext"):
raise ValueError("Parent is missing function: shellContext()")
if not hasattr(parent, "lastCommandFailed"):
raise ValueError("Parent is missing function: lastCommandFailed()")
setattr(parent, "do_%s" % self.name, self.doKeywords)
setattr(parent, "complete_%s" % self.name, self.completeKeywords)
setattr(parent, "help_%s" % self.name, self.helpKeywords)
self.__parent = parent
def addChild(self, child):
self.__children.append(child)
def cleanup(self):
for child in self.__children:
child.cleanup()
def addCollection(self, collection):
"""
:type collection: ShellCollection
"""
self.__collection[collection.name] = collection
collection.setParent(self)
def addProperty(self, property):
"""
:type property: ShellProperty
"""
self.__collection[property.name] = property
property.setParent(self)
def addFunction(self, function):
"""
:type function: ShellFunction
"""
self.__collection[function.name] = function
function.setParent(self)
def addShellProperty(self, name, getter, setter=None, validator=None, completer=None, help_arguments=None, help_message=None, pretty_attribute=None, model=None):
""" @rtype: ShellProperty """
shell_property = ShellProperty(name, getter, setter, validator, completer, help_arguments, help_message, pretty_attribute)
self.addProperty(shell_property)
if model is None:
model = self
self.__model_tracker[name] = model
return shell_property
def getModelForProperty(self, property_name):
return self.__model_tracker[property_name]
def addShellFunction(self, name, function, completer=None, help_arguments=None, help_message=None, model=None):
""" @rtype: ShellFunction """
func = ShellFunction(name, function, completer, help_arguments, help_message)
self.addFunction(func)
if model is None:
model = self
self.__model_tracker[name] = model
return func
def getModelForFunction(self, name):
return self.__model_tracker[name]
@property
def name(self):
return self.__name
def shellContext(self):
""" :rtype: ert_gui.shell.libshell.ShellContext """
return self.__parent.shellContext()
def lastCommandFailed(self, message):
self.__parent.lastCommandFailed(message)
def findKeywords(self):
return self.__collection.keys()
def completeKeywords(self, text, line, begidx, endidx):
arguments = shlex.split(line)
assert arguments[0] == self.name
line = line[len(self.name) + 1:]
begidx = begidx - len(self.name) + 1
endidx = endidx - len(self.name) + 1
keyword, sep, arguments = line.partition(' ')
if begidx >= len(keyword) and keyword in self.findKeywords():
if hasattr(self, "complete_%s" % keyword):
func = getattr(self, "complete_%s" % keyword)
return func(text, line, begidx, endidx)
else:
return []
else:
return autoCompleteList(text, self.findKeywords())
def doKeywords(self, line):
keyword, sep, arguments = line.partition(' ')
if keyword.strip() == "":
self.printGuidance()
elif keyword in self.__collection:
            func = getattr(self, "do_%s" % keyword)
return func(arguments)
else:
self.lastCommandFailed("Unknown keyword: '%s'" % keyword)
self.printGuidance()
def printGuidance(self):
print(self.command_help_message % self.name)
self.shellContext().shell().columnize(self.findKeywords(), getTerminalSize()[0])
def helpKeywords(self):
print(self.command_help_message % self.name)
keywords = self.findKeywords()
keyword_column_width = widthAsPercentageOfConsoleWidth(20)
parameter_column_width = widthAsPercentageOfConsoleWidth(30)
help_column_width = widthAsPercentageOfConsoleWidth(48)
help_format = " %-" + str(keyword_column_width) + "s %-" + str(parameter_column_width) + "s %-" + str(help_column_width) + "s"
print(help_format % ("Keyword", "Parameter(s)", "Help"))
for keyword in keywords:
message = "No help available!"
parameters = None
if hasattr(self, "help_tuple_%s" % keyword):
func = getattr(self, "help_tuple_%s" % keyword)
_, parameters, message = func()
message = textwrap.wrap(message, help_column_width)
print(help_format % (keyword, parameters, message[0]))
if len(message) > 1:
for line in message[1:]:
print(help_format % ("", "", line))
|
ihidalgo/uip-prog3
|
Parciales/practicas/kivy-designer-master/designer/buildozer_spec_editor.py
|
Python
|
mit
| 8,139
| 0.000614
|
import os
import json
import tempfile
import webbrowser
import designer
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import ObjectProperty, ConfigParser, StringProperty
from kivy.uix.settings import Settings, InterfaceWithSidebar, MenuSidebar,\
ContentPanel, SettingsPanel
from designer.uix.settings import SettingList, SettingDict
from pygments.lexers.configs import IniLexer
class SpecContentPanel(ContentPanel):
def on_current_uid(self, *args):
result = super(SpecContentPanel, self).on_current_uid(*args)
if isinstance(self.current_panel, SpecCodeInput):
self.current_panel.load_spec()
return result
class SpecMenuSidebar(MenuSidebar):
def on_selected_uid(self, *args):
'''(internal) unselects any currently selected menu buttons, unless
they represent the current panel.
'''
for button in self.buttons_layout.children:
button.selected = button.uid == self.selected_uid
class SpecEditorInterface(InterfaceWithSidebar):
def open_buildozer_docs(self, *args):
webbrowser.open('http://buildozer.readthedocs.org')
class SpecSettingsPanel(SettingsPanel):
def get_value(self, section, key):
'''Return the value of the section/key from the :attr:`config`
ConfigParser instance. This function is used by :class:`SettingItem` to
get the value for a given section/key.
If you don't want to use a ConfigParser instance, you might want to
override this function.
'''
config = self.config
if not config:
return
if config.has_option(section, key):
return config.get(section, key)
else:
return ''
def set_value(self, section, key, value):
        # Some keys are not enabled by default in .spec. If the value is empty
        # and the key is not in .spec, we don't need to save it.
if not value and not self.config.has_option(section, key):
return False
super(SpecSettingsPanel, self).set_value(section, key, value)
class SpecCodeInput(BoxLayout):
text_input = ObjectProperty(None)
'''CodeInput with buildozer.spec text.
Instance of :class:`kivy.config.ObjectProperty` and defaults to None
'''
lbl_error = ObjectProperty(None)
'''(internal) Label to display errors.
Instance of :class:`kivy.config.ObjectProperty` and defaults to None
'''
spec_path = StringProperty('')
'''buildozer.spec path.
Instance of :class:`kivy.config.StringProperty` and defaults to ''
'''
__events__ = ('on_change', )
def __init__(self, **kwargs):
super(SpecCodeInput, self).__init__(**kwargs)
self.text_input.lexer = IniLexer()
def load_spec(self, *args):
'''Read the buildozer.spec and update the CodeInput
'''
self.lbl_error.color = [0, 0, 0, 0]
self.text_input.text = open(self.spec_path, 'r').read()
def _save_spec(self, *args):
        '''Try to save the spec file. If there is an error, show the label.
If not, save the file and dispatch on_change
'''
designer = App.get_running_app().root
designer.project_watcher.stop()
f = tempfile.NamedTemporaryFile()
f.write(self.text_input.text)
try:
cfg = ConfigParser()
cfg.read(f.name)
except Exception:
self.lbl_error.color = [1, 0, 0, 1]
else:
spec = open(self.spec_path, 'w')
spec.write(self.text_input.text)
spec.close()
self.dispatch('on_change')
f.close()
designer.project_watcher.start_watching(
designer.project_loader.proj_dir)
def on_change(self, *args):
'''Event handler to dispatch a .spec modification
'''
pass
class BuildozerSpecEditor(Settings):
'''Subclass of :class:`kivy.uix.settings.Settings` responsible for
the UI editor of buildozer spec
'''
config_parser = ObjectProperty(None)
'''Config Parser for this class. Instance
of :class:`kivy.config.ConfigParser`
'''
def __init__(self, **kwargs):
super(BuildozerSpecEditor, self).__init__(**kwargs)
self.register_type('dict', SettingDict)
self.register_type('list', SettingList)
self.SPEC_PATH = ''
self.proj_dir = ''
self.config_parser = ConfigParser.get_configparser("buildozer_spec")
if self.config_parser is None:
self.config_parser = ConfigParser(name="buildozer_spec")
def load_settings(self, proj_dir):
'''This function loads project settings
'''
self.interface.menu.buttons_layout.clear_widgets()
self.proj_dir = proj_dir
self.SPEC_PATH = os.path.join(proj_dir, 'buildozer.spec')
_dir = os.path.dirname(designer.__file__)
_dir = os.path.split(_dir)[0]
self.config_parser.read(self.SPEC_PATH)
self.add_json_panel('Application', self.config_parser,
os.path.join(_dir, 'designer',
'settings', 'buildozer_spec_app.json'))
self.add_json_panel('Android', self.config_parser,
os.path.join(_dir, 'designer',
'settings', 'buildozer_spec_android.json'))
self.add_json_panel('iOS', self.config_parser,
os.path.join(_dir, 'designer',
'settings', 'buildozer_spec_ios.json'))
self.add_json_panel('Buildozer', self.config_parser,
os.path.join(_dir, 'designer',
'settings', 'buildozer_spec_buildozer.json'))
raw_spec = SpecCodeInput(spec_path=self.SPEC_PATH)
raw_spec.bind(on_change=self.on_spec_changed)
self.interface.add_panel(raw_spec, "buildozer.spec", raw_spec.uid)
menu = self.interface.menu
menu.selected_uid = menu.buttons_layout.children[-1].uid
def on_spec_changed(self, *args):
self.load_settings(self.proj_dir)
# force to show the last panel
menu = self.interface.menu
menu.selected_uid = menu.buttons_layout.children[0].uid
def create_json_panel(self, title, config, filename=None, data=None):
'''Override the original method to use the custom SpecSettingsPanel
'''
if filename is None and data is None:
raise Exception('You must specify either the filename or data')
if filename is not None:
with open(filename, 'r') as fd:
data = json.loads(fd.read())
else:
data = json.loads(data)
if type(data) != list:
raise ValueError('The first element must be a list')
panel = SpecSettingsPanel(title=title, settings=self, config=config)
for setting in data:
# determine the type and the class to use
            if 'type' not in setting:
                raise ValueError('A setting is missing the "type" element')
ttype = setting['type']
cls = self._types.get(ttype)
if cls is None:
raise ValueError(
'No class registered to handle the <%s> type' %
setting['type'])
            # create an instance of the class, without the type attribute
del setting['type']
str_settings = {}
for key, item in setting.items():
str_settings[str(key)] = item
instance = cls(panel=panel, **str_settings)
# instance created, add to the panel
panel.add_widget(instance)
return panel
def on_config_change(self, *args):
designer = App.get_running_app().root
designer.project_watcher.stop()
self.config_parser.write()
super(BuildozerSpecEditor, self).on_config_change(*args)
designer.project_watcher.start_watching(
designer.project_loader.proj_dir)
|
MuhammadAlkarouri/hug
|
benchmarks/http/pyramid_test.py
|
Python
|
mit
| 281
| 0
|
from pyramid.view import view_config
from pyramid.config import Configurator
@view_config(route_name='text', renderer='string')
def text(request):
return 'Hello, World!'
config = Configurator()
config.add_route('text', '/text')
config.scan()
app = config.make_wsgi_app()
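# One way to serve this WSGI app locally (a sketch; host/port are arbitrary):
#
# from wsgiref.simple_server import make_server
# make_server('0.0.0.0', 8080, app).serve_forever()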
|
samrushing/caesure
|
docs/match.py
|
Python
|
bsd-2-clause
| 11,932
| 0.011314
|
# -*- Python -*-
# XXX originally from the python implementation of irken.
# See "The Implementation of Functional Programming Languages",
# Chapter 5: "Efficient Compilation of Pattern-Matching".
# http://research.microsoft.com/en-us/um/people/simonpj/papers/slpj-book-1987/
#
# Thanks for the hint, OCaml people! (Xavier Leroy?) They were kind enough to put this reference in
# their source code (ocaml/bytecomp/matching.ml), otherwise I may have never found out about this
# book. And thanks to Simon Peyton-Jones for putting his book online.
is_a = isinstance
from pdb import set_trace as trace
from pprint import pprint as pp
class variable:
# creates a binding
def __init__ (self, name):
self.name = name
def __repr__ (self):
return '<%s>' % (self.name,)
class literal:
# matches a literal
def __init__ (self, value):
self.value = value
if is_a (value, VAR):
import pdb; pdb.set_trace()
def __repr__ (self):
return 'L%s' % (repr(self.value))
def __cmp__ (self, other):
if is_a (other, literal):
v = self.value
o = other.value
#return cmp ((v.kind,v.value), (o.kind,o.value))
return cmp (v, o)
else:
return -1
class constructor:
# matches a constructor
def __init__ (self, name, subs):
self.datatype, self.alt =
|
name.split (':')
self.subs = subs
def __len__ (self):
# arity of this constructor
ret
|
urn len (self.subs)
def __repr__ (self):
return '(%s/%s %s)' % (self.datatype, self.alt, ' '.join ([repr(x) for x in self.subs]))
# bad match
class MatchError (Exception):
pass
class IncompleteMatch (Exception):
pass
FAIL = ['%%fail']
ERROR = ['%%match-error']
# The next step in this code is to try to optimize the generated tree, which should be a matter of
# using heuristics to pick which pattern out of several to begin with. This code always starts
# with the left-most pattern, and descends recursively; see first_pats_are() below.
class compiler:
def __init__ (self, context):
self.context = context
self.gensym_counter = 0
def gensym (self):
c = self.gensym_counter
self.gensym_counter += 1
return 'm%d' % (c,)
def compile (self, rules, vars):
# how many pattern args?
nrules = len (rules)
pats, result = rules[0]
npats = len (pats)
#vars = [ self.gensym() for x in range (npats) ]
for pats, result in rules[1:]:
# must have the same number of patterns in each
assert (len(pats) == npats)
rules0 = []
for pats, code in rules:
kinds = [self.kind (x) for x in pats]
rules0.append ((kinds, code))
return vars, self.match (vars, rules0, ERROR)
def kind (self, p):
if is_a (p, list) or is_a (p, tuple):
if is_a (p, list):
what = 'list'
else:
what = 'tuple'
if len(p) == 0:
# () -> (list:nil)
return constructor ('%s:nil' % what, [])
elif is_a (p[0], list) and p[0][0] == 'colon' and len(p[0]) == 3:
# a constructor
return constructor ('%s:%s' % (p[0][1], p[0][2]), [self.kind (x) for x in p[1:]])
else:
# (a b . c) => (list:cons ...)
# XXX create a metavariable for this dot
if p[0] == '.':
# cdr
return self.kind (p[1])
else:
return constructor ('%s:cons' % what, [self.kind (p[0]), self.kind (p[1:])])
elif is_a (p, VAR):
return variable (p)
else:
return literal (p)
def first_pats_are (self, rules, kind):
# are the first patterns in each rule of <kind>?
for pats, code in rules:
if not is_a (pats[0], kind):
return False
else:
return True
def match (self, vars, rules, default):
#print '-------- match -------------'
#pp ((vars, rules, default))
# the empty rule
if not vars:
if len(rules):
empty_pat, code = rules[0]
return code
else:
return default
        # if every rule begins with a variable
if self.first_pats_are (rules, variable):
return self.variable_rule (vars, rules, default)
# if every rule is a constructor (i.e., no variables)
if self.first_pats_are (rules, constructor):
return self.constructor_rule (vars, rules, default)
# if every rule is a constant
if self.first_pats_are (rules, literal):
return self.constant_rule (vars, rules, default)
# we have a mixture of variables and constructors..
return self.mixture_rule (vars, rules, default)
def subst (self, var0, var1, code):
# this will record a subst to be applied during node building (nodes.py)
if var1 == '_':
# unless it's a wildcard, no need.
return code
elif is_a (code, list) and len(code) and code[0] == 'let_subst':
return ['let_subst', code[1] + [(var1, var0)], code[2]]
else:
return ['let_subst', [(var1, var0)], code]
def variable_rule (self, vars, rules, default):
# if every rule begins with a variable, we can remove that column
# from the set of patterns and substitute the var within each body.
var = vars[0]
vars = vars[1:]
rules0 = []
for pats, code in rules:
rules0.append ((pats[1:], self.subst (var, pats[0].name, code)))
return self.match (vars, rules0, default)
def fatbar (self, e1, e2):
if e1 == FAIL:
return e2
elif e2 == FAIL:
return e1
else:
return ['%%fatbar', e1, e2]
def get_arity (self, rules):
# given a set of polymorphic variant rules:
# 1) compute the constructor arity
# 2) verify that they're all the same
arity = len (rules[0][0][0])
for pats, code in rules[1:]:
if len(pats[0]) != arity:
raise MatchError ("arity mismatch in polymorphic variant pattern", rules)
return arity
def constructor_rule (self, vars, rules, default):
# Note: this rule is used for normal constructors *and* polymorphic variants.
# ok, group them by constructor (retaining the order within each constructor alt).
alts = {}
datatype = rules[0][0][0].datatype
if datatype != 'None':
dt = self.context.datatypes[datatype]
else:
# polymorphic variant
dt = None
for pats, code in rules:
alt = pats[0].alt
# XXX raise this as a real syntax error...
assert (pats[0].datatype == datatype)
if not alts.has_key (alt):
alts[alt] = [(pats, code)]
else:
alts[alt].append ((pats, code))
cases = []
if default != ERROR:
default0 = FAIL
else:
default0 = default
for alt, rules0 in alts.iteritems():
# new variables to stand for the fields of the constructor
if dt:
arity = dt.arity (alt)
else:
arity = self.get_arity (rules0)
vars0 = [self.gensym() for x in range (arity)]
wild = [True for x in vars0]
rules1 = []
for pats, code in rules0:
rules1.append ((pats[0].subs + pats[1:], code))
if len (pats[0].subs) != arity:
raise MatchError ("arity mismatch in variant pattern", rules0)
for i in range (len (pats[0].subs)):
sub = pats[0].subs[i]
if not (is_a (sub, variable) and sub.name == '_'):
wild[i] = False
|
Captain-Coder/tribler
|
Tribler/Core/Modules/restapi/events_endpoint.py
|
Python
|
lgpl-3.0
| 13,739
| 0.005968
|
from twisted.web import server, resource
from Tribler.Core.Modules.restapi.util import convert_db_channel_to_json, convert_search_torrent_to_json, \
fix_unicode_dict
from Tribler.Core.simpledefs import (NTFY_CHANNELCAST, SIGNAL_CHANNEL, SIGNAL_ON_SEARCH_RESULTS, SIGNAL_TORRENT,
NTFY_UPGRADER, NTFY_STARTED, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_INSERT,
NTFY_NEW_VERSION, NTFY_FINISHED, NTFY_TRIBLER, NTFY_UPGRADER_TICK, NTFY_CHANNEL,
NTFY_DISCOVERED, NTFY_TORRENT, NTFY_ERROR, NTFY_DELETE, NTFY_MARKET_ON_ASK,
NTFY_UPDATE, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_TRANSACTION_COMPLETE,
NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID_TIMEOUT,
NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT,
SIGNAL_RESOURCE_CHECK, SIGNAL_LOW_SPACE, NTFY_CREDIT_MINING, STATE_SHUTDOWN)
import Tribler.Core.Utilities.json_util as json
from Tribler.Core.version import version_id
class EventsEndpoint(resource.Resource):
"""
Important events in Tribler are returned over the events endpoint. This connection is held open. Each event is
pushed over this endpoint in the form of a JSON dictionary. Each JSON dictionary contains a type field that
indicates the type of the event. Individual events are separated by a newline character (\n).
Currently, the following events are implemented:
- events_start: An indication that the event socket is opened and that the server is ready to push events. This
includes information about whether Tribler has started already or not and the version of Tribler used.
- search_result_channel: This event dictionary contains a search result with a channel that has been found.
- search_result_torrent: This event dictionary contains a search result with a torrent that has been found.
- upgrader_started: An indication that the Tribler upgrader has started.
- upgrader_finished: An indication that the Tribler upgrader has finished.
- upgrader_tick: An indication that the state of the upgrader has changed. The dictionary contains a human-readable
string with the new state.
- watch_folder_corrupt_torrent: This event is emitted when a corrupt .torrent file in the watch folder is found.
The dictionary contains the name of the corrupt torrent file.
- new_version_available: This event is emitted when a new version of Tribler is available.
    - tribler_started: An indicator that Tribler has completed the startup procedure and is ready to use.
- channel_discovered: An indicator that Tribler has discovered a new channel. The event contains the name,
description and dispersy community id of the discovered channel.
- torrent_discovered: An indicator that Tribler has discovered a new torrent. The event contains the infohash, name,
      list of trackers, list of files with name and size, and the dispersy community id of the discovered torrent.
- torrent_removed_from_channel: An indicator that a torrent has been removed from a channel. The event contains
the infohash and the dispersy id of the channel which contained the removed torrent.
- torrent_finished: A specific torrent has finished downloading. The event includes the infohash and name of the
torrent that has finished downloading.
- torrent_error: An error has occurred during the download process of a specific torrent. The event includes the
infohash and a readable string of the error message.
- tribler_exception: An exception has occurred in Tribler. The event includes a readable string of the error.
- market_ask: Tribler learned about a new ask in the market. The event includes information about the ask.
- market_bid: Tribler learned about a new bid in the market. The event includes information about the bid.
- market_ask_timeout: An ask has expired. The event includes information about the ask.
    - market_bid_timeout: A bid has expired. The event includes information about the bid.
- market_transaction_complete: A transaction has been completed in the market. The event contains the transaction
that was completed.
    - market_payment_received: We received a payment in the market. The event contains the payment information.
    - market_payment_sent: We sent a payment in the market. The event contains the payment information.
    - market_iom_input_required: The Internet-of-Money module requires user input (like a password or challenge
response).
"""
def __init__(self, session):
resource.Resource.__init__(self)
self.session = session
self.events_requests = []
self.infohashes_sent = set()
self.channel_cids_sent = set()
self.session.add_observer(self.on_search_results_channels, SIGNAL_CHANNEL, [SIGNAL_ON_SEARCH_RESULTS])
self.session.add_observer(self.on_search_results_torrents, SIGNAL_TORRENT, [SIGNAL_ON_SEARCH_RESULTS])
self.session.add_observer(self.on_upgrader_started, NTFY_UPGRADER, [NTFY_STARTED])
self.session.add_observer(self.on_upgrader_finished, NTFY_UPGRADER, [NTFY_FINISHED])
self.session.add_observer(self.on_upgrader_tick, NTFY_UPGRADER_TICK, [NTFY_STARTED])
self.session.add_observer(self.on_watch_folder_corrupt_torrent,
NTFY_WATCH_FOLDER_CORRUPT_TORRENT, [NTFY_INSERT])
self.session.add_observer(self.on_new_version_available, NTFY_NEW_VERSION, [NTFY_INSERT])
self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED])
self.session.add_observer(self.on_channel_discovered, NTFY_CHANNEL, [NTFY_DISCOVERED])
self.session.add_observer(self.on_torrent_discovered, NTFY_TORRENT, [NTFY_DISCOVERED])
self.session.add_observer(self.on_torrent_removed_from_channel, NTFY_TORRENT, [NTFY_DELETE])
self.session.add_observer(self.on_torrent_finished, NTFY_TORRENT, [NTFY_FINISHED])
self.session.add_observer(self.on_torrent_error, NTFY_TORRENT, [NTFY_ERROR])
self.session.add_observer(self.on_market_ask, NTFY_MARKET_ON_ASK, [NTFY_UPDATE])
self.session.add_observer(self.on_market_bid, NTFY_MARKET_ON_BID, [NTFY_UPDATE])
self.session.add_observer(self.on_market_ask_timeout, NTFY_MARKET_ON_ASK_TIMEOUT, [NTFY_UPDATE])
self.session.add_observer(self.on_market_bid_timeout, NTFY_MARKET_ON_BID_TIMEOUT, [NTFY_UPDATE])
self.session.add_observer(self.on_market_transaction_complete,
NTFY_MARKET_ON_TRANSACTION_COMPLETE, [NTFY_UPDATE])
self.session.add_observer(self.on_market_payment_received, NTFY_MARKET_ON_PAYMENT_RECEIVED, [NTFY_UPDATE])
self.session.add_observer(self.on_market_payment_sent, NTFY_MARKET_ON_PAYMENT_SENT, [NTFY_UPDATE])
self.session.add_observer(self.on_resource_event, SIGNAL_RESOURCE_CHECK, [SIGNAL_LOW_SPACE])
self.session.add_observer(self.on_credit_minig_error, NTFY_CREDIT_MINING, [NTFY_ERROR])
self.session.add_observer(self.on_shutdown, NTFY_TRIBLER, [STATE_SHUTDOWN])
def write_data(self, message):
"""
Write data over the event socket if it's open.
"""
try:
message_str = json.dumps(message)
except UnicodeDecodeError:
# The message contains invalid characters; fix them
message_str = json.dumps(fix_unicode_dict(message))
if len(self.events_requests) == 0:
return
else:
[request.write(message_str + '\n') for request in self.events_requests]
def start_new_query(self):
self.infohashes_sent = set()
self.channel_cids_sent = set()
def on_search_results_channels(self, subject, changetype, objectID, results):
"""
Returns the channel search results over the events endpoint.
"""
query = ' '.join(results['keywords'])
for channel in results['result_list']:
channel_json = convert_db
|
grow/pygrow
|
grow/templates/tests_test.py
|
Python
|
mit
| 2,013
| 0
|
"""Tests for the template tests."""
import unittest
from grow.templates import tests
class BuiltinTestsTestCase(unittest.TestCase):
def test_subset_filter(self):
"""Provided value is a subset when has all the required values."""
value = ['banana', 'apple']
test_value = ['banana']
self.assertTrue(tests.is_subset_of(value, test_value))
def test_subset_filter_equal(self):
"""Provided value is a subset when equal."""
value = ['banana']
test_value = ['banana']
self.assertTrue(tests.is_subset_of(value, test_value))
def test_subset_filter_not(self):
"""Provided value is not a subset when missing values."""
value = ['banana']
test_value = ['banana', 'apple']
self.assertFalse(tests.is_subset_of(value, test_value))
def test_subset_filter_none(self):
"""Provided value is a subset when both are blank."""
value = []
test_value = []
self.assertTrue(tests.is_subset_of(value, test_value))
def test_superset_filter(self):
"""Provided value is a superset when missing some of the values."""
value = ['banana']
test_value = ['banana', 'apple']
self.assertTrue(tests.is_superset_of(value, test_value))
    def test_superset_filter_equal(self):
"""Provided value is a superset when equal."""
value = ['banana']
test_value = ['banana']
self.assertTrue(tests.is_superset_of(value, test_value))
def test_superset_filter_not(self):
"""Provided value is not a superset when has extra values."""
value = ['banana', 'apple']
        test_value = ['banana']
self.assertFalse(tests.is_superset_of(value, test_value))
def test_superset_filter_none(self):
"""Provided value is a superset when both are blank."""
value = []
test_value = []
self.assertTrue(tests.is_superset_of(value, test_value))
if __name__ == '__main__':
unittest.main()
|
anurag03/integration_tests
|
cfme/tests/containers/test_cockpit.py
|
Python
|
gpl-2.0
| 2,473
| 0.002426
|
import pytest
from cfme.containers.provider import ContainersProvider
from cfme.markers.env_markers.provider import providers
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.providers import ProviderFilter
from cfme.utils.wait import wait_for
pytestmark = [
pytest.mark.usefixtures('setup_provider'),
pytest.mark.tier(1),
pytest.mark.provider(gen_func=providers,
filters=[ProviderFilter(classes=[ContainersProvider],
required_flags=['cockpit'])],
scope='function')]
@pytest.mark.uncollectif(lambda appliance: appliance.version < "5.9",
reason='Cockpit Feature is only available in 5.9 and greater')
@pytest.mark.parametrize('cockpit', [False, True], ids=['disabled', 'enabled'])
def test_cockpit_button_access(appliance, provider, cockpit, request):
""" The test verifies the existence of cockpit "Web Console"
button on each node, click the button if enabled, verify no errors are di
|
splayed.
"""
request.addfinalizer(lambda: appliance.server.settings.disable_server_roles('cockpit_ws'))
if cockpit:
appliance.server.settings.enable_server_roles('cockpit_ws')
wait_for(lambda: appliance.server_roles['cockpit_ws'] is True, delay=10, timeout=300)
elif not cockpit:
appliance.server.settings.disable_server_roles('cockpit_ws')
wait_for(lambda: appliance.server_roles['cockpit_ws'] is False, delay=10, timeout=300)
else:
pytest.skip("Cockpit should be either enabled or disabled.")
collection = appliance.collections.container_nodes
nodes = collection.all()
for node in nodes:
view = (navigate_to(node, 'Details') if node else
pytest.skip("Could not determine node of {}".format(provider.name)))
if cockpit:
appliance.server.browser.refresh()
assert not view.toolbar.web_console.disabled
view.toolbar.web_console.click()
webconsole = node.vm_console
webconsole.switch_to_console()
assert not view.is_displayed
assert node.name in appliance.server.browser.url
webconsole.close_console_window()
assert view.is_displayed
view.flash.assert_no_error()
else:
appliance.server.browser.refresh()
assert view.toolbar.web_console.disabled
|
michal-stuglik/django-blastplus
|
manage.py
|
Python
|
mit
| 283
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
# testing environment
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "blastplus.test_settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
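# Typical invocation (illustrative): python manage.py test blastplus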
| |
mbayon/TFG-MachineLearning
|
venv/lib/python3.6/site-packages/numpy/distutils/exec_command.py
|
Python
|
mit
| 8,663
| 0.003925
|
"""
exec_command
Implements exec_command function that is (almost) equivalent to
commands.getstatusoutput function but on NT, DOS systems the
returned status is actually correct (though, the returned status
values may be different by a factor). In addition, exec_command
takes keyword arguments for (re-)defining environment variables.
Provides functions:
exec_command --- execute command in a specified directory and
in the modified environment.
find_executable --- locate a command using info from environment
variable PATH. Equivalent to posix `which`
command.
Author: Pearu Peterson <pearu@cens.ioc.ee>
Created: 11 January 2003
Requires: Python 2.x
Successfully tested on:
======== ============ =================================================
os.name sys.platform comments
======== ============ =================================================
posix linux2 Debian (sid) Linux, Python 2.1.3+, 2.2.3+, 2.3.3
PyCrust 0.9.3, Idle 1.0.2
posix linux2 Red Hat 9 Linux, Python 2.1.3, 2.2.2, 2.3.2
posix sunos5 SunOS 5.9, Python 2.2, 2.3.2
posix darwin Darwin 7.2.0, Python 2.3
nt win32 Windows Me
Python 2.3(EE), Idle 1.0, PyCrust 0.7.2
Python 2.1.1 Idle 0.8
nt win32 Windows 98, Python 2.1.1. Idle 0.8
nt win32 Cygwin 98-4.10, Python 2.1.1(MSC) - echo tests
fail i.e. redefining environment variables may
not work. FIXED: don't use cygwin echo!
Comment: also `cmd /c echo` will not work
but redefining environment variables do work.
posix cygwin Cygwin 98-4.10, Python 2.3.3(cygming special)
nt win32 Windows XP, Python 2.3.3
======== ============ =================================================
Known bugs:
* Tests, that send messages to stderr, fail when executed from MSYS prompt
because the messages are lost at some point.
"""
from __future__ import division, absolute_import, print_function
__all__ = ['exec_command', 'find_executable']
import os
import sys
import subprocess
from numpy.distutils.misc_util import is_sequence, make_temp_file
from numpy.distutils import log
def temp_file_name():
fo, name = make_temp_file()
fo.close()
return name
def get_pythonexe():
pythonexe = sys.executable
if os.name in ['nt', 'dos']:
fdir, fn = os.path.split(pythonexe)
fn = fn.upper().replace('PYTHONW', 'PYTHON')
pythonexe = os.path.join(fdir, fn)
assert os.path.isfile(pythonexe), '%r is not a file' % (pythonexe,)
return pythonexe
def find_executable(exe, path=None, _cache={}):
"""Return full path of a executable or None.
Symbolic links are not followed.
"""
key = exe, path
try:
return _cache[key]
except KeyError:
pass
log.debug('find_executable(%r)' % exe)
orig_exe = exe
if path is None:
path = os.environ.get('PATH', os.defpath)
if os.name=='posix':
realpath = os.path.realpath
else:
realpath = lambda a:a
if exe.startswith('"'):
exe = exe[1:-1]
suffixes = ['']
if os.name in ['nt', 'dos', 'os2']:
fn, ext = os.path.splitext(exe)
extra_suffixes = ['.exe', '.com', '.bat']
if ext.lower() not in extra_suffixes:
suffixes = extra_suffixes
if os.path.isabs(exe):
paths = ['']
else:
paths = [ os.path.abspath(p) for p in path.split(os.pathsep) ]
for path in paths:
fn = os.path.join(path, exe)
for s in suffixes:
f_ext = fn+s
if not os.path.islink(f_ext):
f_ext = realpath(f_ext)
if os.path.isfile(f_ext) and os.access(f_ext, os.X_OK):
log.info('Found executable %s' % f_ext)
_cache[key] = f_ext
return f_ext
log.warn('Could not locate executable %s' % orig_exe)
return None
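# Usage sketch (illustrative): find_executable('gfortran') returns the
# resolved absolute path of the first match on PATH, or None if absent.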
############################################################
def _preserve_environment( names ):
log.debug('_preserve_environment(%r)' % (names))
env = {}
for name in names:
env[name] = os.environ.get(name)
return env
def _update_environment( **env ):
log.debug('_update_environment(...)')
for name, value in env.items():
os.environ[name] = value or ''
def _supports_fileno(stream):
"""
Returns True if 'stream' supports the file descriptor and allows fileno().
"""
if hasattr(stream, 'fileno'):
try:
stream.fileno()
return True
except IOError:
return False
else:
return False
def exec_command(command, execute_in='', use_shell=None, use_tee=None,
_with_python = 1, **env ):
"""
Return (status,output) of executed command.
Parameters
----------
command : str
        A concatenated string of executable and arguments.
execute_in : str
Before running command ``cd execute_in`` and after ``cd -``.
use_shell : {bool, None}, optional
If True, execute ``sh -c command``. Default None (True)
    use_tee : {bool, None}, optional
If True use tee. Default None (True)
Returns
-------
res : str
Both stdout and stderr messages.
Notes
-----
On NT, DOS systems the returned status is correct for external commands.
Wild cards will not work for non-posix systems or when use_shell=0.
"""
log.debug('exec_command(%r,%s)' % (command,\
','.join(['%s=%r'%kv for kv in env.items()])))
if use_tee is None:
use_tee = os.name=='posix'
if use_shell is None:
use_shell = os.name=='posix'
execute_in = os.path.abspath(execute_in)
oldcwd = os.path.abspath(os.getcwd())
if __name__[-12:] == 'exec_command':
exec_dir = os.path.dirname(os.path.abspath(__file__))
elif os.path.isfile('exec_command.py'):
exec_dir = os.path.abspath('.')
else:
exec_dir = os.path.abspath(sys.argv[0])
if os.path.isfile(exec_dir):
exec_dir = os.path.dirname(exec_dir)
if oldcwd!=execute_in:
os.chdir(execute_in)
log.debug('New cwd: %s' % execute_in)
else:
log.debug('Retaining cwd: %s' % oldcwd)
oldenv = _preserve_environment( list(env.keys()) )
_update_environment( **env )
try:
st = _exec_command(command,
use_shell=use_shell,
use_tee=use_tee,
**env)
finally:
if oldcwd!=execute_in:
os.chdir(oldcwd)
log.debug('Restored cwd to %s' % oldcwd)
_update_environment(**oldenv)
return st
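# Usage sketch (illustrative command and expectations):
#   status, output = exec_command('echo hello')
#   assert status == 0 and 'hello' in output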
def _exec_command(command, use_shell=None, use_tee = None, **env):
"""
Internal workhorse for exec_command().
"""
if use_shell is None:
use_shell = os.name=='posix'
if use_tee is None:
use_tee = os.name=='posix'
if os.name == 'posix' and use_shell:
# On POSIX, subprocess always uses /bin/sh, override
sh = os.environ.get('SHELL', '/bin/sh')
if is_sequence(command):
command = [sh, '-c', ' '.join(command)]
else:
command = [sh, '-c', command]
use_shell = False
elif os.name == 'nt' and is_sequence(command):
# On Windows, join the string for CreateProcess() ourselves as
# subprocess does it a bit differently
command = ' '.join(_quote_arg(arg) for arg in command)
# Inherit environment by default
env = env or None
try:
proc = subprocess.Popen(command, shell=use_shell, env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True)
except EnvironmentError:
# Return 127, as os.spawn*() and /bin/sh do
return 127, ''
text, err = proc.communicate()
# Another historical oddity
if text[-1:] == '\n':
|
eCrowdMedia/ebooklib
|
docs/conf.py
|
Python
|
agpl-3.0
| 9,313
| 0.007516
|
# -*- coding: utf-8 -*-
#
# EbookLib documentation build configuration file, created by
# sphinx-quickstart on Fri Apr 25 11:49:49 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'EbookLib'
copyright = u'2014, Aleksandar Erkalovic'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.16'
# The full version, including alpha/beta/rc tags.
release = '0.16'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'EbookLibdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'EbookLib.tex', u'EbookLib Documentation',
u'Aleksandar Erkalovic', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ebooklib', u'EbookLib Documentation',
[u'Aleksandar Erkalovic'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'EbookLib', u'EbookLib Documentation',
u'Aleksandar Erkalovic', 'EbookLib', 'Python library for EPUB and Kindle formats.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'EbookLib'
epub_author = u'Aleksandar Erkalovic'
epub_publisher = u'Aleksandar Erkalovic'
epub_copyright = u'2014, Aleksandar Erkalovic'
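# To build HTML docs with this configuration (illustrative; run from docs/):
#   sphinx-build -b html . _build/html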
|
arsfeld/conduit
|
test/python-tests/TestSyncGConfFolder.py
|
Python
|
gpl-2.0
| 1,001
| 0.022977
|
#common sets up the conduit environment
from common import *
#setup test
|
test = SimpleSyncTest()
#Setup the key to sync
gconf = test.get_dataprovider("GConfTwoWay")
gconf.module.whitelist = ['/apps/metacity/general/num_workspaces']
folder = test.get_dataprovider("TestFolderTwoWay")
test.prepare(gconf, folder)
test.set_two_way_policy({"conflict":"ask","deleted":"ask"})
test.set_two_way_sync(True)
a = test.get_source_count()
b = test.get_sink_count()
ok("Got items to sync (%s,%s)" % (a,b), a == 1 and b == 0)
for i in (1,2,3,4):
if i > 1:
|
#Now modify the file
f = folder.module.get(
folder.module.get_all()[0]
)
f._set_file_mtime(datetime.datetime(2008,1,i))
a,b = test.sync()
aborted,errored,conflicted = test.get_sync_result()
ok("Sync #%s: Completed without conflicts" % i, aborted == False and errored == False and conflicted == False)
ok("Sync #%s: All items (%s,%s)" % (i,a,b), a == b and a == 1)
finished()
|
bendoran/vogen
|
src/vogen/as3parser.py
|
Python
|
mit
| 2,621
| 0.021366
|
import re
from vogen.voparser import VoParser, VoVariable
class AS3VoParser( VoParser ):
def parse( self, input_string, verbose ):
self.input_string = input_string
self.verbose = verbose
class_names = re.findall(r"class (\w+)", self.input_string );
#Find the class_name
if len(class_names) > 0 :
if verbose :
print "Found Class"+class_names[0]
class_name = class_names[0]
else:
print "Couldn't find class_name in Source File"
return False
#Find the properties
variables = list()
for variable in re.findall(r"private var ([\w\[\]]+) ?: ?(\w+)", self.input_string ):
vo_variable = VoVariable( variable[0], variable[1] )
variables.append( vo_variable )
if self.verbose :
print "Found Property: " + vo_variable.__str__()
if len( variables ) <= 0 :
print "Couldn't find any variables in Source File, can't build a vo"
return False
return self.build_class( variables, class_name)
def build_class(self, variables, class_name ):
return_text = self.input_string
#Rename all existing variables
for variable in variables :
return_text = return_text.replace( variable.variable_name, "_"+variable.variable_name )
#Strip the last bracket
return_text = return_text.rstrip('}')
return_text = return_text.rstrip('}\n')
#Print the Constructor
return_text += "\n\t\tpublic function " + class_name + "( "
for variable in variables :
return_text += "\n\t\t\t" + variable.variable_name + " : " + variable.variable_type + " ,"
return_text = return_text.rstrip(', ')
|
return_text += "\n\t\t){"
for variable in variables :
return_text += "\n\t\t\tthis._" + variable.variable_name + " = " + variable.variable_name + ";"
return_text += "\n\t\t}"
#A bit of White Space
|
return_text += "\n"
#Print the Getters
for variable in variables :
return_text += "\n\t\tpublic function get " + variable.variable_name + "() : " + variable.variable_type + "{"
return_text += "\n\t\t\treturn this._" + variable.variable_name + ";"
return_text += "\n\t\t}"
return_text += "\n"
return_text += "\n\t}"
return_text += "\n}"
return return_text
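# Illustrative sketch (hypothetical input, not from the source): given a class
# body containing
#   private var name : String;
# parse() detects the class name and collects VoVariable('name', 'String');
# build_class() then renames the member to _name and appends a constructor
# plus a getter for it.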
|
dextervip/rpv
|
GerenDisponibilidade/professor/tests.py
|
Python
|
gpl-3.0
| 810
| 0.007407
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from professor.models import Professor, DiaSemana, DisponibilidadeAula
|
class SimpleTest(TestCase):
def adicionaDisponibilidade(self):
p = Professor.objects.get(id=1)
p.informarDisponibilidade("qua", "14:30")
p = Professor.objects.get(id=1)
result = p.getDisponibilidadeAulas()
self.assertEqual(result[0].hora, "14:30")
self.assertEqual(result[0].diaSemana.nome_curto, "qua")
|
|
tiredpixel/pikka-bird-collector-py
|
pikka_bird_collector/collectors/mysql.py
|
Python
|
mit
| 5,160
| 0.00814
|
from pikka_bird_collector.parsers.table import Table as Parser
from .base_port_command import BasePortCommand, Base
class Mysql(BasePortCommand):
"""
Collector for MySQL (https://www.mysql.com/).
The collector is enabled whenever non-empty settings are passed.
Multiple instances running on the same box are supported; just specify
each port within settings.
By default, core status, master status, slave status, and slave hosts
are gathered. Optionally, variables can be gathered.
Because MySQL metrics are inconsistent in their representation of
booleans (e.g. `ON`, `YES`, `Yes`) and to minimise payload size and
|
downstream storage, all values are remapped if they match these. This
probably won't cause you problems, but if you encounter a string which is no
longer a string, this is probably why. :)
DEPENDENCIES:
mysql
Available in PATH.
SETTINGS:
minimal:
{
3306: None}
supported:
{
3306: {
'user': "
|
USER",
'password': "PASSWORD",
'collect': {
'master_status': False,
'slave_status': False,
'slave_hosts': False,
'variables': True}}}
"""
COLLECT_SETTING_DEFAULTS = {
'master_status': True,
'slave_hosts': True,
'slave_status': True,
'variables': False}
CMD_SHOW_MASTER_STATUS = 'SHOW MASTER STATUS'
CMD_SHOW_SLAVE_HOSTS = 'SHOW SLAVE HOSTS'
CMD_SHOW_SLAVE_STATUS = 'SHOW SLAVE STATUS'
CMD_SHOW_STATUS = 'SHOW /*!50002 GLOBAL */ STATUS'
CMD_SHOW_VARIABLES = 'SHOW VARIABLES'
PARSE_BOOLS = { # the stringy booleans are inconsistent
'ON': True,
'OFF': False,
'YES': True,
'NO': False,
'Yes': True,
'No': False}
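# Illustrative example (not from the source): a raw row parsed from
# `SHOW VARIABLES`, such as ('log_bin', 'ON'), would be emitted as
# {'log_bin': True} after this remapping.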
@staticmethod
def command_tool(port, settings, command):
settings = settings or {}
c = ['mysql',
'--host', '127.0.0.1', # socket not (yet) supported
'--port', port,
'--execute', command,
'--batch',
'--raw',
'--column-names']
if settings.get('user'):
c.append('--user=%s' % settings['user'])
if settings.get('password'):
c.append('--password=%s' % settings['password'])
return c
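# Usage sketch (hypothetical values): command_tool(3306, {'user': 'USER'},
# 'SHOW VARIABLES') would build
# ['mysql', '--host', '127.0.0.1', '--port', 3306,
#  '--execute', 'SHOW VARIABLES', '--batch', '--raw', '--column-names',
#  '--user=USER'].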
def collect_port(self, port, settings):
metrics = {}
o = self.command_output(port, settings, self.CMD_SHOW_STATUS)
parser = Parser(
converter_key=Base.parse_str_setting_key,
converter_value=Mysql.__parse_str_setting_value)
ms = parser.parse(o)
if len(ms):
metrics['status'] = ms
else:
return metrics # service down; give up
if self.collect_setting('master_status', settings):
o = self.command_output(port, settings, self.CMD_SHOW_MASTER_STATUS)
parser = Parser(
converter_key=Base.parse_str_setting_key,
converter_value=Mysql.__parse_str_setting_value,
tag_header_col='file')
ms = parser.parse(o)
if len(ms):
metrics['master_status'] = ms
if self.collect_setting('slave_status', settings):
o = self.command_output(port, settings, self.CMD_SHOW_SLAVE_STATUS)
parser = Parser(
converter_key=Base.parse_str_setting_key,
converter_value=Mysql.__parse_str_setting_value,
transpose=True)
ms = parser.parse(o)
if len(ms):
metrics['slave_status'] = ms
if self.collect_setting('slave_hosts', settings):
o = self.command_output(port, settings, self.CMD_SHOW_SLAVE_HOSTS)
parser = Parser(
converter_key=Base.parse_str_setting_key,
converter_value=Mysql.__parse_str_setting_value,
tag_header_col='server_id')
ms = parser.parse(o)
if len(ms):
metrics['slave_hosts'] = ms
if self.collect_setting('variables', settings):
o = self.command_output(port, settings, self.CMD_SHOW_VARIABLES)
parser = Parser(
converter_key=Base.parse_str_setting_key,
converter_value=Mysql.__parse_str_setting_value)
ms = parser.parse(o)
if len(ms):
metrics['variables'] = ms
return metrics
@staticmethod
def __parse_str_setting_value(value):
v = Base.parse_str_setting_value(value)
if v in Mysql.PARSE_BOOLS:
v = Mysql.PARSE_BOOLS[v]
return v
|
uvchik/pvlib-python
|
pvlib/test/test_atmosphere.py
|
Python
|
bsd-3-clause
| 5,239
| 0.005535
|
import itertools
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_allclose
from pvlib import atmosphere
from pvlib import solarposition
latitude, longitude, tz, altitude = 32.2, -111, 'US/Arizona', 700
|
times = pd.date_range(start='20140626', end='20140626', freq='6h', tz=tz)
ephem_data = solarposition.get_solarposition(times, latitude, longitude)
# need to add physical tests instead of just functional tests
def test_pres2alt():
atmosphere.pres2alt(100000)
def test_alt2pres():
atmosphere.alt2pres(1000)
@pytest.mark.parametrize("model",
['simple', 'kasten1966', 'youngirvine1967', 'kastenyoung1989',
'gueymard1993', 'young1994', 'pickering2002'])
def test_airmass(model):
|
out = atmosphere.relativeairmass(ephem_data['zenith'], model)
assert isinstance(out, pd.Series)
out = atmosphere.relativeairmass(ephem_data['zenith'].values, model)
assert isinstance(out, np.ndarray)
def test_airmass_scalar():
assert not np.isnan(atmosphere.relativeairmass(10))
def test_airmass_scalar_nan():
assert np.isnan(atmosphere.relativeairmass(100))
def test_airmass_invalid():
with pytest.raises(ValueError):
atmosphere.relativeairmass(ephem_data['zenith'], 'invalid')
def test_absoluteairmass():
relative_am = atmosphere.relativeairmass(ephem_data['zenith'], 'simple')
atmosphere.absoluteairmass(relative_am)
atmosphere.absoluteairmass(relative_am, pressure=100000)
def test_absoluteairmass_numeric():
atmosphere.absoluteairmass(2)
def test_absoluteairmass_nan():
np.testing.assert_equal(np.nan, atmosphere.absoluteairmass(np.nan))
def test_gueymard94_pw():
temp_air = np.array([0, 20, 40])
relative_humidity = np.array([0, 30, 100])
temps_humids = np.array(
list(itertools.product(temp_air, relative_humidity)))
pws = atmosphere.gueymard94_pw(temps_humids[:, 0], temps_humids[:, 1])
expected = np.array(
[ 0.1 , 0.33702061, 1.12340202, 0.1 ,
1.12040963, 3.73469877, 0.1 , 3.44859767, 11.49532557])
assert_allclose(pws, expected, atol=0.01)
@pytest.mark.parametrize("module_type,expect", [
('cdte', np.array(
[[ 0.9905102 , 0.9764032 , 0.93975028],
[ 1.02928735, 1.01881074, 0.98578821],
[ 1.04750335, 1.03814456, 1.00623986]])),
('monosi', np.array(
[[ 0.9776977 , 1.02043409, 1.03574032],
[ 0.98630905, 1.03055092, 1.04736262],
[ 0.98828494, 1.03299036, 1.05026561]])),
('polysi', np.array(
[[ 0.9770408 , 1.01705849, 1.02613202],
[ 0.98992828, 1.03173953, 1.04260662],
[ 0.99352435, 1.03588785, 1.04730718]])),
('cigs', np.array(
[[ 0.9745919 , 1.02821696, 1.05067895],
[ 0.97529378, 1.02967497, 1.05289307],
[ 0.97269159, 1.02730558, 1.05075651]])),
('asi', np.array(
[[ 1.0555275 , 0.87707583, 0.72243772],
[ 1.11225204, 0.93665901, 0.78487953],
[ 1.14555295, 0.97084011, 0.81994083]]))
])
def test_first_solar_spectral_correction(module_type, expect):
ams = np.array([1, 3, 5])
pws = np.array([1, 3, 5])
ams, pws = np.meshgrid(ams, pws)
out = atmosphere.first_solar_spectral_correction(pws, ams, module_type)
assert_allclose(out, expect, atol=0.001)
def test_first_solar_spectral_correction_supplied():
# use the cdte coeffs
coeffs = (0.87102, -0.040543, -0.00929202, 0.10052, 0.073062, -0.0034187)
out = atmosphere.first_solar_spectral_correction(1, 1, coefficients=coeffs)
expected = 0.99134828
assert_allclose(out, expected, atol=1e-3)
def test_first_solar_spectral_correction_ambiguous():
with pytest.raises(TypeError):
atmosphere.first_solar_spectral_correction(1, 1)
def test_kasten96_lt():
"""Test Linke turbidity factor calculated from AOD, Pwat and AM"""
amp = np.array([1, 3, 5])
pwat = np.array([0, 2.5, 5])
aod_bb = np.array([0, 0.1, 1])
lt_expected = np.array(
[[[1.3802, 2.4102, 11.6802],
[1.16303976, 2.37303976, 13.26303976],
[1.12101907, 2.51101907, 15.02101907]],
[[2.95546945, 3.98546945, 13.25546945],
[2.17435443, 3.38435443, 14.27435443],
[1.99821967, 3.38821967, 15.89821967]],
[[3.37410769, 4.40410769, 13.67410769],
[2.44311797, 3.65311797, 14.54311797],
[2.23134152, 3.62134152, 16.13134152]]]
)
lt = atmosphere.kasten96_lt(*np.meshgrid(amp, pwat, aod_bb))
assert np.allclose(lt, lt_expected, 1e-3)
return lt
def test_angstrom_aod():
"""Test Angstrom turbidity model functions."""
aod550 = 0.15
aod1240 = 0.05
alpha = atmosphere.angstrom_alpha(aod550, 550, aod1240, 1240)
assert np.isclose(alpha, 1.3513924317859232)
aod700 = atmosphere.angstrom_aod_at_lambda(aod550, 550, alpha)
assert np.isclose(aod700, 0.10828110997681031)
def test_bird_hulstrom80_aod_bb():
"""Test Bird_Hulstrom broadband AOD."""
aod380, aod500 = 0.22072480948195175, 0.1614279181106312
bird_hulstrom = atmosphere.bird_hulstrom80_aod_bb(aod380, aod500)
assert np.isclose(0.11738229553812768, bird_hulstrom)
|
dsavransky/miscpy
|
miscpy/PlotFun/logHist.py
|
Python
|
mit
| 1,435
| 0.013937
|
import matplotlib.pyplot as plt
import numpy as np
def logHist(X, N=30,fig=None, noclear=False, pdf=False, **kywds):
'''
Plot logarithmic histogram or probability density function from
sampled data.
Args:
X (numpy.ndarray): 1-D array of sampled values
N (Optional[int]): Number of bins (default 30)
fig (Optional[int]): Figure number (default None)
noclear (Optional[bool]): Clear figure (default False)
pdf (Optional[bool]): If True normalize by bin width (default False)
and display as curve instead of bar chart.
Note: results are always normalized by number of samples
**kywds: Arbitrary keyword arguments passed to matplotlib.pyplot.bar
(or matplotlib.pyplot.semilogx if pdf is True)
Returns:
|
x (ndarray): abscissa values of frequencies
n (ndarray): (normalized) frequency values
'''
x = np.logspace(np.log10(np.min(X)),np.log10(np.max(X)),N+1)
n,x = np.histogram(X,bins=x)
n = n/float(X.size)
plt.figure(fig)
if not noclear: plt.clf()
if pdf:
n /= np.diff(x)
x = x[:-1]+np.diff(x)/2
plt.semilogx(x,n,**kywds)
else:
|
plt.bar(x[:len(x)-1],n,width=np.diff(x),**kywds)
a = plt.gca()
a.set_xlim(10.**np.floor(np.log10(np.min(X))),10.**np.ceil(np.log10(np.max(X))))
a.set_xscale('log')
plt.axis()
return x,n
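# Minimal usage sketch (assumes synthetic log-normal sample data):
# X = np.random.lognormal(mean=0.0, sigma=1.0, size=1000)
# x, n = logHist(X, N=20, pdf=True)  # log-spaced bins, frequencies normalized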
|
sfu-fas/coursys
|
quizzes/management/commands/special_case_tutorial.py
|
Python
|
gpl-3.0
| 1,849
| 0.005408
|
"""
Create quiz TimeSpecialCase for all students in a particular lab/tutorial section.
Usage will be like:
./manage.py special_case_tutorial 2020su-cmpt-120-d1 q1 D101 '2021-10-07T09:30' '2021-10-07T10:30'
"""
import datetime
from django.core.management.base import BaseCommand
from django.db import transaction
from iso8601 import iso8601
from coredata.models import CourseOffering, Member
from quizzes.models import Quiz, TimeSpecialCase
def parse_datetime(s: str) -> datetime.datetime:
return iso8601.parse_date(s).replace(tzinfo=None)
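# For example, parse_datetime('2021-10-07T09:30') yields the naive
# datetime.datetime(2021, 10, 7, 9, 30), since the timezone info is stripped.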
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('offering_slug', type=str, help='CourseOffering slug')
parser.add_argument('activity_slug', type=str, help='the slug of the Activity with the quiz')
parser.add_argument('section', type=str, help='lab/tutorial section to modify')
parser.add_argument('start_time', type=parse_datetime, help='start time for this section')
|
parser.add_argument('end_time', type=parse_datetime, help='end time for this section')
def handle(self, *args, **options):
offering_slug = options['offering_slug']
activity_slug = options['activity_slug']
section = options['section']
|
start_time = options['start_time']
end_time = options['end_time']
offering = CourseOffering.objects.get(slug=offering_slug)
quiz = Quiz.objects.get(activity__slug=activity_slug, activity__offering=offering)
members = Member.objects.filter(offering=offering, role='STUD', labtut_section=section)
with transaction.atomic():
for m in members:
TimeSpecialCase.objects.update_or_create(
quiz=quiz, student=m,
defaults={'start': start_time, 'end': end_time}
)
|
Elec/django-recaptcha
|
captcha/widgets.py
|
Python
|
bsd-3-clause
| 1,285
| 0
|
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
from captcha import client
|
class ReCaptcha(forms.widgets.Widget):
if getattr(settings, "RECAPTCHA_NOCAPTCHA", False):
recaptcha_response_name = 'g-recaptcha-response'
recaptcha_challenge_name = 'g-recaptcha-response'
else:
|
recaptcha_challenge_name = 'recaptcha_challenge_field'
recaptcha_response_name = 'recaptcha_response_field'
def __init__(self, public_key=None, use_ssl=None, attrs={}, *args,
**kwargs):
self.public_key = public_key if public_key else \
settings.RECAPTCHA_PUBLIC_KEY
self.use_ssl = use_ssl if use_ssl is not None else getattr(
settings, 'RECAPTCHA_USE_SSL', False)
self.js_attrs = attrs
super(ReCaptcha, self).__init__(*args, **kwargs)
def render(self, name, value, attrs=None):
return mark_safe(u'%s' % client.displayhtml(
self.public_key,
self.js_attrs, use_ssl=self.use_ssl))
def value_from_datadict(self, data, files, name):
return [
data.get(self.recaptcha_challenge_name, None),
data.get(self.recaptcha_response_name, None)
]
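# Usage sketch (assumes RECAPTCHA_PUBLIC_KEY is configured in settings; the
# field choice here is hypothetical, a form would typically pair the widget
# with a matching captcha field):
# class MyForm(forms.Form):
#     captcha = forms.CharField(widget=ReCaptcha())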
|
Huyuwei/tvm
|
tests/python/relay/test_cpp_build_module.py
|
Python
|
apache-2.0
| 4,614
| 0.001734
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import tvm
from tvm import relay
from tvm.contrib.nvcc import have_fp16
def test_basic_build():
tgt = "llvm"
ctx = tvm.cpu()
# func
a = relay.var("a", dtype="float32", shape=(16, 8))
b = relay.var("b", dtype="float32", shape=(8, 8))
c = relay.var("c", dtype="float32", shape=(16, 8))
x = relay.nn.dense(a, b)
y = relay.nn.relu(x)
z = y + c
func = relay.Function([a, b, c], z)
A = tvm.nd.array(np.random.uniform(-1, 1, (16, 8)).astype("float32"), ctx=ctx)
B = tvm.nd.array(np.random.uniform(-1, 1, (8, 8)).astype("float32"), ctx=ctx)
C = tvm.nd.array(np.random.uniform(-1, 1, (16, 8)).astype("float32"), ctx=ctx)
params = {
"b" : B,
"c" : C
}
# build
targets = {
tvm.expr.IntImm("int32", ctx.device_type): tgt
}
g_json, mmod, params = relay.build(relay.Module.from_expr(func), targets, "llvm", params=params)
# test
rt = tvm.contrib.graph_runtime.create(g_json, mmod, ctx)
rt.set_input("a", A)
rt.load_params(relay.save_param_dict(params))
rt.run()
out = rt.get_output(0)
np.testing.assert_allclose(out.asnumpy(), np.maximum(np.dot(A.asnumpy(),
B.asnumpy().T),
0) + C.asnumpy(),
atol=1e-5, rtol=1e-5)
def test_fp16_build():
dtype = "float16"
if not tvm.module.enabled("cuda") or not tvm.gpu(0).exist:
print("skip because cuda is not enabled.")
return
ctx = tvm.gpu(0)
if dtype == "float16" and not have_fp16(ctx.compute_version):
print("skip because gpu does not support fp16")
return
x = relay.var("x", dtype=dtype, shape=(4, 4))
y = relay.var("y", dtype=dtype, shape=(4, 4))
z = x + y
func = relay.Function([x, y], z)
X = tvm.nd.array(np.random.uniform(-1, 1, (4, 4)).astype(dtype), ctx=ctx)
Y = tvm.nd.array(np.random.uniform(-1, 1, (4, 4)).astype(dtype), ctx=ctx)
params = {
"x": X,
"y": Y,
}
# build
g_json, mmod, params = relay.build(func, "cuda", params=params)
# test
rt = tvm.contrib.graph_runtime.create(g_json, mmod, ctx)
rt.load_params(relay.save_param_dict(params))
rt.run()
out = rt.get_output(0)
np.testing.assert_allclose(out.asnumpy(), X.asnumpy() + Y.asnumpy(),
atol=1e-5, rtol=1e-5)
def test_fp16_conversion():
def check_conversion(tgt, ctx):
if not tvm.module.enabled(tgt):
print("skip because {} is not enabled.".format(tgt))
return
elif tgt == "cuda" and ctx.exist and not have_fp16(ctx.compute_version):
print("skip becaus
|
e gpu does not support fp16")
r
|
eturn
n = 10
for (src, dst) in [('float32', 'float16'), ('float16', 'float32')]:
x = relay.var("x", relay.TensorType((n,), src))
y = x.astype(dst)
func = relay.Function([x], y)
# init input
X = tvm.nd.array(n * np.random.randn(n).astype(src) - n / 2)
# build
with relay.build_config(opt_level=1):
g_json, mmod, params = relay.build(relay.Module.from_expr(func), tgt)
# test
rt = tvm.contrib.graph_runtime.create(g_json, mmod, ctx)
rt.set_input("x", X)
rt.run()
out = rt.get_output(0)
np.testing.assert_allclose(out.asnumpy(), X.asnumpy().astype(dst),
atol=1e-5, rtol=1e-5)
for target, ctx in [('llvm', tvm.cpu()), ('cuda', tvm.gpu())]:
check_conversion(target, ctx)
if __name__ == "__main__":
test_basic_build()
test_fp16_build()
test_fp16_conversion()
|
mezz64/home-assistant
|
homeassistant/components/hangouts/__init__.py
|
Python
|
apache-2.0
| 4,741
| 0.001476
|
"""Support for Hangouts."""
import logging
from hangups.auth import GoogleAuthError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.conversation.util import create_matcher
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.helpers import dispatcher, intent
import homeassistant.helpers.config_validation as cv
# We need an import from .config_flow, without it .config_flow is never loaded.
from .config_flow import HangoutsFlowHandler # noqa: F401
from .const import (
CONF_BOT,
CONF_DEFAULT_CONVERSATIONS,
CONF_ERROR_SUPPRESSED_CONVERSATIONS,
CONF_INTENTS,
CONF_MATCHERS,
CONF_REFRESH_TOKEN,
CONF_SENTENCES,
DOMAIN,
EVENT_HANGOUTS_CONNECTED,
EVENT_HANGOUTS_CONVERSATIONS_CHANGED,
EVENT_HANGOUTS_CONVERSATIONS_RESOLVED,
INTENT_HELP,
INTENT_SCHEMA,
MESSAGE_SCHEMA,
SERVICE_RECONNECT,
SERVICE_SEND_MESSAGE,
SERVICE_UPDATE,
TARGETS_SCHEMA,
)
from .hangouts_bot import HangoutsBot
from .intents import HelpIntent
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_INTENTS, default={}): vol.Schema(
{cv.string: INTENT_SCHEMA}
),
vol.Optional(CONF_DEFAULT_CONVERSATIONS, default=[]): [TARGETS_SCHEMA],
vol.Optional(CONF_ERROR_SUPPRESSED_CONVERSATIONS, default=[]): [
TARGETS_SCHEMA
],
}
)
},
extra=vol.ALLOW_EXTRA,
)
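# Illustrative configuration.yaml snippet this schema would accept (the intent
# name and exact keys are hypothetical):
# hangouts:
#   intents:
#     HelloIntent:
#       sentences:
#         - "Hello"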
async def async_setup(hass, config):
"""Set up the Hangouts bot component."""
if (config := config.get(DOMAIN)) is None:
hass.data[DOMAIN] = {
CONF_INTENTS: {},
CONF_DEFAULT_CONVERSATIONS: [],
CONF_ERROR_SUPPRESSED_CONVERSATIONS: [],
}
return True
hass.data[DOMAIN] = {
CONF_INTENTS: config[CONF_INTENTS],
CONF_DEFAULT_CONVERSATIONS: config[CONF_DEFAULT_CONVERSATIONS],
CONF_ERROR_SUPPRESSED_CONVERSATIONS: config[
CONF_ERROR_SUPPRESSED_CONVERSATIONS
],
}
if (
hass.data[DOMAIN][CONF_INTENTS]
and INTENT_HELP not in hass.data[DOMAIN][CONF_INTENTS]
):
hass.data[DOMAIN][CONF_INTENTS][INTENT_HELP] = {CONF_SENTENCES: ["HELP"]}
for data in hass.data[DOMAIN][CONF_INTENTS].values():
matchers = []
for sentence in data[CONF_SENTENCES]:
matchers.append(create_matcher(sentence))
data[CONF_MATCHERS] = matchers
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass: HomeAssistant, config: ConfigEntry) -> bool:
"""Set up a conf
|
ig entry."""
try:
bot = HangoutsBot(
hass,
config.data.get(CONF_REFRESH_TOKEN),
hass.data[DOMAIN][CONF_INTENTS],
hass.data[DOMAIN][CONF_DEFAULT_CONVERSATIONS],
hass.data[DOMAIN][CONF_ERROR_SUPPRESSED_CONVERSATIONS],
)
hass.data[DOMAIN][CONF_BOT] = bot
except GoogleAuthError as exception:
_LOGGER.error("Hangouts failed to log in: %s", str(exception))
|
return False
dispatcher.async_dispatcher_connect(
hass, EVENT_HANGOUTS_CONNECTED, bot.async_handle_update_users_and_conversations
)
dispatcher.async_dispatcher_connect(
hass, EVENT_HANGOUTS_CONVERSATIONS_CHANGED, bot.async_resolve_conversations
)
dispatcher.async_dispatcher_connect(
hass,
EVENT_HANGOUTS_CONVERSATIONS_RESOLVED,
bot.async_update_conversation_commands,
)
config.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, bot.async_handle_hass_stop)
)
await bot.async_connect()
hass.services.async_register(
DOMAIN,
SERVICE_SEND_MESSAGE,
bot.async_handle_send_message,
schema=MESSAGE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_UPDATE,
bot.async_handle_update_users_and_conversations,
schema=vol.Schema({}),
)
hass.services.async_register(
DOMAIN, SERVICE_RECONNECT, bot.async_handle_reconnect, schema=vol.Schema({})
)
intent.async_register(hass, HelpIntent(hass))
return True
async def async_unload_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
"""Unload a config entry."""
bot = hass.data[DOMAIN].pop(CONF_BOT)
await bot.async_disconnect()
return True
|
dsarlis/Cloud-Burst
|
etl/process.py
|
Python
|
apache-2.0
| 258
| 0.007752
|
from sys import argv
|
for line in open(argv[1]):
tweet_id, hashtags, hashtags_count, user_id, created_at, followers_count, score, text = line.replace('\n', '').split('\t')
print ','.join([tweet_id, user_id, created_at, followers_count, score, text])
|
|
sagiss/sardana
|
src/sardana/taurus/qt/qtgui/extra_sardana/expdescription.py
|
Python
|
lgpl-3.0
| 19,906
| 0.00628
|
#!/usr/bin/env python
##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""This module provides widget for configuring the data acquisition and display of an experiment"""
__all__ = ["ExpDescriptionEditor"]
from taurus.external.qt import Qt
import copy
import taurus
import taurus.core
from taurus.qt.qtgui.base import TaurusBaseWidget
from taurus.qt.qtgui import resource
from sardana.taurus.qt.qtcore.tango.sardana.model import SardanaBaseProxyModel, SardanaTypeTreeItem
from taurus.qt.qtgui.util.ui import UILoadable
## Using a plain model and filtering and checking 'Acquirable' in item.itemData().interfaces is more elegant, but things don't get properly sorted...
#from taurus.qt.qtcore.tango.sardana.model import SardanaElementPlainModel
class SardanaAcquirableProxyModel(SardanaBaseProxyModel):
# ALLOWED_TYPES = 'Acquirable'
#
# def filterAcceptsRow(self, sourceRow, sourceParent):
# sourceModel = self.sourceModel()
|
# idx = sourceModel.index(sourceRow, 0, sourceParent)
# item = idx.internalPointer()
# return 'Acquirable' in item.itemData().interfaces
# ALLOWED_TYPES = ['Motor', 'CTExpChannel', 'ZeroDExpChannel', 'OneDExpChannel',
# 'TwoDExpChannel', 'ComChannel', 'IORegister', 'PseudoMotor',
|
# 'PseudoCounter']
from sardana.sardanadefs import ElementType, TYPE_ACQUIRABLE_ELEMENTS
ALLOWED_TYPES = [ElementType[t] for t in TYPE_ACQUIRABLE_ELEMENTS]
def filterAcceptsRow(self, sourceRow, sourceParent):
sourceModel = self.sourceModel()
idx = sourceModel.index(sourceRow, 0, sourceParent)
treeItem = idx.internalPointer()
if isinstance(treeItem, SardanaTypeTreeItem):
return treeItem.itemData() in self.ALLOWED_TYPES
return True
@UILoadable(with_ui='ui')
class ExpDescriptionEditor(Qt.QWidget, TaurusBaseWidget):
'''
A widget for editing the configuration of a experiment (measurement groups,
plot and storage parameters, etc).
It receives a Sardana Door name as its model and gets/sets the configuration
using the `ExperimentConfiguration` environmental variable for that Door.
'''
def __init__(self, parent=None, door=None, plotsButton=True):
Qt.QWidget.__init__(self, parent)
TaurusBaseWidget.__init__(self, 'ExpDescriptionEditor')
self.loadUi()
self.ui.buttonBox.setStandardButtons(Qt.QDialogButtonBox.Reset | Qt.QDialogButtonBox.Apply)
newperspectivesDict = copy.deepcopy(self.ui.sardanaElementTree.KnownPerspectives)
#newperspectivesDict[self.ui.sardanaElementTree.DftPerspective]['model'] = [SardanaAcquirableProxyModel, SardanaElementPlainModel]
newperspectivesDict[self.ui.sardanaElementTree.DftPerspective]['model'][0] = SardanaAcquirableProxyModel
self.ui.sardanaElementTree.KnownPerspectives = newperspectivesDict #assign a copy because if just a key of this class member was modified, all instances of this class would be affected
self.ui.sardanaElementTree._setPerspective(self.ui.sardanaElementTree.DftPerspective)
self._localConfig = None
self._originalConfiguration = None
self._dirty = False
self._dirtyMntGrps = set()
self.connect(self.ui.activeMntGrpCB, Qt.SIGNAL('activated (QString)'), self.changeActiveMntGrp)
self.connect(self.ui.createMntGrpBT, Qt.SIGNAL('clicked ()'), self.createMntGrp)
self.connect(self.ui.deleteMntGrpBT, Qt.SIGNAL('clicked ()'), self.deleteMntGrp)
self.connect(self.ui.compressionCB, Qt.SIGNAL('currentIndexChanged (int)'), self.onCompressionCBChanged)
self.connect(self.ui.pathLE, Qt.SIGNAL('textEdited (QString)'), self.onPathLEEdited)
self.connect(self.ui.filenameLE, Qt.SIGNAL('textEdited (QString)'), self.onFilenameLEEdited)
self.connect(self.ui.channelEditor.getQModel(), Qt.SIGNAL('dataChanged (QModelIndex, QModelIndex)'), self._updateButtonBox)
self.connect(self.ui.channelEditor.getQModel(), Qt.SIGNAL('modelReset ()'), self._updateButtonBox)
preScanList = self.ui.preScanList
self.connect(preScanList, Qt.SIGNAL('dataChanged'),
self.onPreScanSnapshotChanged)
#TODO: For Taurus 4 compatibility
if hasattr(preScanList, "dataChangedSignal"):
preScanList.dataChangedSignal.connect(self.onPreScanSnapshotChanged)
self.connect(self.ui.choosePathBT, Qt.SIGNAL('clicked ()'), self.onChooseScanDirButtonClicked)
self.__plotManager = None
icon = resource.getIcon(":/actions/view.svg")
self.togglePlotsAction = Qt.QAction(icon, "Show/Hide plots", self)
self.togglePlotsAction.setCheckable(True)
self.togglePlotsAction.setChecked(False)
self.togglePlotsAction.setEnabled(plotsButton)
self.addAction(self.togglePlotsAction)
self.connect(self.togglePlotsAction, Qt.SIGNAL("toggled(bool)"),
self.onPlotsButtonToggled)
self.ui.plotsButton.setDefaultAction(self.togglePlotsAction)
if door is not None:
self.setModel(door)
self.connect(self.ui.buttonBox, Qt.SIGNAL("clicked(QAbstractButton *)"), self.onDialogButtonClicked)
#Taurus Configuration properties and delegates
self.registerConfigDelegate(self.ui.channelEditor)
def getModelClass(self):
'''reimplemented from :class:`TaurusBaseWidget`'''
return taurus.core.taurusdevice.TaurusDevice
def onChooseScanDirButtonClicked(self):
ret = Qt.QFileDialog.getExistingDirectory (self, 'Choose directory for saving files', self.ui.pathLE.text())
if ret:
self.ui.pathLE.setText(ret)
self.ui.pathLE.emit(Qt.SIGNAL('textEdited (QString)'), ret)
def onDialogButtonClicked(self, button):
role = self.ui.buttonBox.buttonRole(button)
if role == Qt.QDialogButtonBox.ApplyRole:
self.writeExperimentConfiguration(ask=False)
elif role == Qt.QDialogButtonBox.ResetRole:
self._reloadConf()
def closeEvent(self, event):
'''This event handler receives widget close events'''
if self.isDataChanged():
self.writeExperimentConfiguration(ask=True)
Qt.QWidget.closeEvent(self, event)
def setModel(self, model):
'''reimplemented from :class:`TaurusBaseWidget`'''
TaurusBaseWidget.setModel(self, model)
self._reloadConf(force=True)
#set the model of some child widgets
door = self.getModelObj()
if door is None: return
tghost = taurus.Database().getNormalName() #@todo: get the tghost from the door model instead
msname = door.macro_server.getFullName()
self.ui.taurusModelTree.setModel(tghost)
self.ui.sardanaElementTree.setModel(msname)
def _reloadConf(self, force=False):
if not force and self.isDataChanged():
op = Qt.QMessageBox.question(self, "Reload info from door",
"If you reload, all current experiment configuration changes will be lost. Reload?",
Qt.QMessageBox.Yes | Qt.QMessageBox.Cancel)
if op != Qt.QMessageBox.Yes:
return
door = self.getModelObj()
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/loot/loot_schematic/shared_geonosian_reinforcement_core_schematic.py
|
Python
|
mit
| 527
| 0.043643
|
#### NOTICE: THIS FILE IS AUTOGENERATED
|
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
|
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/loot/loot_schematic/shared_geonosian_reinforcement_core_schematic.iff"
result.attribute_template_id = -1
result.stfName("craft_weapon_ingredients_n","geonosian_reinforcement_core_schematic")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
stefanseefeld/numba
|
numba/npyufunc/__init__.py
|
Python
|
bsd-2-clause
| 809
| 0.001236
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
from .decorators import Vectorize, GUVectorize, vectorize, guvectorize
from ._internal import PyUFunc_None, PyUFunc_Zero, PyUFunc_One
from . import _internal, array_exprs
if hasattr(_internal, 'PyUFunc_ReorderableNone'):
PyUFunc_ReorderableNone = _internal.PyUFunc_ReorderableNone
del _internal, array_exprs
|
def _init():
def init_vectorize():
from numba.cuda.vectorizers import CUDAVectorize
return CUDAVectorize
def init_guvectorize():
from numba.cuda.vectorizers import CUDAGUFuncVectorize
|
return CUDAGUFuncVectorize
Vectorize.target_registry.ondemand['cuda'] = init_vectorize
GUVectorize.target_registry.ondemand['cuda'] = init_guvectorize
_init()
del _init
|
ncdesouza/bookworm
|
env/lib/python2.7/site-packages/sqlalchemy/ext/mutable.py
|
Python
|
gpl-3.0
| 23,156
| 0.000043
|
# ext/mutable.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
.. versionadded:: 0.7 :mod:`sqlalchemy.ext.mutable` replaces SQLAlchemy's
legacy approach to in-place mutations of scalar values; see
:ref:`07_migration_mutation_extension`.
.. _mutable_scalars:
Establishing Mutability on Scalar Column Values
===============================================
A typical example of a "mutable" structure is a Python dictionary.
Following the example introduced in :ref:`types_toplevel`, we
begin with a custom type that marshals Python dictionaries into
JSON strings before being persisted::
from sqlalchemy.types import TypeDecorator, VARCHAR
import json
class JSONEncodedDict(TypeDecorator):
"Represents an immutable structure as a json-encoded string."
impl = VARCHAR
def process_bind_param(self, value, dialect):
if value is not None:
value = json.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = json.loads(value)
return value
The usage of ``json`` is only for the purposes of example. The
:mod:`sqlalchemy.ext.mutable` extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself
tracks all parents which reference it. Below, we illustrate a simple
version of the :class:`.MutableDict` dictionary object, which applies
the :class:`.Mutable` mixin to a plain Python dictionary::
from sqlalchemy.ext.mutable import Mutable
class MutableDict(Mutable, dict):
@classmethod
def coerce(cls, key, value):
"Convert plain dictionaries to MutableDict."
if not isinstance(value, MutableDict):
if isinstance(value, dict):
return MutableDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
else:
return value
def __setitem__(self, key, value):
"Detect dictionary set events and emit change events."
dict.__setitem__(self, key, value)
self.changed()
def __delitem__(self, key):
"Detect dictionary del events and emit change events."
dict.__delitem__(self, key)
self.changed()
The above dictionary class takes the approach of subclassing the Python
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
variants on this approach, such as subclassing ``UserDict.UserDict`` or
``collections.MutableMapping``; the part that's important to this example is
that the :meth:`.Mutable.changed` method is called whenever an in-place
change to the datastructure takes place.
We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutableDict``, such
as the plain dictionaries returned by the ``json`` module, into the
appropriate type. Defining this method is optional; we could just as well
have created our ``JSONEncodedDict`` such that it always returns an instance
of ``MutableDict``, and additionally ensured that all calling code
uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not
overridden, any values applied to a parent object which are not instances
of the mutable type will raise a ``ValueError``.
Our new ``MutableDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
to associate with types. This method grabs the given type object or
class and associates a listener that will detect all future mappings
of this type, applying event listening instrumentation to the mapped
attribute. Such as, with classical table metadata::
from sqlalchemy import Table, Column, Integer
my_data = Table('my_data', metadata,
Column('id', Integer, primary_key=True),
Column('data', MutableDict.as_mutable(JSONEncodedDict))
)
Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict``
(if the type object was not an instance already), which will intercept any
attributes which are mapped against this type. Below we establish a simple
mapping against the ``my_data`` table::
from sqlalchemy import mapper
class MyDataClass(object):
pass
# associates mutation listeners with MyDataClass.data
mapper(MyDataClass, my_data)
The ``MyDataClass.data`` member will now be notified of in place changes
to its value.
There's no difference in usage when using declarative::
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(MutableDict.as_mutable(JSONEncodedDict))
Any in-place changes to the ``MyDataClass.data`` member
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> m1 = MyDataClass(data={'value1':'foo'})
>>> sess.add(m1)
>>> sess.commit()
>>> m1.data['value1'] = 'bar'
>>> assert m1 in sess.dirty
True
The ``MutableDict`` can be associated with all future instances
of ``JSONEncodedDict`` in one step, using
:meth:`~.Mutable.associate_with`. This is similar to
:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
of ``MutableDict`` in all mappings unconditionally, without
the need to declare it individually::
MutableDict.associate_with(JSONEncodedDict)
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(JSONEncodedDict)
Supporting Pickling
--------------------
The key to the :mod:`sqlalchemy.ext.mutable` extension relies upon the
placement of a ``weakref.WeakKeyDictionary`` upon the value object, which
stores a mapping of parent mapped objects keyed to the attribute name under
which they are associated with this value. ``WeakKeyDictionary`` objects are
not picklable, due to the fact that they contain weakrefs and function
callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
that excludes the :meth:`~MutableBase._parents` collection from the pickle
stream::
class MyMutableType(Mutable):
def __getstate__(self):
d = self.__dict__.copy()
d.pop('_parents', None)
return d
With our dictionary example, we need to return the contents of the dict itself
(and also restore them on __setstate__)::
|
class MutableDict(Mutable, dict):
|
# ....
def __getstate__(self):
return dict(self)
def __setstate__(self, state):
self.update(state)
In the case that our mutable value object is pickled as it is attached to one
or more parent objects that are also part of the pickle, the :class:`.Mutable`
mixin will re-establish the :attr:`.Mutable._parents` collection on each value
object as the owning parents themselves are unpickled.
.. _mutable_composites:
Establishing Mutability on Composites
=====================================
Composites are a special ORM feature which allow a single scalar attribute to
be assigned an object value which represents information "composed" from one
or more columns from the underlying mapped table. The usual example is that of
a geometric "point", and is introduced in :ref:`mapper_composite`.
.. versionchanged:: 0.7
|
messagebird/python-rest-api
|
examples/voice_message.py
|
Python
|
bsd-2-clause
| 1,642
| 0.017052
|
#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
# ACCESS_KEY = ''
# MESSAGE_ID = ''
try:
ACCESS_KEY
except NameError:
print('You need to set an ACCESS_KEY constant in this file')
sys.exit(1)
try:
MESSAGE_ID
except NameError:
print('You need to set a MESSAGE_ID constant in this file')
sys.exit(1)
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
|
# Fetch the VoiceMessage object for the specified MESSAGE_ID.
vmsg = client.voice_message(MESSAGE_ID)
# Print the object information.
print('\nThe following information was returned as a VoiceMessage object:\n')
print(' id : %s' % vmsg.id)
print(' href : %s' % vmsg.href)
print(' originator : %s' % vmsg.originator)
print(' body : %s' % vmsg.body)
|
print(' reference : %s' % vmsg.reference)
print(' language : %s' % vmsg.language)
print(' voice : %s' % vmsg.voice)
print(' repeat : %s' % vmsg.repeat)
print(' ifMachine : %s' % vmsg.ifMachine)
print(' scheduledDatetime : %s' % vmsg.scheduledDatetime)
print(' createdDatetime : %s' % vmsg.createdDatetime)
print(' recipients : %s\n' % vmsg.recipients)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a VoiceMessage object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
|
brandonPurvis/osf.io
|
tests/test_auth.py
|
Python
|
apache-2.0
| 13,540
| 0.000517
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from nose.tools import * # noqa; PEP8 asserts
from webtest_plus import TestApp
import mock
import httplib as http
from flask import Flask
from modularodm import Q
from werkzeug.wrappers import BaseResponse
from framework import auth
from framework.auth import cas
from framework.sessions import Session
from framework.exceptions import HTTPError
from tests.base import OsfTestCase, assert_is_redirect
from tests.factories import (
UserFactory, UnregUserFactory, AuthFactory,
ProjectFactory, NodeFactory, AuthUserFactory, PrivateLinkFactory
)
from framework.auth import User, Auth
from framework.auth.decorators import must_be_logged_in
from website import mails
from website import settings
from website.project.decorators import (
must_have_permission, must_be_contributor,
must_have_addon, must_be_addon_authorizer,
)
class TestAuthUtils(OsfTestCase):
def test_unreg_user_can_register(self):
user = UnregUserFactory()
auth.register_unconfirmed(
username=user.username,
password='gattaca',
fullname='Rosie',
)
assert_true(user.get_confirmation_token(user.username))
@mock.patch('framework.auth.views.mails.send_mail')
def test_confirm_email(self, mock_mail):
user = UnregUserFactory()
auth.register_unconfirmed(
username=user.username,
password='gattaca',
fullname='Rosie',
)
token = user.get_confirmation_token(user.username)
res = self.app.get('/confirm/{}/{}'.format(user._id, token), allow_redirects=False)
res = res.follow()
assert_equal(res.status_code, 302)
assert_in('login?service=', res.location)
user.reload()
assert_equal(len(mock_mail.call_args_list), 1)
empty, kwargs = mock_mail.call_args
kwargs['user'].reload()
assert_equal(empty, ())
assert_equal(kwargs, {
'user': user,
'mimetype': 'html',
'mail': mails.WELCOME,
'to_addr': user.username,
})
self.app.set_cookie(settings.COOKIE_NAME, user.get_or_create_cookie())
res = self.app.get('/confirm/{}/{}'.format(user._id, token))
res = res.follow()
assert_equal(res.status_code, 302)
assert_in('dashboard', res.location)
assert_equal(len(mock_mail.call_args_list), 1)
session = Session.find(
Q('data.auth_user_id', 'eq', user._id)
).sort(
'-date_modified'
).limit(1)[0]
assert_equal(len(session.data['status']), 1)
def test_get_user_by_id(self):
user = UserFactory()
|
assert_equal(User.load(user._id), user)
def test_get_user_by_email(self):
user = UserFactory()
assert_equal(auth.get_user(email=user.username), user)
def test_get_user_with_wrong_password_returns_false(self):
user = UserFactory.build()
user.set_password('killerqueen')
assert_false(
auth.get_user(email=user.username, password='wrong')
|
)
class TestAuthObject(OsfTestCase):
def test_repr(self):
auth = AuthFactory()
rep = repr(auth)
assert_in(str(auth.user), rep)
def test_factory(self):
auth_obj = AuthFactory()
assert_true(isinstance(auth_obj.user, auth.User))
def test_from_kwargs(self):
user = UserFactory()
request_args = {'view_only': 'mykey'}
kwargs = {'user': user}
auth_obj = Auth.from_kwargs(request_args, kwargs)
assert_equal(auth_obj.user, user)
assert_equal(auth_obj.private_key, request_args['view_only'])
def test_logged_in(self):
user = UserFactory()
auth_obj = Auth(user=user)
assert_true(auth_obj.logged_in)
auth2 = Auth(user=None)
assert_false(auth2.logged_in)
class TestPrivateLink(OsfTestCase):
def setUp(self):
super(TestPrivateLink, self).setUp()
self.flaskapp = Flask('testing_private_links')
@self.flaskapp.route('/project/<pid>/')
@must_be_contributor
def project_get(**kwargs):
return 'success', 200
self.app = TestApp(self.flaskapp)
self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=False)
self.link = PrivateLinkFactory()
self.link.nodes.append(self.project)
self.link.save()
@mock.patch('website.project.decorators.Auth.from_kwargs')
def test_has_private_link_key(self, mock_from_kwargs):
mock_from_kwargs.return_value = Auth(user=None)
res = self.app.get('/project/{0}'.format(self.project._primary_key),
{'view_only': self.link.key})
res = res.follow()
assert_equal(res.status_code, 200)
assert_equal(res.body, 'success')
@mock.patch('website.project.decorators.Auth.from_kwargs')
def test_does_not_have_key(self, mock_from_kwargs):
mock_from_kwargs.return_value = Auth(user=None)
res = self.app.get('/project/{0}'.format(self.project._primary_key),
{'key': None})
assert_is_redirect(res)
# Flask app for testing view decorators
decoratorapp = Flask('decorators')
@must_be_contributor
def view_that_needs_contributor(**kwargs):
return kwargs.get('node') or kwargs.get('parent')
class AuthAppTestCase(OsfTestCase):
def setUp(self):
self.ctx = decoratorapp.test_request_context()
self.ctx.push()
def tearDown(self):
self.ctx.pop()
class TestMustBeContributorDecorator(AuthAppTestCase):
def setUp(self):
super(TestMustBeContributorDecorator, self).setUp()
self.contrib = AuthUserFactory()
self.project = ProjectFactory()
self.project.add_contributor(self.contrib, auth=Auth(self.project.creator))
self.project.save()
def test_must_be_contributor_when_user_is_contributor(self):
result = view_that_needs_contributor(
pid=self.project._primary_key,
user=self.contrib)
assert_equal(result, self.project)
def test_must_be_contributor_when_user_is_not_contributor_raises_error(self):
non_contributor = AuthUserFactory()
with assert_raises(HTTPError):
view_that_needs_contributor(
pid=self.project._primary_key,
user=non_contributor
)
def test_must_be_contributor_no_user(self):
res = view_that_needs_contributor(
pid=self.project._primary_key,
user=None,
)
assert_is_redirect(res)
# redirects to login url
redirect_url = res.headers['Location']
login_url = cas.get_login_url(service_url='http://localhost/')
assert_equal(redirect_url, login_url)
def test_must_be_contributor_parent_admin(self):
user = UserFactory()
node = NodeFactory(parent=self.project, creator=user)
res = view_that_needs_contributor(
pid=self.project._id,
nid=node._id,
user=self.project.creator,
)
assert_equal(res, node)
def test_must_be_contributor_parent_write(self):
user = UserFactory()
node = NodeFactory(parent=self.project, creator=user)
self.project.set_permissions(self.project.creator, ['read', 'write'])
self.project.save()
with assert_raises(HTTPError) as exc_info:
view_that_needs_contributor(
pid=self.project._id,
nid=node._id,
user=self.project.creator,
)
assert_equal(exc_info.exception.code, 403)
@must_be_logged_in
def protected(**kwargs):
return 'open sesame'
@must_have_permission('dance')
def thriller(**kwargs):
return 'chiller'
class TestPermissionDecorators(AuthAppTestCase):
@mock.patch('framework.auth.decorators.Auth.from_kwargs')
def test_must_be_logged_in_decorator_with_user(self, mock_from_kwargs):
user = UserFactory()
mock_from_kwargs.return_value = Auth(user=user)
protected()
@mock.patch('framework
|
shtripat/gluster_bridge
|
tendrl/gluster_bridge/flows/set_volume_option.py
|
Python
|
lgpl-2.1
| 705
| 0
|
import json
import etcd
from tendrl.gluster_bridge.atoms.volume.set import Set
class SetVolumeOption(object):
def __init__(self, api_job):
super(SetVolumeOption, self).__init__()
|
self.api_job = api_job
self.atom = Set
def start(self):
|
attributes = json.loads(self.api_job['attributes'].decode('utf-8'))
vol_name = attributes['volname']
option = attributes['option_name']
option_value = attributes['option_value']
self.atom().start(vol_name, option, option_value)
self.api_job['status'] = "finished"
etcd.Client().write(self.api_job['request_id'],
json.dumps(self.api_job))
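# Illustrative api_job payload (hypothetical values, matching the keys read
# above):
# {'request_id': '/requests/123',
#  'attributes': b'{"volname": "vol1", "option_name": "performance.readdir-ahead", "option_value": "on"}'}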
|
caioserra/apiAdwords
|
examples/adspygoogle/dfp/v201308/get_user_team_association.py
|
Python
|
apache-2.0
| 1,916
| 0.001566
|
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets a user team association by the user and team ID.
To determine which teams exist, run get_all_teams.py. To determine which users
exist, run get_all_users.py.
Tags: UserTeamAssociationService.getUserTeamAssociation
"""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
|
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
user_team_association_service = client.GetService(
'UserTeamAssociationService', version='v201308')
# Set the IDs of the user and team to get the association for.
|
user_id = 'INSERT_USER_ID_HERE'
team_id = 'INSERT_TEAM_ID_HERE'
# Get user team association.
user_team_association = user_team_association_service.GetUserTeamAssociation(
team_id, user_id)[0]
# Display results.
print ('User team association between user with ID \'%s\' and team with ID '
'\'%s\' was found.' % (user_team_association['userId'],
user_team_association['teamId']))
|
arielalmendral/ert
|
python/python/ert_gui/ide/keywords/definitions/range_string_argument.py
|
Python
|
gpl-3.0
| 2,862
| 0.004892
|
import re
from ert_gui.ide.keywords.definitions import ArgumentDefinition
class RangeStringArgument(ArgumentDefinition):
NOT_A_VALID_RANGE_STRING = "The input should be of the type: <b><pre>\n\t1,3-5,9,17\n</pre></b>i.e. integer values separated by commas, and dashes to represent ranges."
VALUE_NOT_IN_RANGE = "A value must be in the range from 0 to %d."
PATTERN = re.compile("^[0-9\-, \t]+$")
RANGE_PATTERN = re.compile("^[ \t]*([0-9]+)[ \t]*-[ \t]*([0-9]+)[ \t]*$")
NUMBER_PATTERN = re.compile("^[ \t]*([0-9]+)[ \t]*$")
def __init__(self, max_value=None, **kwargs):
super(RangeStringArgument, self).__init__(**kwargs)
self.__max_value = max_value
def validate(self, token):
validation_status = super(RangeStringArgument, self).validate(token)
if not validation_status:
return validation_status
else:
match = RangeStringArgument.PATTERN.match(token)
if match is None:
validation_status.setFailed()
validation_status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
else:
groups = token.split(",")
for group in groups:
range_match = RangeStringArgument.RANGE_PATTERN.match(group)
number_match = RangeStringArgument.NUMBER_PATTERN.match(group)
if range_match is None and number_match is None:
validation_status.setFailed()
validation_status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
break
if range_match:
num_1 = int(range_match.group(1))
num_2 = int(range_match.group(2))
if not num_2 > num_1:
validation_status.setFailed()
validation_status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
|
break
if self.__max_value is not None and (num_1 >= self.__max_value or num_2 >= self.__max_value):
|
validation_status.setFailed()
validation_status.addToMessage(RangeStringArgument.VALUE_NOT_IN_RANGE % (self.__max_value - 1))
break
if number_match and self.__max_value is not None:
num = int(number_match.group(1))
if num >= self.__max_value:
validation_status.setFailed()
validation_status.addToMessage(RangeStringArgument.VALUE_NOT_IN_RANGE % (self.__max_value - 1))
break
validation_status.setValue(token)
return validation_status
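# Usage sketch (max_value is hypothetical): with RangeStringArgument(max_value=20),
# validate("1,3-5,9,17") passes, validate("5-3") fails because a range must be
# strictly increasing, and validate("25") fails the max_value check.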
|
fxia22/ASM_xf
|
PythonD/site_python/twisted/im/tocsupport.py
|
Python
|
gpl-2.0
| 8,589
| 0.014321
|
# Twisted, the Framework of Your Internet
# Copyright (C) 2001-2003 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""TOC (i.e. AIM) support for Instance Messenger."""
# System Imports
import string, re
# Twisted Imports
from twisted.protocols import toc
from twisted.im.locals import ONLINE, OFFLINE, AWAY
from twisted.internet import defer, reactor, protocol
from twisted.internet.defer import succeed
# Sibling Imports
import basesupport, interfaces, locals
def dehtml(text):
text=string.replace(text,"<br>","\n")
text=string.replace(text,"<BR>","\n")
text=string.replace(text,"<Br>","\n") # XXX make this a regexp
text=string.replace(text,"<bR>","\n")
text=re.sub('<.*?>','',text)
text=string.replace(text,'>','>')
text=string.replace(text,'<','<')
text=string.replace(text,'&','&')
text=string.replace(text,' ',' ')
text=string.replace(text,'"','"')
return text
def html(text):
text=string.replace(text,'"','"')
text=string.replace(text,'&','&')
text=string.replace(text,'<','<')
text=string.replace(text,'>','>')
text=string.replace(text,"\n","<br>")
return '<font color="#000000" back="#ffffff" size=3>%s</font>'%text
class TOCPerson(basesupport.AbstractPerson):
def isOnline(self):
return self.status != OFFLINE
def getStatus(self):
return self.status
def getIdleTime(self):
return str(self.idletime)
def setStatusAndIdle(self, status, idletime):
if self.account.client is None:
raise locals.OfflineError
self.status = status
self.idletime = idletime
self.account.client.chat.getContactsList().setContactStatus(self)
def sendMessage(self, text, meta=None):
if self.account.client is None:
raise locals.OfflineError
if meta:
if meta.get("style", None) == "emote":
text="* "+text+"* "
self.account.client.say(self.name,html(text))
return succeed(text)
class TOCGroup(basesupport.AbstractGroup):
__implements__ = (interfaces.IGroup,)
def __init__(self, name, tocAccount):
basesupport.AbstractGroup.__init__(self, name, tocAccount)
self.roomID = self.client.roomID[self.name]
def sendGroupMessage(self, text, meta=None):
if self.account.client is None:
raise locals.OfflineError
if meta:
if meta.get("style", None) == "emote":
text="* "+text+"* "
self.account.client.chat_say(self.roomID,html(text))
return succeed(text)
def leave(self):
if self.account.client is None:
raise locals.OfflineError
self.account.client.chat_leave(self.roomID)
class TOCProto(basesupport.AbstractClientMixin, toc.TOCClient):
def __init__(self, account, chatui, logonDeferred):
toc.TOCClient.__init__(self, account.username, account.password)
basesupport.AbstractClientMixin.__init__(self, account, chatui,
logonDeferred)
self.roomID = {}
self.roomIDreverse = {}
def _debug(self, m):
pass #print '<toc debug>', repr(m)
def getGroupConversation(self, name, hide=0):
return self.chat.getGroupConversation(
self.chat.getGroup(name, self), hide)
def addContact(self, name):
self.add_buddy([name])
if not self._buddylist.has_key('TwistedIM'):
self._buddylist['TwistedIM'] = []
if name in self._buddylist['TwistedIM']:
# whoops, don't add again
return
self._buddylist['TwistedIM'].append(name)
self.set_config(self._config_mode, self._buddylist, self._permit, self._deny)
def getPerson(self,name):
return self.chat.getPerson(name, self)
def onLine(self):
self.account._isOnline = 1
#print '$$!&*$&!(@$*& TOC ONLINE *!#@&$(!*%&'
def gotConfig(self, mode, buddylist, permit, deny):
#print 'got toc config', repr(mode), repr(buddylist), repr(permit), repr(deny)
self._config_mode = mode
self._buddylist = buddylist
self._permit = permit
self._deny = deny
if permit:
self._debug('adding permit')
self.add_permit(permit)
if deny:
self._debug('adding deny')
self.add_deny(deny)
clist=[]
for k in buddylist.keys():
self.add_buddy(buddylist[k])
for name in buddylist[k]:
self.getPerson(name).setStatusAndIdle(OFFLINE, '--')
self.signon()
name = None
def tocNICK(self,data):
if not self.name:
print 'Waiting for second NICK', data
self.name=data[0]
self.accountName = '%s (TOC)' % self.name
self.chat.getContactsList()
else:
print 'reregistering...?', data
self.name=data[0]
# self.accountName = "%s (TOC)"%data[0]
if self._logonDeferred is not None:
self._logonDeferred.callback(self)
### Error Messages
def hearError(self, code, args):
print '*** TOC ERROR ***', repr(code), repr(args)
def hearWarning(self, newamount, username):
print '*** TOC WARNING ***', repr(newamount), repr(username)
### Buddy Messages
def hearMessage(self,username,message,autoreply):
if autoreply:
message='<AUTO-REPLY>: '+message
self.chat.getConversation(self.getPerson(username)
).showMessage(dehtml(message))
def updateBuddy(self,username,online,evilness,signontime,idletime,userclass,away):
if away:
status=AWAY
elif online:
status=ONLINE
else:
status=OFFLINE
self.getPerson(username).setStatusAndIdle(status, idletime)
### Group Chat
def chatJoined(self, roomid, roomname, users):
        self.roomID[roomname]=roomid
self.roomIDreverse[roomid]=roomname
        self.getGroupConversation(roomname).setGroupMembers(users)
def chatUpdate(self,roomid,member,inroom):
group=self.roomIDreverse[roomid]
if inroom:
self.getGroupConversation(group).memberJoined(member)
else:
self.getGroupConversation(group).memberLeft(member)
def chatHearMessage(self, roomid, username, message):
if toc.normalize(username) == toc.normalize(self.name):
return # ignore the message
group=self.roomIDreverse[roomid]
self.getGroupConversation(group).showGroupMessage(username, dehtml(message))
def chatHearWhisper(self, roomid, username, message):
print '*** user whispered *** ', roomid, username, message
def chatInvited(self, roomid, roomname, username, message):
print '*** user invited us to chat *** ',roomid, roomname, username, message
def chatLeft(self, roomid):
group=self.roomIDreverse[roomid]
self.getGroupConversation(group,1)
del self.roomID[group]
del self.roomIDreverse[roomid]
def rvousProposal(self,type,cookie,user,vip,port,**kw):
print '*** rendezvous. ***', type, cookie, user, vip, port, kw
def receiveBytes(self, user, file, chunk, sofar, total):
print '*** File transfer! ***', user, file, chunk, sofar, total
def joinGroup(self,name):
self.chat_join(4,toc.normalize(name))
class TOCAccount(basesupport.AbstractAccount):
__implements__ = (interfaces.IAccount,)
|
lgh8820/ansible-test
|
yjy_all_scripts/clear_arch_tables.py
|
Python
|
mit
| 630
| 0.036508
|
#!/usr/bin/env python
# -*- coding=utf8 -*-
"""clear the mysql database yjy_archtecture tables in aa.txt"""
import MySQLdb as mdb
db_conn = mdb.connect("localhost","root","HEkgDDZZ","yjy_human")
cursor = db_conn.cursor()
with open("/root/scripts/clear_human_sql.tables") as f:
|
tables = f.readlines()
print tables
try:
for table in tables:
tb = table.strip()
print tb
sql = """TRUNCATE TABLE """+ tb
cursor.execute(sql)
data = cursor.fetchall()
print data
sql1 = """select * from """+ tb
cursor.execute(sql1)
data1 = cursor.fetchall()
print data1
except mdb.Error, e:
print e
db_conn.close()
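One hedged caveat on the pattern above (illustrative, not in the original script): TRUNCATE targets are identifiers, so they cannot be bound as query parameters; quoting them is safer than concatenating raw file input.

def truncate_tables(conn, names):
    cur = conn.cursor()
    for name in names:
        safe = "`%s`" % name.strip().replace("`", "``")  # escape backticks
        cur.execute("TRUNCATE TABLE " + safe)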
|
JeremyRubin/bitcoin
|
test/functional/p2p_leak.py
|
Python
|
mit
| 7,392
| 0.001894
|
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
Before receiving a VERACK, a node should not send anything but VERSION/VERACK
and feature negotiation messages (WTXIDRELAY, SENDADDRV2).
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
import time
from test_framework.messages import (
msg_getaddr,
msg_ping,
msg_version,
)
from test_framework.p2p import (
P2PInterface,
P2P_SUBVERSION,
P2P_SERVICES,
P2P_VERSION_RELAY,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
)
PEER_TIMEOUT = 3
class LazyPeer(P2PInterface):
def __init__(self):
super().__init__()
self.unexpected_msg = False
self.ever_connected = False
self.got_wtxidrelay = False
self.got_sendaddrv2 = False
def bad_message(self, message):
self.unexpected_msg = True
print("should not have received message: %s" % message.msgtype)
def on_open(self):
self.ever_connected = True
# Does not respond to "version" with "verack"
def on_version(self, message): self.bad_message(message)
def on_verack(self, message): self.bad_message(message)
def on_inv(self, message): self.bad_message(message)
def on_addr(self, message): self.bad_message(message)
def on_getdata(self, message): self.bad_message(message)
def on_getblocks(self, message): self.bad_message(message)
def on_tx(self, message): self.bad_message(message)
def on_block(self, message): self.bad_message(message)
def on_getaddr(self, message): self.bad_message(message)
def on_headers(self, message): self.bad_message(message)
def on_getheaders(self, message): self.bad_message(message)
def on_ping(self, message): self.bad_message(message)
def on_mempool(self, message): self.bad_message(message)
def on_pong(self, message): self.bad_message(message)
def on_feefilter(self, message): self.bad_message(message)
def on_sendheaders(self, message): self.bad_message(message)
def on_sendcmpct(self, message): self.bad_message(message)
def on_cmpctblock(self, message): self.bad_message(message)
def on_getblocktxn(self, message): self.bad_message(message)
def on_blocktxn(self, message): self.bad_message(message)
def on_wtxidrelay(self, message): self.got_wtxidrelay = True
def on_sendaddrv2(self, message): self.got_sendaddrv2 = True
# Peer that sends a version but not a verack.
class NoVerackIdlePeer(LazyPeer):
def __init__(self):
self.version_received = False
super().__init__()
def on_verack(self, message): pass
# When version is received, don't reply with a verack. Instead, see if the
# node will give us a message that it shouldn't. This is not an exhaustive
# list!
def on_version(self, message):
self.version_received = True
self.send_message(msg_ping())
self.send_message(msg_getaddr())
class P2PVersionStore(P2PInterface):
version_received = None
def on_version(self, msg):
# Responds with an appropriate verack
super().on_version(msg)
self.version_received = msg
class P2PLeakTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[f"-peertimeout={PEER_TIMEOUT}"]]
def create_old_version(self, nversion):
old_version_msg = msg_version()
old_version_msg.nVersion = nversion
old_version_msg.strSubVer = P2P_SUBVERSION
old_version_msg.nServices = P2P_SERVICES
old_version_msg.relay = P2P_VERSION_RELAY
return old_version_msg
def run_test(self):
self.log.info('Check that the node doesn\'t send unexpected messages before handshake completion')
# Peer that never sends a version, nor any other messages. It shouldn't receive anything from the node.
no_version_idle_peer = self.nodes[0].add_p2p_connection(LazyPeer(), send_version=False, wait_for_verack=False)
# Peer that sends a version but not a verack.
no_verack_idle_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), wait_for_verack=False)
# Pre-wtxidRelay peer that sends a version but not a verack and does not support feature negotiation
        # messages which start at nVersion == 70016
pre_wtxidrelay_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), send_version=False, wait_for_verack=False)
pre_wtxidrelay_peer.send_message(self.create_old_version(70015))
# Wait until the peer gets the verack in response to the version. Though, don't wait for the node to receive the
# verack, since the peer never sent one
        no_verack_idle_peer.wait_for_verack()
pre_wtxidrelay_peer.wait_for_verack()
no_version_idle_peer.wait_until(lambda: no_version_idle_peer.ever_connected)
no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received)
pre_wtxidrelay_peer.wait_until(lambda: pre_wtxidrelay_peer.version_received)
# Mine a block and make sure that it's not sent to the connected peers
self.nodes[0].generate(nblocks=1)
# Give the node enough time to possibly leak out a message
time.sleep(PEER_TIMEOUT + 2)
# Make sure only expected messages came in
assert not no_version_idle_peer.unexpected_msg
assert not no_version_idle_peer.got_wtxidrelay
assert not no_version_idle_peer.got_sendaddrv2
assert not no_verack_idle_peer.unexpected_msg
assert no_verack_idle_peer.got_wtxidrelay
assert no_verack_idle_peer.got_sendaddrv2
assert not pre_wtxidrelay_peer.unexpected_msg
assert not pre_wtxidrelay_peer.got_wtxidrelay
assert not pre_wtxidrelay_peer.got_sendaddrv2
# Expect peers to be disconnected due to timeout
assert not no_version_idle_peer.is_connected
assert not no_verack_idle_peer.is_connected
assert not pre_wtxidrelay_peer.is_connected
self.log.info('Check that the version message does not leak the local address of the node')
p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
ver = p2p_version_store.version_received
# Check that received time is within one hour of now
assert_greater_than_or_equal(ver.nTime, time.time() - 3600)
assert_greater_than_or_equal(time.time() + 3600, ver.nTime)
assert_equal(ver.addrFrom.port, 0)
assert_equal(ver.addrFrom.ip, '0.0.0.0')
assert_equal(ver.nStartingHeight, 201)
assert_equal(ver.relay, 1)
self.log.info('Check that old peers are disconnected')
p2p_old_peer = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
with self.nodes[0].assert_debug_log(['peer=4 using obsolete version 31799; disconnecting']):
p2p_old_peer.send_message(self.create_old_version(31799))
p2p_old_peer.wait_for_disconnect()
if __name__ == '__main__':
P2PLeakTest().main()
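The version cutoff the test leans on, restated as a standalone sketch (WTXID_RELAY_VERSION is an assumed name; the test itself only uses the 70015/70016 literals):

WTXID_RELAY_VERSION = 70016  # assumed constant name, for illustration

def expects_feature_negotiation(nversion):
    # The node only sends wtxidrelay/sendaddrv2 to peers at or above the cutoff.
    return nversion >= WTXID_RELAY_VERSION

assert not expects_feature_negotiation(70015)  # the pre_wtxidrelay_peer case
assert expects_feature_negotiation(70016)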
|
johnkerl/scripts-math
|
pythonlib/randmatc_m.py
|
Python
|
bsd-2-clause
| 1,088
| 0.02114
|
#!/usr/bin/python -Wall
# ================================================================
# John Kerl
# kerl.john.r@gmail.com
# 2008-02-06
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
import sys
import randc_m # For random complex scalars
import math
from sackmatc_m import *
# ----------------------------------------------------------------
def randmatc(m, n):
A = make_zero_matrix(m, n)
for i in range(0, m):
for j in range(0, n):
A[i][j] = randc_m.randc_mean_sq_1()
return A
# ----------------------------------------------------------------
def randsqmatc(n):
return randmatc(n, n)
# ----------------------------------------------------------------
def randgue(n):
A = make_zero_matrix(n, n)
for i in range(0, n):
for j in range(i, n):
A[i][j] = randc_m.randc_mean_sq_1()
for i in range(0, n):
for j in range(0, i):
A[i][j] = conj(A[j][i])
return A
# ----------------------------------------------------------------
def randxxt(n):
A = randmatc(n, n)
return A.transpose() * A
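A standalone restatement of randgue() (illustrative; the original relies on the local randc_m/sackmatc_m helpers), checking that off-diagonal entries are conjugate-symmetric (the diagonal stays complex, exactly as in randgue() above):

import random

def _randc():
    # complex sample with mean square 1, assumed analogue of randc_mean_sq_1()
    return complex(random.gauss(0, 1), random.gauss(0, 1)) / 2 ** 0.5

n = 4
A = [[0j] * n for _ in range(n)]
for i in range(n):
    for j in range(i, n):
        A[i][j] = _randc()
for i in range(n):
    for j in range(0, i):
        A[i][j] = A[j][i].conjugate()
assert all(A[i][j] == A[j][i].conjugate()
           for i in range(n) for j in range(n) if i != j)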
|
frostblooded/kanq
|
api/models/medal.py
|
Python
|
mit
| 373
| 0.002681
|
from django.db import models
from .user import User
from .post import Post
class Medal(models.Model):
    rank = models.IntegerField()
    user = models.ForeignKey(User, related_name='medals')
post = models.OneToOneField(Post, on_delete=models.CASCADE, related_name='medal')
def __str__(self):
return "%s %s" % (self.post.title, self.rank)
|
diego-d5000/MisValesMd
|
env/lib/python2.7/site-packages/django/core/management/commands/makemessages.py
|
Python
|
mit
| 22,994
| 0.003305
|
from __future__ import unicode_literals
import fnmatch
import glob
import io
import os
import re
import sys
from itertools import dropwhile
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, handle_extensions, popen_wrapper,
)
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_str
from django.utils.functional import cached_property, total_ordering
from django.utils.jslex import prepare_js_for_gettext
from django.utils.text import get_text_list
plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
STATUS_OK = 0
def check_programs(*programs):
for program in programs:
if find_command(program) is None:
raise CommandError("Can't find %s. Make sure you have GNU "
"gettext tools 0.15 or newer installed." % program)
def gettext_popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding="utf-8"):
"""
Makes sure text obtained from stdout of gettext utilities is Unicode.
"""
stdout, stderr, status_code = popen_wrapper(args, os_err_exc_type=os_err_exc_type)
if os.name == 'nt' and six.PY3 and stdout_encoding != DEFAULT_LOCALE_ENCODING:
# This looks weird because it's undoing what
# subprocess.Popen(universal_newlines=True).communicate()
# does when capturing PO files contents from stdout of gettext command
# line programs. No need to do anything on Python 2 because it's
# already a byte-string there (#23271).
stdout = stdout.encode(DEFAULT_LOCALE_ENCODING).decode(stdout_encoding)
if six.PY2:
stdout = stdout.decode(stdout_encoding)
return stdout, stderr, status_code
@total_ordering
class TranslatableFile(object):
def __init__(self, dirpath, file_name, locale_dir):
self.file = file_name
self.dirpath = dirpath
self.locale_dir = locale_dir
def __repr__(self):
return "<TranslatableFile: %s>" % os.sep.join([self.dirpath, self.file])
def __eq__(self, other):
return self.path == other.path
def __lt__(self, other):
return self.path < other.path
@property
def path(self):
return os.path.join(self.dirpath, self.file)
def process(self, command, domain):
"""
Extract translatable literals from self.file for :param domain:,
creating or updating the POT file.
Uses the xgettext GNU gettext utility.
"""
from django.utils.translation import templatize
if command.verbosity > 1:
command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
file_ext = os.path.splitext(self.file)[1]
if domain == 'djangojs':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = command.gettext_version < (0, 18, 3)
if is_templatized:
with io.open(orig_file, 'r', encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
src_data = prepare_js_for_gettext(src_data)
work_file = os.path.join(self.dirpath, '%s.c' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(src_data)
args = [
'xgettext',
'-d', domain,
'--language=%s' % ('C' if is_templatized else 'JavaScript',),
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
elif domain == 'django':
orig_file = os.path.join(self.dirpath, self.file)
work_file = orig_file
is_templatized = file_ext != '.py'
if is_templatized:
with io.open(orig_file, encoding=settings.FILE_CHARSET) as fp:
src_data = fp.read()
content = templatize(src_data, orig_file[2:])
work_file = os.path.join(self.dirpath, '%s.py' % self.file)
with io.open(work_file, "w", encoding='utf-8') as fp:
fp.write(content)
args = [
'xgettext',
'-d', domain,
'--language=Python',
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=ugettext_noop',
'--keyword=ugettext_lazy',
'--keyword=ungettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--keyword=pgettext_lazy:1c,2',
'--keyword=npgettext_lazy:1c,2,3',
'--output=-'
] + command.xgettext_options
args.append(work_file)
else:
return
msgs, errors, status = gettext_popen_wrapper(args)
if errors:
if status != STATUS_OK:
if is_templatized:
os.unlink(work_file)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(self.file, errors))
elif command.verbosity > 0:
# Print warnings
command.stdout.write(errors)
if msgs:
# Write/append messages to pot file
potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
if is_templatized:
# Remove '.py' suffix
if os.name == 'nt':
# Preserve '.\' prefix on Windows to respect gettext behavior
old = '#: ' + work_file
new = '#: ' + orig_file
else:
old = '#: ' + work_file[2:]
new = '#: ' + orig_file[2:]
msgs = msgs.replace(old, new)
write_pot_file(potfile, msgs)
if is_templatized:
os.unlink(work_file)
def write_pot_file(potfile, msgs):
"""
Write the :param potfile: POT file with the :param msgs: contents,
previously making sure its format is valid.
"""
if os.path.exists(potfile):
# Strip the header
msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
else:
msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
with io.open(potfile, 'a', encoding='utf-8') as fp:
fp.write(msgs)
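Illustrative, standalone restatement of the header-stripping step above: when the POT file already exists, every line up to and including the first blank line is dropped so only message entries get appended.

from itertools import dropwhile
msgs = 'header 1\nheader 2\n\nmsgid "hello"\nmsgstr ""\n'
print('\n'.join(dropwhile(len, msgs.split('\n'))))
# -> (blank line)
#    msgid "hello"
#    msgstr ""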
class Command(BaseCommand):
help = ("Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this
|
command with one of either the "
"--locale, --exclude or --all options.")
requires_system_checks = False
leave_locale_alone = True
msgmerge_options = ['-q', '--previous']
    msguniq_options = ['--to-code=utf-8']
msgattrib_options = ['--no-obsolete']
xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']
def add_arguments(self, parser):
parser.add_argument('--locale', '-l', default=[], dest='locale', action='append',
help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
'Can be used multiple times.')
parser.add_argument('--exclude', '-x', default=[], dest='exclude', action='append',
help='Locales to exclude. Default is none. Can be used multiple times.')
parser.add_argument('--domain', '-d', default='django', des
|
beblount/Steer-Clear-Backend
|
tests/forms_tests.py
|
Python
|
mit
| 3,278
| 0.00061
|
from steerclear import app
from steerclear.forms import RideForm
from steerclear.models import *
import unittest, flask
"""
RideFormTestCase
----------------
Test class for the RideForm class
"""
class RideFormTestCase(unittest.TestCase):
"""
submit_form
-----------
helper method to submit a RideForm by faking
a request context. Returns True is the form
validated and False if not.
*payload* is a dictionary of name/value pairs
of the form data that is being submitted
"""
def submit_form(self, payload):
with app.test_request_context():
form = RideForm(data=payload)
return form.validate()
def setUp(self):
self.payload = {
u"num_pa
|
ssengers": 4,
u"start_latitude": 1.1,
u"start_l
|
ongitude": 2.2,
u"end_latitude": 3.3,
u"end_longitude": 4.4,
}
"""
test_ride_form_correct_submit
-----------------------------
Tests that a RideForm can be validated correctly
"""
def test_ride_form_correct_submit(self):
result = self.submit_form(self.payload)
self.assertTrue(result)
"""
test_data_required_fields
-------------------------
tests that a RideForm is not valid unless
all fields are included in the form data
"""
def test_data_required_fields(self):
payload = self.payload
for key in payload.keys():
bad_payload = payload.copy()
bad_payload.pop(key, None)
result = self.submit_form(bad_payload)
self.assertFalse(result)
"""
test_num_passengers_min_range
-----------------------------
Tests that a RideForm accepts the correct min
range value for the 'num_passengers' field
"""
def test_num_passengers_min_range(self):
payload = self.payload.copy()
payload[u'num_passengers'] = 1
result = self.submit_form(payload)
self.assertTrue(result)
"""
test_num_passengers_max_range
-----------------------------
Tests that a RideForm accepts the correct max
range value for the 'num_passengers' field
"""
def test_num_passengers_max_range(self):
payload = self.payload.copy()
payload[u'num_passengers'] = 8
result = self.submit_form(payload)
self.assertTrue(result)
"""
test_num_passengers_bad_range
-----------------------------
Tests that a RideForm does not accept values
for the 'num_passengers' field that are out of range
"""
def test_num_passengers_bad_range(self):
bad_payload = self.payload.copy()
bad_payload[u'num_passengers'] = 0
result = self.submit_form(bad_payload)
self.assertFalse(result)
bad_payload[u'num_passengers'] = -1
result = self.submit_form(bad_payload)
self.assertFalse(result)
bad_payload[u'num_passengers'] = -100
result = self.submit_form(bad_payload)
self.assertFalse(result)
bad_payload[u'num_passengers'] = 9
result = self.submit_form(bad_payload)
self.assertFalse(result)
bad_payload[u'num_passengers'] = 100
result = self.submit_form(bad_payload)
self.assertFalse(result)
|
dbiesecke/dbiesecke.github.io
|
repo/script.module.urlresolver/lib/urlresolver/plugins/bitporno.py
|
Python
|
mit
| 1,372
| 0.005831
|
'''
urlresolver XBMC Addon
Copyright (C) 2017
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from urlresolver.plugins.__generic_resolver__ import GenericResolver
class BitPornoResolver(GenericResolver):
#print "print UR BitPorno"
name = 'BitPorno'
domains = ['bitporno.com']
pattern = '(?://|\.)(bitporno\.com)/(?:\?v=|embed/)([a-zA-Z0-9]+)'
def get_url(self, host, media_id):
print "print UR BitPorno self, host, media_id", self,host, media_id
print "print return", self._default_get_url(host, media_id, template='http://{host}/?v={media_id}')
return self._default_get_url(host, media_id, template='http://{host}/?v={media_id}')
return "https://www.bitporno.com/?v=FM11XRJLMP"
@classmethod
def _is_enabled(cls):
return True
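A quick illustrative match against the class pattern above (standalone, not part of the plugin):

import re
m = re.search(BitPornoResolver.pattern, 'https://www.bitporno.com/?v=FM11XRJLMP')
print m.groups()  # -> ('bitporno.com', 'FM11XRJLMP')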
|
grodrigo/django_general
|
persons/urls.py
|
Python
|
gpl-3.0
| 427
| 0
|
from django.conf.urls import url
from . import views
app_name = 'persons'
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^contact/$', views.contact, name='contact'),
url(r'^thanks/$', views.thanks, name='thanks'),
    url(r'^upload/$', views.upload_file, name='upload_file'),
url(r'^success/$', views.success, name='success'),
    url(r'^uploadImage/$', views.uploadImage, name='uploadImage'),
]
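Illustrative reversing under the 'persons' namespace (assumes this urlconf is include()d in the project; the import path matches the old django.conf.urls era used above):

# from django.core.urlresolvers import reverse
# reverse('persons:contact')  # -> '/contact/'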
|
l5h5t7/ZeroNet
|
src/Test/TestFileRequest.py
|
Python
|
gpl-2.0
| 3,984
| 0.00251
|
import cStringIO as StringIO
import pytest
import time
from Connection import ConnectionServer
from Connection import Connection
from File import FileServer
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestFileRequest:
def testGetFile(self, file_server, site):
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer("127.0.0.1", 1545)
connection = client.getConnection("127.0.0.1", 1544)
file_server.sites[site.address] = site
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
assert "sign" in response["body"]
# Invalid file
response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
assert "File read error" in response["error"]
# Location over size
response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
assert "File read error" in response["error"]
# Stream from parent dir
response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
assert "File read error" in response["error"]
# Invalid site
response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
assert "Unknown site" in response["error"]
response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
assert "Unknown site" in response["error"]
connection.close()
client.stop()
def testStreamFile(self, file_server, site):
file_server.ip_incoming = {} # Reset flood protection
client = ConnectionServer("127.0.0.1", 1545)
connection = client.getConnection("127.0.0.1", 1544)
        file_server.sites[site.address] = site
buff = StringIO.StringIO()
        response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
assert "stream_bytes" in response
assert "sign" in buff.getvalue()
# Invalid file
buff = StringIO.StringIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
assert "File read error" in response["error"]
# Location over size
buff = StringIO.StringIO()
response = connection.request(
"streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
)
assert "File read error" in response["error"]
# Stream from parent dir
buff = StringIO.StringIO()
response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
assert "File read error" in response["error"]
connection.close()
client.stop()
def testPex(self, file_server, site, site_temp):
file_server.sites[site.address] = site
client = FileServer("127.0.0.1", 1545)
client.sites[site_temp.address] = site_temp
site_temp.connection_server = client
connection = client.getConnection("127.0.0.1", 1544)
# Add new fake peer to site
fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True)
# Add fake connection to it
fake_peer.connection = Connection(file_server, "1.2.3.4", 11337)
fake_peer.connection.last_recv_time = time.time()
assert fake_peer in site.getConnectablePeers()
# Add file_server as peer to client
peer_file_server = site_temp.addPeer("127.0.0.1", 1544)
assert "1.2.3.4:11337" not in site_temp.peers
assert peer_file_server.pex()
assert "1.2.3.4:11337" in site_temp.peers
connection.close()
client.stop()
|
PisiLinuxNew/gonullu
|
gonullu/__init__.py
|
Python
|
gpl-3.0
| 132
| 0
|
from gonullu.farm import Farm
from gonullu.docker import Docker
from gonullu.volunteer import Volunteer
from gonullu.log import Log
|