| repo_name | path | language | license | size | score | prefix | middle | suffix |
|---|---|---|---|---|---|---|---|---|
| stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34 | stringlengths 0-8.16k | stringlengths 3-512 | stringlengths 0-8.17k |
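
Each row below pairs a source file's metadata with the file text split into three spans (prefix, middle, suffix). A minimal sketch of how a row recombines into the original file, assuming each row is a plain mapping keyed by the column names above (the helper name is ours, not part of the dataset):

def reconstruct(row):
    # The full file is the concatenation of the three text columns; "middle"
    # is the span a fill-in-the-middle model would be asked to produce.
    return row["prefix"] + row["middle"] + row["suffix"]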

mseroczynski/platformio | tests/commands/test_lib.py | Python | mit | 2,036 | 0

# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from os import listdir
from os.path import isdir, isfile, join
import re
from platformio.commands.lib import cli
from platformio import util
def validate_libfolder():
libs_path = util.get_lib_dir()
installed_libs = listdir(libs_path)
    for lib in installed_libs:
assert isdir(join(libs_path, lib))
assert isfile(join(libs_path, lib, ".library.json")) and isfile(
join(libs_path, lib, "library.json"))
def test_lib_search(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["search", "DHT22"])
validate_cliresult(result)
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
assert int(match.group(1)) > 2
result = clirunner.invoke(cli, ["search", "DHT22", "--platform=timsp430"])
validate_cliresult(result)
match = re.search(r"Found\s+(\d+)\slibraries:", result.output)
assert int(match.group(1)) == 1
def test_lib_install(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["install", "58", "115"])
validate_cliresult(result)
validate_libfolder()
def test_lib_list(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["list"])
validate_cliresult(result)
assert "58" in result.output and "115" in result.output
def test_lib_show(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["show", "115"])
validate_cliresult(result)
assert "arduino" in result.output and "atmelavr" in result.output
result = clirunner.invoke(cli, ["show", "58"])
validate_cliresult(result)
assert "energia" in result.output and "timsp430" in result.output
def test_lib_update(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["update"])
validate_cliresult(result)
assert "58" in result.output and "115" in result.output
def test_lib_uninstall(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["uninstall", "58", "115"])
validate_cliresult(result)

DinoTools/dionaea | modules/python/dionaea/__init__.py | Python | gpl-2.0 | 6,176 | 0.001457

# This file is part of the dionaea honeypot
#
# SPDX-FileCopyrightText: 2009 Markus Koetter
# SPDX-FileCopyrightText: 2016-2020 PhiBo (DinoTools)
#
# SPDX-License-Identifier: GPL-2.0-or-later
import glob
import logging
import pkgutil
import traceback
from threading import Event, Thread
from typing import Callable, Optional
import yaml
logger = logging.getLogger('dionaea')
logger.setLevel(logging.DEBUG)
loaded_submodules = []
class RegisterClasses(type):
def __init__(self, name, bases, nmspc):
super(RegisterClasses, self).__init__(name, bases, nmspc)
if not hasattr(self, 'registry'):
self.registry = set()
self.registry.add(self)
self.registry -= set(bases)
def __iter__(self):
return iter(self.registry)
class ServiceLoader(object, metaclass=RegisterClasses):
@classmethod
def start(cls, addr, iface=None):
raise NotImplementedError("do it")
@classmethod
def stop(cls, daemon):
daemon.close()
class IHandlerLoader(object, metaclass=RegisterClasses):
@classmethod
def start(cls):
raise NotImplementedError("do it")
@classmethod
def stop(cls, ihandler):
ihandler.stop()
class SubTimer(Thread):
"""
    Our own Timer class, because some attributes we have to use are undocumented in the Python stub files.
    :param interval: Wait `interval` seconds until the callback is called.
    :param function: The callback function.
    :param delay: Time in seconds before the callback is called for the first time. If not set, `interval` is used.
    :param repeat: Call `function` every `interval` seconds.
    :param args: Optional positional arguments passed to the callback function.
    :param kwargs: Optional keyword arguments passed to the callback function.
"""
def __init__(self, interval: float, function: Callable, delay: Optional[float] = None, repeat=False,
args: Optional[list] = None, kwargs: Optional[dict] = None):
Thread.__init__(self)
self.interval = interval
self.function = function
self.delay = delay
if self.delay is None:
self.delay = self.interval
self.repeat = repeat
self.args = args if args is not None else []
self.kwargs = kwargs if kwargs is not None else {}
self.finished = Event()
def cancel(self):
"""Stop the timer if it hasn't finished yet."""
self.finished.set()
def run(self) -> None:
self.finished.wait(self.delay)
if not self.finished.is_set():
self.function(*self.args, **self.kwargs)
while self.repeat and not self.finished.wait(self.interval):
if not self.finished.is_set():
self.function(*self.args, **self.kwargs)
class Timer(object):
"""
    A Timer with additional functions to cancel and reset it. It uses SubTimer() internally.
    :param interval: Wait `interval` seconds until the callback is called.
    :param function: The callback function.
    :param delay: Time in seconds before the callback is called for the first time. If not set, `interval` is used.
    :param repeat: Call `function` every `interval` seconds.
    :param args: Optional positional arguments passed to the callback function.
    :param kwargs: Optional keyword arguments passed to the callback function.
"""
def __init__(self, interval: float, function: Callable, delay: Optional[float] = None, repeat=False,
args: Optional[list] = None, kwargs: Optional[dict] = None):
self.interval = interval
self.function = function
self.delay = delay
        if self.delay is None:
self.delay = self.interval
self.repeat = repeat
self.args = args if args is not None else []
self.kwargs = kwargs if kwargs is not None else {}
self._timer: Optional[SubTimer] = None
def start(self) -> None:
"""Start the Timer"""
self._timer = SubTimer(
interval=self.interval,
function=self.function,
delay=self.delay,
repeat=self.repeat,
args=self.args,
kwargs=self.kwargs,
)
self._timer.start()
def cancel(self) -> None:
"""Cancel the Timer"""
if self._timer:
self._timer.cancel()
def reset(self) -> None:
"""Restart the Timer"""
self.cancel()
self.start()
def load_submodules(base_pkg=None):
if base_pkg is None:
import dionaea as base_pkg
prefix = base_pkg.__name__ + "."
for importer, modname, ispkg in pkgutil.iter_modules(base_pkg.__path__, prefix):
if modname in loaded_submodules:
continue
logger.info("Import module %s", modname)
try:
__import__(modname, fromlist="dummy")
except Exception as e:
logger.warning("Error loading module: {}".format(str(e)))
for msg in traceback.format_exc().split("\n"):
logger.warning(msg.rstrip())
loaded_submodules.append(modname)
def load_config_from_files(filename_patterns):
configs = []
for filename_pattern in filename_patterns:
for filename in glob.glob(filename_pattern):
fp = open(filename)
try:
file_configs = yaml.safe_load(fp)
except yaml.YAMLError as e:
if hasattr(e, 'problem_mark'):
mark = e.problem_mark
logger.error(
"Error while parsing config file '%s' at line: %d column: %d message: '%s'",
filename,
mark.line + 1,
mark.column + 1,
e.problem
)
if e.context is not None:
logger.debug("Parser(context): %s" % e.context)
else:
logger.error("Unknown error while parsing config file '%s'", filename)
# Skip processing
continue
if isinstance(file_configs, (tuple, list)):
configs += file_configs
return configs
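
A brief, hypothetical usage sketch of the Timer class from this excerpt (the callback name and timings are illustrative, not part of the source):

def on_tick(name):
    print("tick:", name)

# Fire on_tick("demo") after 1 second, then every 5 seconds until cancelled.
t = Timer(interval=5.0, function=on_tick, delay=1.0, repeat=True, args=["demo"])
t.start()
# ... later, stop it:
t.cancel()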

njsmith/partiwm | xpra/scripts/main.py | Python | gpl-2.0 | 12,235 | 0.002452

# This file is part of Parti.
# Copyright (C) 2008 Nathaniel Smith <njs@pobox.com>
# Parti is released under the terms of the GNU GPL v2, or, at your option, any
# later version. See the file COPYING for details.
import gobject
import sys
import os
import socket
import time
from optparse import OptionParser
import logging
from subprocess import Popen, PIPE
import xpra
from xpra.bencode import bencode
from xpra.dotxpra import DotXpra
from xpra.platform import (XPRA_LOCAL_SERVERS_SUPPORTED,
DEFAULT_SSH_CMD,
GOT_PASSWORD_PROMPT_SUGGESTION)
from xpra.protocol import TwoFileConnection, SocketConnection
def nox():
if "DISPLAY" in os.environ:
del os.environ["DISPLAY"]
# This is an error on Fedora/RH, so make it an error everywhere so it will
# be noticed:
import warnings
warnings.filterwarnings("error", "could not open display")
def main(script_file, cmdline):
#################################################################
## NOTE NOTE NOTE
##
## If you modify anything here, then remember to update the man page
## (xpra.1) as well!
##
## NOTE NOTE NOTE
#################################################################
if XPRA_LOCAL_SERVERS_SUPPORTED:
start_str = "\t%prog start DISPLAY\n"
list_str = "\t%prog list\n"
upgrade_str = "\t%prog upgrade DISPLAY"
note_str = ""
else:
start_str = ""
list_str = ""
upgrade_str = ""
note_str = "(This xpra install does not support starting local servers.)"
parser = OptionParser(version="xpra v%s" % xpra.__version__,
usage="".join(["\n",
                                         start_str,
"\t%prog attach [DISPLAY]\n",
"\t%prog stop [DISPLAY]\n",
list_str,
upgrade_str,
note_str]))
if XPRA_LOCAL_SERVERS_SUPPORTED:
parser.add_option("--start-child", action="append",
dest="children", metavar="CMD",
help="program to spawn in new server (may be repeated)")
parser.add_option("--exit-with-children", action="store_true",
dest="exit_with_children", default=False,
help="Terminate server when --start-child command(s) exit")
parser.add_option("--no-daemon", action="store_false",
dest="daemon", default=True,
help="Don't daemonize when running as a server")
parser.add_option("--xvfb", action="store",
dest="xvfb", default="Xvfb", metavar="CMD",
help="How to run the headless X server (default: '%default')")
parser.add_option("--bind-tcp", action="store",
dest="bind_tcp", default=None,
metavar="[HOST]:PORT",
help="Listen for connections over TCP (insecure)")
parser.add_option("-z", "--compress", action="store",
dest="compression_level", type="int", default=3,
metavar="LEVEL",
help="How hard to work on compressing data."
+ " 0 to disable compression,"
+ " 9 for maximal (slowest) compression. Default: %default.")
parser.add_option("--ssh", action="store",
dest="ssh", default=DEFAULT_SSH_CMD, metavar="CMD",
help="How to run ssh (default: '%default')")
parser.add_option("--remote-xpra", action="store",
dest="remote_xpra", default=".xpra/run-xpra",
metavar="CMD",
help="How to run xpra on the remote host (default: '%default')")
parser.add_option("-d", "--debug", action="store",
dest="debug", default=None, metavar="FILTER1,FILTER2,...",
help="List of categories to enable debugging for (or \"all\")")
(options, args) = parser.parse_args(cmdline[1:])
if not args:
parser.error("need a mode")
logging.root.setLevel(logging.INFO)
if options.debug is not None:
categories = options.debug.split(",")
for cat in categories:
if cat.startswith("-"):
logging.getLogger(cat[1:]).setLevel(logging.INFO)
if cat == "all":
logger = logging.root
else:
logger = logging.getLogger(cat)
logger.setLevel(logging.DEBUG)
logging.root.addHandler(logging.StreamHandler(sys.stderr))
mode = args.pop(0)
if mode in ("start", "upgrade") and XPRA_LOCAL_SERVERS_SUPPORTED:
nox()
from xpra.scripts.server import run_server
run_server(parser, options, mode, script_file, args)
elif mode == "attach":
try:
run_client(parser, options, args)
except KeyboardInterrupt:
sys.stdout.write("Exiting on keyboard interrupt\n")
elif mode == "stop":
nox()
run_stop(parser, options, args)
elif mode == "list" and XPRA_LOCAL_SERVERS_SUPPORTED:
run_list(parser, options, args)
elif mode == "_proxy" and XPRA_LOCAL_SERVERS_SUPPORTED:
nox()
run_proxy(parser, options, args)
else:
parser.error("invalid mode '%s'" % mode)
def parse_display_name(parser, opts, display_name):
if display_name.startswith("ssh:"):
desc = {
"type": "ssh",
"local": False
}
sshspec = display_name[len("ssh:"):]
if ":" in sshspec:
(desc["host"], desc["display"]) = sshspec.split(":", 1)
desc["display"] = ":" + desc["display"]
desc["display_as_args"] = [desc["display"]]
else:
desc["host"] = sshspec
desc["display"] = None
desc["display_as_args"] = []
desc["ssh"] = opts.ssh.split()
desc["full_ssh"] = desc["ssh"] + ["-T", desc["host"]]
desc["remote_xpra"] = opts.remote_xpra.split()
desc["full_remote_xpra"] = desc["full_ssh"] + desc["remote_xpra"]
return desc
elif display_name.startswith(":"):
desc = {
"type": "unix-domain",
"local": True,
"display": display_name,
}
return desc
elif display_name.startswith("tcp:"):
desc = {
"type": "tcp",
"local": False,
}
host_spec = display_name[4:]
(desc["host"], port_str) = host_spec.split(":", 1)
desc["port"] = int(port_str)
if desc["host"] == "":
desc["host"] = "127.0.0.1"
return desc
else:
parser.error("unknown format for display name")
def pick_display(parser, opts, extra_args):
if len(extra_args) == 0:
if not XPRA_LOCAL_SERVERS_SUPPORTED:
parser.error("need to specify a display")
# Pick a default server
sockdir = DotXpra()
servers = sockdir.sockets()
live_servers = [display
for (state, display) in servers
if state is DotXpra.LIVE]
if len(live_servers) == 0:
parser.error("cannot find a live server to connect to")
elif len(live_servers) == 1:
return parse_display_name(parser, opts, live_servers[0])
else:
parser.error("there are multiple servers running, please specify")
elif len(extra_args) == 1:
return parse_display_name(parser, opts, extra_args[0])
else:
parser.error("too many arguments")
def _socket_connect(sock, target):
try:
sock.connect(target)
except socket.error, e:
sys.exit("Connection failed: %s" % (e,))
return SocketConnection(sock)
def connect_or_fail(display_desc):
if display_desc["type"] == "ssh":
cmd = (display_desc["full_remote_xpra"]
+ ["_proxy"] + display_desc["display_as

pcingola/schemas | tools/sphinx/avpr2rest.py | Python | apache-2.0 | 5,874 | 0.016513

import sys
import json
import os
import re
import argparse
def get_file_locations():
parser = argparse.ArgumentParser()
parser.add_argument('input', help='Input AVPR filename(s)', nargs='+')
parser.add_argument('output', help='Output directory')
args = parser.parse_args()
return (args.input, args.output)
def typename(typeobject):
if isinstance(typeobject, list):
union_names = [typename(item) for item in typeobject]
return '|'.join(union_names)
elif isinstance(typeobject, dict):
if typeobject['type'] == 'array':
return 'array<%s>' % typename(typeobject['items'])
elif typeobject['type'] == 'map':
return 'map<%s>' % typename(typeobject['values'])
elif isinstance(typeobject, basestring):
return typeobject
raise ValueError
def cleanup_doc(doc,indent=0):
return '\n'.join([' '*indent + line for line in doc.split('\n')])
if __name__ == '__main__':
avpr_filenames, rest_directory = get_file_locations()
for avpr_filename in avpr_filenames:
base_filename = os.path.basename(avpr_filename)
name = os.path.splitext(base_filename)[0]
rest_filename = os.path.join(rest_directory, name+'.rst')
with open(avpr_filename,'r') as f:
data = json.load(f)
output = data['protocol'] + '\n'
output += '*' * len(data['protocol']) + '\n\n'
if 'doc' in data:
output += cleanup_doc(data['doc']) + '\n\n'
for message_name in data['messages']:
message_def = data['messages'][message_name]
doc = message_def['doc']
# process formal parameters ('request')
request = message_def['request']
# collect the names
param_names = []
for param in request:
param_names.append(param['name'])
response = message_def['response']
            errors = message_def['errors']
output += " .. function:: %s(%s)\n\n" % (message_name,
', '.join(param_names))
for param in request:
output += " :param %s: %s: %s\n" % (param['name'], param['type'],
param['doc'])
output += " :return type: %s\n" % response
output += " :throws: %s\n\n" % ', '.join(errors)
output += cleanup_doc(doc)
output += "\n\n"
for item in data['types']:
output += '.. avro:%s:: %s\n\n' % (item['type'], item['name'])
if item['type'] == 'record':
for field in item['fields']:
output += ' :field %s:\n' % field['name']
if 'doc' in field:
output += cleanup_doc(field['doc'],indent=4) + '\n'
output += ' :type %s: %s\n' % (field['name'], typename(field['type']))
output += '\n'
if item['type'] == 'enum':
output += ' :symbols: %s\n' % '|'.join(item['symbols'])
if item['type'] == 'fixed':
output += ' :size: %s\n' % item['size']
if 'doc' in item:
output += cleanup_doc(item['doc'],indent=2) + '\n\n'
with open(rest_filename,'w') as f:
f.write(output)
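
For reference, a few illustrative calls to the typename() helper above (the type objects are made-up Avro schema fragments, not taken from a real .avpr file):

typename('string')                                     # -> 'string'
typename({'type': 'array', 'items': 'int'})            # -> 'array<int>'
typename(['null', {'type': 'map', 'values': 'long'}])  # -> 'null|map<long>'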

Gazing/Frawt | django/frawt/api/urls.py | Python | mit | 253 | 0.003953

from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='api_index'),
url(r'^time$', views.get_server_time, name='api_time'),
    url(r'^rooms/available', views.find_available, name='api_available'),
]

TrentFranks/ssNMR-Topspin-Python | LoadExp.py | Python | mit | 1,871 | 0.02031

"""
Load appropriate Pulse Program and acquisition parameters
Arguments:
-1D: load nD experiment as a 1D
-2D: load nD experiment as a 2D (unless 1D experiment)
-3D: load nD experiment as a 3D (unless 1D, or 2D then highest)
-CC, hCC: load a 2D CC experiment (default to DARR)
More to come when it starts working
W.T. Franks FMP Berlin
"""
import math
import sys
import os
from sys import argv
sys.path.append(root.UtilPath.getTopspinHome()+ '/exp/stan/nmr/py/BioPY/modules/')
import LoadExp as Load
import TS_Version as Ver
WdB="W"
if Ver.get()[1] == "2": WdB="dB"
cmds=argv
# JAVA GUI
Load.CCpanel()
"""
# define a frame with buttons
button1 = JButton('FT', actionPerformed = execute_ft)
button2 = JButton('TD', actionPerformed = get_td)
frame = JFrame('TopSpin / Python GUI Example') # create window with title
frame.setSize(200, 100) # set window size x, y
frame.setLayout(FlowLayout()) # layout manager for horizontal alignment
frame.add(button1)
frame.add(button2)
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
frame.setVisible(True) SELECT("Holy Buttons Batman","Stuff",\
["Continue", "Finished","Button1","Bu
|
tton2"]) == 1
# Variables to track merged elements
Hhp, Chp, Nhp, HDec, hC, hN, NCa, NCo, CH, hhC, Nh, CX = 0,0,0,0,0,0,0,0,0,0,0,0
MAS, Phases = 0,0
########################
# Read in preferences #
########################
i=2
if len(cmds) <= 2 : help()
if len(cmds) >= 2 :
for cmd in cmds[1:]:
if cmd.find('-1D') >=0 or cmd.find('-1d') >=0:
nD=1
if cmd.find('-2D') >=0 or cmd.find('-2d') >=0:
nD=2
if cmd.find('-3D') >=0 or cmd.find('-3d') >=0:
nD=3
if cmd.find('-ex') >=0 or cmd.find('-EXPNO') >=0 or cmd.find('-EX') >=0 :
expno=int(cmds[i])
SkipFileDialog=1
if cmd.find('-q') >=0 or cmd.find('-Q') >=0 or cmd.find('-qt') >=0 or cmd.find('-QT') >=0 :
quiet=1
i=i+1
"""

Johnzero/OE7 | openerp/addons-fg/fg_account/report/period_check.py | Python | agpl-3.0 | 6,202 | 0.030795

# -*- coding: utf-8 -*-
import tools
from osv import fields, osv
class reconcile_item(osv.osv_memory):
_name = "fg_account.reconcile.item"
_columns = {
'ref_doc':fields.reference('单据', selection=[('fg_sale.order','销售订单'),('fg_account.bill','收款单')],
size=128, readonly=True),
'o_date': fields.date('单据日期', readonly=True),
'name':fields.char('单号', size=24),
'o_partner': fields.many2one('res.partner', '客户', readonly=True),
't':fields.char('项目', size=12, readonly=True),
'reconciled':fields.boolean('已对账', readonly=True),
'cleared':fields.boolean('已清账', readonly=True),
'amount': fields.float('金额', digits=(16,4), readonly=True),
'balance':fields.float('余额', digits=(16,4), readonly=True),
'note':fields.text('附注'),
}
_order = 'o_date asc'
def button_view(self, cr, uid, ids, context=None):
record = self.browse(cr, uid, ids)[0]
r = {
'type': 'ir.actions.act_window',
'name': '查看单据',
'view_mode': 'form',
'view_type': 'form',
'res_model': record.ref_doc._table_name,
'res_id': record.ref_doc.id,
'target': 'new',
'context': context,
}
#if record.ref_doc._table_name == 'fg_account.bill':
# r['res_id'] = record.id - 1000000000
#
#print r
return r
class period_check(osv.osv):
_name = "fg_account.period.check"
_auto = False
_rec_name = 'ref_doc'
_columns = {
'ref_doc':fields.reference('单据', selection=[('fg_sale.order','销售订单'),('fg_account.bill','收款单')],
size=128, readonly=True),
'o_date': fields.date('单据日期', readonly=True),
'name':fields.char('单号', size=24),
'o_partner': fields.many2one('res.partner', '客户', readonly=True),
't':fields.char('项目', size=12, readonly=True),
'reconciled':fields.boolean('已对账', readonly=True),
'cleared':fields.boolean('已清账', readonly=True),
'amount': fields.float('金额', digits=(16,4), readonly=True),
'due_date_from':fields.function(lambda *a,**k:{}, method=True, type='date',string="开始日期"),
'due_date_to':fields.function(lambda *a,**k:{}, method=True, type='date',string="结束日期"),
'note':fields.text('附注'),
}
_order = 'o_date asc'
def button_view(self, cr, uid, ids, context=None):
record = self.browse(cr, uid, ids)[0]
r = {
'type': 'ir.actions.act_window',
'name': '查看单据',
'view_mode': 'form',
'view_type': 'form',
'res_model': record.ref_doc._table_name,
'res_id': record.id,
'target': 'new',
'context': context,
}
if record.ref_doc._table_name == 'fg_account.bill':
r['res_id'] = record.id - 1000000000
return r
def button_clear(self, cr, uid, ids, context=None):
        order_obj = self.pool.get('fg_sale.order')
#this should all be order.
#check_record's id IS the id of order.
order_obj.write(cr, uid, ids, {'clear':True})
return True
def button_unclear(self, cr, uid, ids, context=None):
order_obj = self.pool.get('fg_sale.order')
#this should all be order.
#check_record's id IS the id of order.
order_obj.write(cr, uid, ids, {'clear':False})
return True
def init(self, cr):
tools.drop_view_if_exists(cr, 'fg_account_period_check')
cr.execute("""
create or replace view fg_account_period_check as (
(
SELECT
o."id" AS ID,
o.name as name,
'fg_sale.order,' || o."id" AS ref_doc,
o.date_order AS o_date,
o.partner_id AS o_partner,
'发货额' AS T,
o.reconciled AS reconciled,
SUM(line.subtotal_amount)AS amount,
o.note AS note,
o.clear as cleared
FROM
fg_sale_order_line line
JOIN fg_sale_order o ON o."id" = line.order_id
WHERE
o."state" = 'done'
AND NOT o.minus
GROUP BY
o. ID,
o."name",
o.date_confirm,
o.partner_id
)
UNION ALL
(
SELECT
o."id" AS ID,
o.name as name,
'fg_sale.order,' || o."id" AS ref_doc,
o.date_order AS o_date,
o.partner_id AS o_partner,
'退货' AS T,
o.reconciled AS reconciled,
SUM(line.subtotal_amount)AS amount,
o.note AS note,
o.clear as cleared
FROM
fg_sale_order_line line
JOIN fg_sale_order o ON o."id" = line.order_id
WHERE
o."state" = 'done'
AND o.minus
GROUP BY
o. ID,
o."name",
o.date_confirm,
o.partner_id
)
UNION ALL
(
SELECT
(bill."id"+ 1000000000) AS ID,
bill.name as name,
'fg_account.bill,' || bill."id" AS ref_doc,
bill.date_check AS o_date,
bill.partner_id AS o_parnter,
cate."name" AS T,
bill.reconciled AS reconciled,
(0-bill.amount) AS amount,
bill.note AS note,
False as cleared
FROM
fg_account_bill bill
JOIN fg_account_bill_category cate ON bill.category_id = cate. ID
WHERE
bill."state" IN('check', 'done')
)
ORDER BY id desc
)
""")

iafan/zing | pootle/runner.py | Python | gpl-3.0 | 11,729 | 0

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
import sys
from argparse import SUPPRESS, ArgumentParser
from django.conf import settings
from django.core import management
import syspath_override  # noqa
#: Length for the generated :setting:`SECRET_KEY`
KEY_LENGTH = 50
#: Default path for the settings file
DEFAULT_SETTINGS_PATH = '~/.zing/zing.conf'
#: Template that will be used to initialize settings from
SETTINGS_TEMPLATE_FILENAME = 'settings/90-local.conf.template'
# Python 2+3 support for input()
if sys.version_info[0] < 3:
input = raw_input
def add_help_to_parser(parser):
parser.add_help = True
parser.add_argument("-h", "--help",
action="help", default=SUPPRESS,
help="Show this help message and exit")
def init_settings(settings_filepath, template_filename,
db="sqlite", db_name="dbs/zing.db", db_user="",
db_password="", db_host="", db_port=""):
"""Initializes a sample settings file for new installations.
:param settings_filepath: The target file path where the initial settings
will be written to.
:param template_filename: Template file used to initialize settings from.
:param db: Database engine to use
(default=sqlite, choices=[mysql, postgresql]).
:param db_name: Database name (default: zingdb) or path to database file
if using sqlite (default: dbs/zing.db)
:param db_user: Name of the database user. Not used with sqlite.
:param db_password: Password for the database user. Not used with sqlite.
:param db_host: Database host. Defaults to localhost. Not used with sqlite.
:param db_port: Database port. Defaults to backend default. Not used with
sqlite.
"""
from base64 import b64encode
dirname = os.path.dirname(settings_filepath)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
if db == "sqlite":
db_name = "working_path('%s')" % (db_name or "dbs/zing.db")
db_user = db_password = db_host = db_port = "''"
else:
db_name = "'%s'" % (db_name or "zingdb")
db_user = "'%s'" % (db_user or "zing")
db_password = "'%s'" % db_password
db_host = "'%s'" % db_host
db_port = "'%s'" % db_port
db_module = {
'sqlite': 'sqlite3',
'mysql': 'mysql',
'postgresql': 'postgresql',
}[db]
context = {
"default_key": ("'%s'"
% b64encode(os.urandom(KEY_LENGTH)).decode("utf-8")),
"db_engine": "'django.db.backends.%s'" % db_module,
"db_name": db_name,
"db_user": db_user,
"db_password": db_password,
"db_host": db_host,
"db_port": db_port,
}
with open(settings_filepath, 'w') as settings:
with open(template_filename) as template:
settings.write(
(template.read().decode("utf8") % context).encode("utf8"))
def init_command(parser, settings_template, args):
"""Parse and run the `init` command
:param parser: `argparse.ArgumentParser` instance to use for parsing
:param settings_template: Template file for initializing settings from.
:param args: Arguments to call init command with.
"""
src_dir = os.path.abspath(os.path.dirname(__file__))
add_help_to_parser(parser)
parser.add_argument("--db",
default="sqlite",
choices=['sqlite', 'mysql', 'postgresql'],
help=(u"Use the specified database backend (default: "
u"%(default)s)."))
parser.add_argument("--db-name", default="",
help=(u"Database name (default: 'zingdb') or path "
u"to database file if using sqlite (default: "
u"'%s/dbs/zing.db')" % src_dir))
parser.add_argument("--db-user", default="",
help=(u"Name of the database user. Not used with "
u"sqlite."))
parser.add_argument("--db-host", default="",
help=(u"Database host. Defaults to localhost. Not "
u"used with sqlite."))
parser.add_argument("--db-port", default="",
help=(u"Database port. Defaults to backend default. "
u"Not used with sqlite."))
args, remainder_ = parser.parse_known_args(args)
config_path = os.path.expanduser(args.config)
if os.path.exists(config_path):
resp = None
if args.noinput:
resp = 'n'
else:
resp = input("File already exists at %r, overwrite? [Ny] "
% config_path).lower()
if resp not in ("y", "yes"):
print("File already exists, not overwriting.")
exit(2)
try:
init_settings(config_path, settings_template,
db=args.db, db_name=args.db_name, db_user=args.db_user,
db_host=args.db_host, db_port=args.db_port)
except (IOError, OSError) as e:
raise e.__class__('Unable to write default settings file to %r'
% config_path)
if args.db in ['mysql', 'postgresql']:
print("Configuration file created at %r. Your database password is "
"not currently set . You may want to update the database "
"settings now" % config_path)
else:
print("Configuration file created at %r" % config_path)
def set_sync_mode(noinput=False):
"""Sets ASYNC = False on all redis worker queues
"""
from .core.utils.redis_rq import rq_workers_are_running
if rq_workers_are_running():
redis_warning = ("\nYou currently have RQ workers running.\n\n"
"Running in synchronous mode may conflict with jobs "
"that are dispatched to your workers.\n\n"
"It is safer to stop any workers before using "
"synchronous commands.\n\n")
if noinput:
print("Warning: %s" % redis_warning)
else:
resp = input("%sDo you wish to proceed? [Ny] " % redis_warning)
if resp not in ("y", "yes"):
print("RQ workers running, not proceeding.")
exit(2)
# Update settings to set queues to ASYNC = False.
for q in settings.RQ_QUEUES.itervalues():
q['ASYNC'] = False
def configure_app(project, config_path, django_settings_module, runner_name):
"""Determines which settings file to use and sets environment variables
accordingly.
:param project: Project's name. Will be used to generate the settings
environment variable.
:param config_path: The path to the user's configuration file.
:param django_settings_module: The module that ``DJANGO_SETTINGS_MODULE``
will be set to.
:param runner_name: The name of the running script.
"""
settings_envvar = project.upper() + '_SETTINGS'
# Normalize path and expand ~ constructions
config_path = os.path.normpath(
os.path.abspath(os.path.expanduser(config_path),))
if not (os.path.exists(config_path) or
os.environ.get(settings_envvar, None)):
print(u"Configuration file does not exist at %r or "
u"%r environment variable has not been set.\n"
u"Use '%s init' to initialize the configuration file." %
(config_path, settings_envvar, runner_name))
sys.exit(2)
os.environ.setdefault(settings_envvar, config_path)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', django_settings_module)
def run_app(project, default_settings_path, settings_template,
django_settings_module):
"""Wrapper around django-admin.py.
:param project: Project's name.
:para

jrocketfingers/sanic | sanic/testing.py | Python | mit | 3,419 | 0.000585

import traceback
from sanic.log import log
HOST = '127.0.0.1'
PORT = 42101
class SanicTestClient:
def __init__(self, app):
self.app = app
async def _local_request(self, method, uri, cookies=None, *args, **kwargs):
import aiohttp
        if uri.startswith(('http:', 'https:', 'ftp:', 'ftps:', '//')):
url = uri
else:
url = 'http://{host}:{port}{uri}'.format(
host=HOST, port=PORT, uri=uri)
log.info(url)
conn = aiohttp.TCPConnector(verify_ssl=False)
async with aiohttp.ClientSession(
cookies=cookies, connector=conn) as session:
async with getattr(
session, method.lower())(url, *args, **kwargs) as response:
try:
response.text = await response.text()
except UnicodeDecodeError as e:
response.text = None
response.body = await response.read()
return response
def _sanic_endpoint_test(
self, method='get', uri='/', gather_request=True,
debug=False, server_kwargs={},
*request_args, **request_kwargs):
results = [None, None]
exceptions = []
if gather_request:
def _collect_request(request):
if results[0] is None:
results[0] = request
self.app.request_middleware.appendleft(_collect_request)
@self.app.listener('after_server_start')
async def _collect_response(sanic, loop):
try:
response = await self._local_request(
method, uri, *request_args,
**request_kwargs)
results[-1] = response
except Exception as e:
log.error(
'Exception:\n{}'.format(traceback.format_exc()))
exceptions.append(e)
self.app.stop()
self.app.run(host=HOST, debug=debug, port=PORT, **server_kwargs)
        self.app.listeners['after_server_start'].pop()
if exceptions:
raise ValueError("Exception during request: {}".format(exceptions))
if gather_request:
try:
request, response = results
return request, response
except:
raise ValueError(
"Request and response object expected, got ({})".format(
results))
else:
try:
return results[-1]
except:
raise ValueError(
"Request object expected, got ({})".format(results))
def get(self, *args, **kwargs):
return self._sanic_endpoint_test('get', *args, **kwargs)
def post(self, *args, **kwargs):
return self._sanic_endpoint_test('post', *args, **kwargs)
def put(self, *args, **kwargs):
return self._sanic_endpoint_test('put', *args, **kwargs)
def delete(self, *args, **kwargs):
return self._sanic_endpoint_test('delete', *args, **kwargs)
def patch(self, *args, **kwargs):
return self._sanic_endpoint_test('patch', *args, **kwargs)
def options(self, *args, **kwargs):
return self._sanic_endpoint_test('options', *args, **kwargs)
def head(self, *args, **kwargs):
return self._sanic_endpoint_test('head', *args, **kwargs)
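
A hypothetical test using the SanicTestClient above (the app, route, and assertion are illustrative and not part of the excerpt):

from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)

@app.route('/')
async def index(request):
    return text('ok')

# Runs the app on 127.0.0.1:42101, issues the request, then stops the server.
request, response = SanicTestClient(app).get('/')
assert response.text == 'ok'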

guschmue/tensorflow | tensorflow/python/ops/control_flow_grad.py | Python | apache-2.0 | 9,075 | 0.009477

# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in control_flow_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import,undefined-variable
from tensorflow.python.ops.control_flow_ops import *
from tensorflow.python.ops.gen_control_flow_ops import *
# pylint: enable=wildcard-import
def _SwitchGrad(op, *grad):
"""Gradients for a Switch op is calculated using a Merge op.
If the switch is a loop switch, it will be visited twice. We create
the merge on the first visit, and update the other input of the merge
on the second visit. A next_iteration is also added on second visit.
"""
graph = ops.get_default_graph()
# pylint: disable=protected-access
op_ctxt = op._get_control_flow_context()
  grad_ctxt = graph._get_control_flow_context()
# pylint: enable=protected-access
if isinstance(op_ctxt, WhileContext):
merge_grad = grad_ctxt.grad_state.switch_map.get(op)
if merge_grad is not None:
# This is the second time this Switch is visited. It comes from
      # the non-exit branch of the Switch, so update the second input
# to the Merge.
# TODO(yuanbyu): Perform shape inference with this new input.
if grad[1] is not None:
# pylint: disable=protected-access
control_flow_ops._AddNextAndBackEdge(merge_grad, grad[1])
# pylint: enable=protected-access
return None, None
elif grad[0] is not None:
# This is the first time this Switch is visited. It comes from
# the Exit branch, which is grad[0]. grad[1] is empty at this point.
# Use grad[0] for both inputs to merge for now, but update the second
# input of merge when we see this Switch the second time.
merge_grad = merge([grad[0], grad[0]], name="b_switch")[0]
grad_ctxt.grad_state.switch_map[op] = merge_grad
return merge_grad, None
else:
# This is the first time this Switch is visited. It comes from the
# Identity branch. Such a Switch has `None` gradient for the Exit branch,
# meaning the output is not differentiable.
return None, None
elif isinstance(op_ctxt, CondContext):
zero_grad = grad[1 - op_ctxt.branch]
# At this point, we have created zero_grad guarded by the right switch.
# Unfortunately, we may still get None here for not trainable data types.
if zero_grad is None:
return None, None
return merge(grad, name="cond_grad")[0], None
else:
false_grad = switch(grad[0], op.inputs[1])[0]
true_grad = switch(grad[1], op.inputs[1])[1]
return merge([false_grad, true_grad])[0], None
ops.RegisterGradient("Switch")(_SwitchGrad)
ops.RegisterGradient("RefSwitch")(_SwitchGrad)
@ops.RegisterGradient("Merge")
def _MergeGrad(op, grad, _):
"""Gradients for a Merge op are calculated using a Switch op."""
input_op = op.inputs[0].op
graph = ops.get_default_graph()
# pylint: disable=protected-access
op_ctxt = control_flow_ops._GetOutputContext(input_op)
grad_ctxt = graph._get_control_flow_context()
# pylint: enable=protected-access
if isinstance(op_ctxt, WhileContext):
# pylint: disable=protected-access
return control_flow_ops._SwitchRefOrTensor(grad, grad_ctxt.pivot)
# pylint: enable=protected-access
elif isinstance(op_ctxt, CondContext):
pred = op_ctxt.pred
if grad_ctxt and grad_ctxt.grad_state:
# This Merge node is part of a cond within a loop.
# The backprop needs to have the value of this predicate for every
# iteration. So we must have its values accumulated in the forward, and
# use the accumulated values as the predicate for this backprop switch.
grad_state = grad_ctxt.grad_state
real_pred = grad_state.history_map.get(pred.name)
if real_pred is None:
# Remember the value of pred for every iteration.
grad_ctxt = grad_state.grad_context
grad_ctxt.Exit()
history_pred = grad_state.AddForwardAccumulator(pred)
grad_ctxt.Enter()
# Add the stack pop op. If pred.op is in a (outer) CondContext,
# the stack pop will be guarded with a switch.
real_pred = grad_state.AddBackpropAccumulatedValue(history_pred, pred)
grad_state.history_map[pred.name] = real_pred
pred = real_pred
# pylint: disable=protected-access
return control_flow_ops._SwitchRefOrTensor(grad, pred, name="cond_grad")
# pylint: enable=protected-access
else:
num_inputs = len(op.inputs)
cond = [math_ops.equal(op.outputs[1], i) for i in xrange(num_inputs)]
# pylint: disable=protected-access
return [control_flow_ops._SwitchRefOrTensor(grad, cond[i])[1]
for i in xrange(num_inputs)]
# pylint: enable=protected-access
@ops.RegisterGradient("RefMerge")
def _RefMergeGrad(op, grad, _):
return _MergeGrad(op, grad, _)
@ops.RegisterGradient("Exit")
def _ExitGrad(op, grad):
"""Gradients for an exit op are calculated using an Enter op."""
graph = ops.get_default_graph()
# pylint: disable=protected-access
grad_ctxt = graph._get_control_flow_context()
# pylint: enable=protected-access
if not grad_ctxt.back_prop:
# The flag `back_prop` is set by users to suppress gradient
# computation for this loop. If the attribute `back_prop` is false,
# no gradient computation.
return None
# pylint: disable=protected-access
if op._get_control_flow_context().grad_state:
raise TypeError("Second-order gradient for while loops not supported.")
# pylint: enable=protected-access
if isinstance(grad, ops.Tensor):
grad_ctxt.AddName(grad.name)
else:
if not isinstance(grad, (ops.IndexedSlices, sparse_tensor.SparseTensor)):
raise TypeError("Type %s not supported" % type(grad))
grad_ctxt.AddName(grad.values.name)
grad_ctxt.AddName(grad.indices.name)
dense_shape = grad.dense_shape
if dense_shape is not None:
grad_ctxt.AddName(dense_shape.name)
grad_ctxt.Enter()
# pylint: disable=protected-access
result = control_flow_ops._Enter(
grad, grad_ctxt.name, is_constant=False,
parallel_iterations=grad_ctxt.parallel_iterations,
name="b_exit")
# pylint: enable=protected-access
grad_ctxt.loop_enters.append(result)
grad_ctxt.Exit()
return result
ops.RegisterGradient("RefExit")(_ExitGrad)
@ops.RegisterGradient("NextIteration")
def _NextIterationGrad(_, grad):
"""A forward next_iteration is translated into a backprop identity.
Note that the backprop next_iteration is added in switch grad.
"""
return grad
@ops.RegisterGradient("RefNextIteration")
def _RefNextIterationGrad(_, grad):
return _NextIterationGrad(_, grad)
@ops.RegisterGradient("Enter")
def _EnterGrad(op, grad):
"""Gradients for an Enter are calculated using an Exit op.
For loop variables, grad is the gradient so just add an exit.
For loop invariants, we need to add an accumulator loop.
"""
graph = ops.get_default_graph()
# pylint: disable=protected-access
grad_ctxt = graph._get_control_flow_context()
# pylint: enable=protected-access
if not grad_ctxt.back_prop:
# Skip gradient computation, if the attribute `back_prop` is false.
return grad
if grad_ct

SCUT16K/SmsSender | server/config/production_sample.py | Python | apache-2.0 | 272 | 0.003676

# coding: utf-8
from .default import Config
class ProductionConfig(Config):
# Site domain
    SITE_DOMAIN = "http://www.twtf.com"
# Db config
SQLALCHEMY_DATABASE_URI = "mysql+pymysql://dbuser:dbpass@localhost/databasename"
# Sentry
SENTRY_DSN = ''

smkr/pyclipse | plugins/org.python.pydev.jython/jysrc/pyedit_assign_params_to_attributes.py | Python | epl-1.0 | 3,468 | 0.012111

"""Assign Params to Attributes by Joel Hedlund <joel.hedlund at gmail.com>.
PyDev script for generating python code that assigns method parameter
values to attributes of self with the same name. Activates with 'a' by
default. Edit global constants ACTIVATION_STRING and WAIT_FOR_ENTER if this
does not suit your needs. See docs on the class AssignToAttribsOfSelf for
more details.
Contact the author for bug reports/feature requests.
Changed:Fabio Zadrozny (binded to Ctrl+1 too)
"""
__version__ = "1.0.1"
__copyright__ = """Available under the same conditions as PyDev.
See PyDev license for details.
http://pydev.sourceforge.net
"""
# Change this if the default does not suit your needs
ACTIVATION_STRING = 'a'
WAIT_FOR_ENTER = False
# For earlier Python versions
True, False = 1,0
# Set to True to force Jython script interpreter restart on save events.
# Useful for Jython PyDev script development, not useful otherwise.
DEBUG = False
# This is a magic trick that tells the PyDev Extensions editor about the
# namespace provided for pydev scripts:
if False:
from org.python.pydev.editor import PyEdit #@UnresolvedImport
cmd = 'command string'
editor = PyEdit
assert cmd is not None
assert editor is not None
if DEBUG and cmd == 'onSave':
from org.python.pydev.jython import JythonPlugin #@UnresolvedImport
editor.pyEditScripting.interpreter = JythonPlugin.newPythonInterpreter()
from org.eclipse.jface.action import Action #@UnresolvedImport
#=======================================================================================================================
# AssignToAttribsOfSelfAction
#=======================================================================================================================
class AssignToAttribsOfSelfAction(Action):
def __init__(self, assign_to_attribs_helper):
Action.__init__(self)
self.assign_to_attribs_helper = assign_to_attribs_helper
def run(self):
self.assign_to_attribs_helper.run()
#=======================================================================================================================
# Actually bind the actions
#=======================================================================================================================
if cmd == 'onCreateActions' or (DEBUG and cmd == 'onSave'):
from org.python.pydev.editor.correctionassist import PythonCorrectionProcessor #@UnresolvedImport
import assign_params_to_attributes_action as helper
import assign_params_to_attributes_assist
#---------------------------------------------------------------------------------------------- Bind it to Ctrl+2, a
sDescription = 'Assign method params to attribs of self'
assign_to_attribs_helper = helper.AssignToAttribsOfSelf(editor)
editor.addOfflineActionListener(
ACTIVATION_STRING, AssignToAttribsOfSelfAction(assign_to_attribs_helper), sDescription, WAIT_FOR_ENTER)
#------------------------------------------------------------------------------------------------- Bind it to Ctrl+1
ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST = 'ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST'
if not PythonCorrectionProcessor.hasAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST):
assist = assign_params_to_attributes_assist.AssistAssignParamsToAttributes()
PythonCorrectionProcessor.addAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST, assist)

gotthardp/rabbitmq-email | test/send.py | Python | mpl-2.0 | 327 | 0

#!/usr/bin/env python
import smtplib
from email.mime.text import MIMEText
me = "me@example.com"
you = "you@example.com"
msg = MIMEText("Hello world!")
msg['From'] = me
msg['To'] = you
msg['Subject'] = 'Greetings'
s = smtplib.SMTP('localhost', 2525)
s.login("guest", "guest")
s.sendmail(me, [you], msg.as_string())
s.quit()

hjanime/VisTrails | vistrails/db/versions/v0_3_1/persistence/__init__.py | Python | bsd-3-clause | 2,007 | 0.013453

###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
from vistrails.db.versions.v0_3_0.persistence import DAOList

lloy/Project | cdsagent/cdsagent/vdisk/disk.py | Python | apache-2.0 | 205 | 0

import logging
LOG = logging.getLogger(__name__)
__author__ = 'Hardy.zheng'
class DiskPoller(object):
def __init__(self):
pass
def run(self):
LOG.info('DiskPoller start...')

zhantyzgz/polaris | plugins/voicerss.py | Python | gpl-2.0 | 2,555 | 0.00274

from core.utils import *
commands = [
('/voicerss', ['language', 'text'])
]
description = 'Generates an audio file using Voice RSS API.'
shortcut = '/vr '
langs = [
'af', 'aq', 'ar', 'hy', 'ca', 'zh', 'zh-cn', 'zh-tw', 'zh-yue',
'hr', 'cs', 'da', 'nl', 'en-au', 'en-uk', 'en-us', 'eo',
'fi', 'fr', 'de', 'el', 'ht', 'hu', 'is', 'id',
'it', 'ja', 'ko', 'la', 'lv', 'mk', 'no', 'pl',
'pt', 'pt-br', 'ro', 'ru', 'sr', 'sk', 'es', 'es-es',
'es-us', 'sw', 'sv', 'ta', 'th', 'tr', 'vi', 'cy'
]
def run(m):
input = get_input(m)
if not input:
return send_message(m, lang.errors.input)
for v in langs:
if first_word(input) == v:
lang = v
text = all_but_first_word(input)
break
else:
lang = 'en-us'
text = input
url = 'https://api.voicerss.org'
params = {
'key': config.keys.voicerss,
'src': text,
'hl': lang,
'r': '2',
'c': 'ogg',
'f': '16khz_16bit_stereo'
}
jstr = requests.get(url, params=params)
if jstr.status_code != 200:
send_alert('%s\n%s' % (lang.errors.connection, jstr.text))
return send_message(
m, lang.errors.connection)
voice = download(jstr.url, params=params)
if voice:
send_voice(m, voice)
else:
send_message(m, lang.errors.download)
def inline(m):
input = get_input(m)
for v in langs:
if first_word(input) == v:
lang = v
text = all_but_first_word(input)
break
else:
lang = 'en-us'
text = input
url = 'https://api.voicerss.org'
params = {
'key': config.keys.voicerss,
'src': text,
'hl': lang,
'r': '2',
'c': 'ogg',
'f': '16khz_16bit_stereo'
}
jstr = requests.get(url, params=params)
results = []
if jstr.status_code != 200:
result = {
'type': 'article',
'id': jstr.status_code,
'title': lang.errors.connection,
'input_message_content': '%s\n%s' % (lang.errors.connection, jstr.text),
'description': jstr.text
}
results.append(result)
return
result = {
'type': 'voice',
'id': m.id,
'voice_url': jstr.url,
'title': text
}
results.append(result)
answer_inline_query(m, results)

amosnier/python_for_kids | extra_code/03_loopy_turtle_01.py | Python | gpl-3.0 | 862 | 0.006961

import turtle
turtle.clearscreen()
t = turtle.Turtle()
#turtle.tracer(0, 0)
t.fillcolor(0.9, 0.9, 0.6)
t.begin_fill()
for i in range(0, 5):
t.forward(100)
t.right(144)
t.end_fill()
t.up()
t.backward(200)
t.down()
t.fillcolor(0.7, 0.95, 0.7)
t.begin_fill()
for i in range(0, 5):
t.forward(100)
t.left(72)
t.end_fill()
t.up()
t.right(90)
t.forward(200)
t.down()
t.fillcolor(0.7, 0.9, 0)
t.begin_fill()
for i in range(0, 6):
t.forward(100)
t.left(60)
t.end_fill()
t.up()
t.left(90)
t.forward(500)
t.down()
t.fillcolor(0.95, 0, 0.5)
t.begin_fill()
for i in range(100, 0, -1):
t.forward(i)
t.left(60)
t.end_fill()
t.up()
t.left(60)
t.backward(300)
t.down()
t.fillcolor(0.80, 0, 0.7)
t.begin_fill()
for i in range(60, 120):
t.forward(180 - i)
t.left(i)
t.end_fill()
turtle.update()

andersbll/ipcv | ipcv/misc/donuts.py | Python | mit | 1,393 | 0.000718

import numpy as np
def donut(shape, radius, width, distribution='gaussian'):
    '''Generate a 2D Gaussian window of the given shape. width specifies the
    size of the Gaussian. radius specifies the distance to the origin such
    that the window becomes a ring.'''
if not distribution in ['gaussian', 'lognormal']:
raise ValueError('Invalid distribution function specified.')
h, w = shape
y = np.linspace(-h/2., h/2., h)
x = np.linspace(-w/2., w/2., w)
xv, yv = np.meshgrid(x, y)
if distribution == 'lognormal' and radius > 0:
mean = radius
var = width**2
mu = np.log(mean**2 / np.sqrt(var + mean**2))
sigma = np.sqrt(np.log(var/mean**2 + 1))
d = np.sqrt(xv**2+yv**2) + 1e-5
return 1/(d*sigma*np.sqrt(2*np.pi))*np.exp(-(np.log(d)-mu)**2
/ (2*sigma**2))
else:
sigma = width
mu = radius
return np.exp(-(np.sqrt(xv**2+yv**2) - mu)**2/(2*sigma**2))
def donuts(shape, n_donuts, radius_max, width_min, width_ratio=1.0,
distribution='gaussian'):
    radii = np.linspace(0, radius_max, n_donuts)
widths = [float(width_min)*width_ratio**i for i in range(n_donuts)]
weights = [donut(shape, r, w, distribution)
for (r, w) in zip(radii, widths)]
weights = [w/np.sum(w) for w in weights]
return weights
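
An illustrative call to donuts() above (the grid size and parameters are made up): build three normalized Gaussian rings over a 64x64 grid and check that each window sums to one.

import numpy as np

windows = donuts((64, 64), n_donuts=3, radius_max=20.0, width_min=2.0)
assert all(np.isclose(w.sum(), 1.0) for w in windows)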

dylanseago/LeagueOfLadders | leagueofladders/apps/myleague/admin.py | Python | apache-2.0 | 489 | 0.002045

from django.contrib import admin
from leagueofladders.apps.myleague.models import League, Membership
class MembershipInline(admin.TabularInline):
model = Membership
extra = 1
@admin.register(League)
class LeagueAdmin(admin.ModelAdmin):
fields = ('name', 'owner', 'is_public')
inlines = [MembershipInline]
list_display = ('name', 'owner', 'is_public', 'date_modified')
    list_filter = ['date_modified', 'is_public']
search_fields = ['name', 'owner__username']

julianwachholz/praw | tests/test_decorators.py | Python | gpl-3.0 | 311 | 0

from __future__ import print_function, unicode_literals
import unittest
from praw.decorators import restrict_access
class DecoratorTest(unittest.TestCase):
def test_require_access_failure(self):
self.assertRaises(TypeError, restrict_access, scope=None,
                          oauth_only=True)

MusculoskeletalAtlasProject/mapclient-src | mapclient/tools/ui_pluginmanagerdialog.py | Python | gpl-3.0 | 5,669 | 0.003528

# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'qt/pluginmanagerdialog.ui'
#
# Created: Wed Jan 28 16:54:28 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_PluginManagerDialog(object):
def setupUi(self, PluginManagerDialog):
PluginManagerDialog.setObjectName("PluginManagerDialog")
PluginManagerDialog.resize(567, 496)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/mapclient/images/icon-app.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
PluginManagerDialog.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(PluginManagerDialog)
self.verticalLayout.setObjectName("verticalLayout")
self.groupBox = QtGui.QGroupBox(PluginManagerDialog)
self.groupBox.setAlignment(QtCore.Qt.AlignCenter)
self.groupBox.setFlat(False)
self.groupBox.setObjectName("groupBox")
self.verticalLayout_4 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.label = QtGui.QLabel(self.groupBox)
self.label.setObjectName("label")
self.verticalLayout_4.addWidget(self.label)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.verticalLayout_3 = QtGui.QVBoxLayout()
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.directoryListing = QtGui.QListWidget(self.groupBox)
self.directoryListing.setObjectName("directoryListing")
self.verticalLayout_3.addWidget(self.directoryListing)
self.defaultPluginCheckBox = QtGui.QCheckBox(self.groupBox)
self.defaultPluginCheckBox.setChecked(True)
self.defaultPluginCheckBox.setObjectName("defaultPluginCheckBox")
self.verticalLayout_3.addWidget(self.defaultPluginCheckBox)
self.horizontalLayout.addLayout(self.verticalLayout_3)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.addButton = QtGui.QPushButton(self.groupBox)
self.addButton.setObjectName("addButton")
self.verticalLayout_2.addWidget(self.addButton)
self.removeButton = QtGui.QPushButton(self.groupBox)
self.removeButton.setObjectName("removeButton")
self.verticalLayout_2.addWidget(self.removeButton)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem)
self.reloadButton = QtGui.QPushButton(self.groupBox)
self.reloadButton.setObjectName("reloadButton")
self.verticalLayout_2.addWidget(self.reloadButton)
self.horizontalLayout.addLayout(self.verticalLayout_2)
self.verticalLayout_4.addLayout(self.horizontalLayout)
self.verticalLayout.addWidget(self.groupBox)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.advancedButton = QtGui.QPushButton(PluginManagerDialog)
self.advancedButton.setMinimumSize(QtCore.QSize(90, 0))
self.advancedButton.setObjectName("advancedButton")
self.horizontalLayout_2.addWidget(self.advancedButton)
spacerItem1 = QtGui.QSpacerItem(80, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.buttonBox = QtGui.QDialogButtonBox(PluginManagerDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.horizontalLayout_2.addWidget(self.buttonBox)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.retranslateUi(PluginManagerDialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), PluginManagerDialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), PluginManagerDialog.reject)
QtCore.QMetaObject.connectSlotsByName(PluginManagerDialog)
def retranslateUi(self, PluginManagerDialog):
PluginManagerDialog.setWindowTitle(QtGui.QApplication.translate("PluginManagerDialog", "Plugin Manager", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setTitle(QtGui.QApplication.translate("PluginManagerDialog", "Plugin Manager", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("PluginManagerDialog", "Plugin directories:", None, QtGui.QApplication.UnicodeUTF8))
self.defaultPluginCheckBox.setText(QtGui.QApplication.translate("PluginManagerDialog", "Use default plugin directory", None, QtGui.QApplication.UnicodeUTF8))
self.addButton.setText(QtGui.QApplication.translate("PluginManagerDialog", "Add Directory", None, QtGui.QApplication.UnicodeUTF8))
self.removeButton.setText(QtGui.QApplication.translate("PluginManagerDialog", "Remove Directory", None, QtGui.QApplication.UnicodeUTF8))
        self.reloadButton.setToolTip(QtGui.QApplication.translate("PluginManagerDialog", "Reload the plugins from the current plugin directories", None, QtGui.QApplication.UnicodeUTF8))
self.reloadButton.setText(QtGui.QApplication.translate("PluginManagerDialog", "Reload", None, QtGui.QApplication.UnicodeUTF8))
        self.advancedButton.setText(QtGui.QApplication.translate("PluginManagerDialog", "Advanced...", None, QtGui.QApplication.UnicodeUTF8))
from . import resources_rc
aslab/rct | higgs/branches/ros-groovy/higgs_gazebo_simulation/rqt_robot_plugins/rqt_pose_view/setup.py | Python | gpl-3.0 | 222 | 0
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=['rqt_pose_view'],
package_dir={'': 'src'}
)
setup(**d)
ncliam/serverpos | openerp/addons/mail/tests/test_mail_features.py | Python | agpl-3.0 | 59,265 | 0.006109
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from ..mail_mail import mail_mail
from ..mail_thread import mail_thread
from .common import TestMail
from openerp.tools import mute_logger, email_split, html2plaintext
from openerp.tools.mail import html_sanitize
class test_mail(TestMail):
def test_000_alias_setup(self):
""" Test basic mail.alias setup works, before trying to use them for routing """
cr, uid = self.cr, self.uid
self.user_valentin_id = self.res_users.create(cr, uid,
{'name': 'Valentin Cognito', 'email': 'valentin.cognito@gmail.com', 'login': 'valentin.cognito', 'alias_name': 'valentin.cognito'})
self.user_valentin = self.res_users.browse(cr, uid, self.user_valentin_id)
self.assertEquals(self.user_valentin.alias_name, self.user_valentin.login, "Login should be used as alias")
self.user_pagan_id = self.res_users.create(cr, uid,
{'name': 'Pagan Le Marchant', 'email': 'plmarchant@gmail.com', 'login': 'plmarchant@gmail.com', 'alias_name': 'plmarchant@gmail.com'})
self.user_pagan = self.res_users.browse(cr, uid, self.user_pagan_id)
self.assertEquals(self.user_pagan.alias_name, 'plmarchant', "If login is an email, the alias should keep only the local part")
self.user_barty_id = self.res_users.create(cr, uid,
{'name': 'Bartholomew Ironside', 'email': 'barty@gmail.com', 'login': 'b4r+_#_R3wl$$', 'alias_name': 'b4r+_#_R3wl$$'})
self.user_barty = self.res_users.browse(cr, uid, self.user_barty_id)
self.assertEquals(self.user_barty.alias_name, 'b4r+_-_r3wl-', 'Disallowed chars should be replaced by hyphens')
def test_00_followers_function_field(self):
""" Tests designed for the many2
|
many function field 'follower_ids'.
We will test to perform writes using the many2many commands 0, 3, 4,
5 and 6. """
        cr, uid, user_admin, partner_bert_id, group_pigs = self.cr, self.uid, self.user_admin, self.partner_bert_id, self.group_pigs
# Data: create 'disturbing' values in mail.followers: same res_id, other res_model; same res_model, other res_id
group_dummy_id = self.mail_group.create(cr, uid,
{'name': 'Dummy group'}, {'mail_create_nolog': True})
self.mail_followers.create(cr, uid,
{'res_model': 'mail.thread', 'res_id': self.group_pigs_id, 'partner_id': partner_bert_id})
self.mail_followers.create(cr, uid,
{'res_model': 'mail.group', 'res_id': group_dummy_id, 'partner_id': partner_bert_id})
# Pigs just created: should be only Admin as follower
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan')
# Subscribe Bert through a '4' command
group_pigs.write({'message_follower_ids': [(4, partner_bert_id)]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the only Pigs fans')
# Unsubscribe Bert through a '3' command
group_pigs.write({'message_follower_ids': [(3, partner_bert_id)]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan')
# Set followers through a '6' command
group_pigs.write({'message_follower_ids': [(6, 0, [partner_bert_id])]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the only Pigs fan')
# Add a follower created on the fly through a '0' command
group_pigs.write({'message_follower_ids': [(0, 0, {'name': 'Patrick Fiori'})]})
partner_patrick_id = self.res_partner.search(cr, uid, [('name', '=', 'Patrick Fiori')])[0]
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertEqual(follower_ids, set([partner_bert_id, partner_patrick_id]), 'Bert and Patrick should be the only Pigs fans')
# Finally, unlink through a '5' command
group_pigs.write({'message_follower_ids': [(5, 0)]})
group_pigs.refresh()
follower_ids = set([follower.id for follower in group_pigs.message_follower_ids])
self.assertFalse(follower_ids, 'Pigs group should not have fans anymore')
# Test dummy data has not been altered
fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.thread'), ('res_id', '=', self.group_pigs_id)])
follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)])
self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the follower of dummy mail.thread data')
fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', group_dummy_id)])
follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)])
self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the followers of dummy mail.group data')
def test_05_message_followers_and_subtypes(self):
""" Tests designed for the subscriber API as well as message subtypes """
cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs
# Data: message subtypes
self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_def', 'default': True, 'res_model': 'mail.group'})
self.mail_message_subtype.create(cr, uid, {'name': 'mt_other_def', 'default': True, 'res_model': 'crm.lead'})
self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_def', 'default': True, 'res_model': False})
mt_mg_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_nodef', 'default': False, 'res_model': 'mail.group'})
mt_all_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_nodef', 'default': False, 'res_model': False})
default_group_subtypes = self.mail_message_subtype.search(cr, uid, [('default', '=', True), '|', ('res_model', '=', 'mail.group'), ('res_model', '=', False)])
# ----------------------------------------
# CASE1: test subscriptions with subtypes
# ----------------------------------------
# Do: subscribe Raoul, should have default subtypes
group_pigs.message_subscribe_users([user_raoul.id])
group_pigs.refresh()
# Test: 2 followers (Admin and Raoul)
follower_ids = [follower.id for follower in group_pigs.message_follower_ids]
self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]),
'message_subscribe: Admin and Raoul should be the only 2 Pigs fans')
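For readers unfamiliar with the x2many write commands the test above exercises, here is a short reference sketch. The semantics are the standard OpenERP ORM ones; `group` and `partner_id` are hypothetical names, not taken from the file:

# x2many write command tuples, as used on fields like message_follower_ids:
#   (0, 0, values)  create a new record from `values` and link it
#   (3, id)         unlink record `id` from the relation (record is kept)
#   (4, id)         link the existing record `id`
#   (5, 0)          unlink every record from the relation
#   (6, 0, ids)     replace the whole relation with exactly `ids`
group.write({'message_follower_ids': [(4, partner_id)]})  # link one follower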
vesellov/bitdust.devel | lib/fastjsonrpc/client.py | Python | agpl-3.0 | 12,736 | 0.000236
#!/usr/bin/env python
"""
Copyright 2012 Tadeas Moravec.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============
JSONRPC Client
==============
Provides a Proxy class, that can be used for calling remote functions via
JSON-RPC.
"""
from __future__ import absolute_import
import base64
# from zope.interface import implements
from zope.interface import implementer
from twisted.internet.defer import succeed
from twisted.web.iweb import IBodyProducer
from twisted.cred.credentials import Anonymous, UsernamePassword
from twisted.internet import reactor
from twisted.internet.protocol import Protocol
from twisted.internet.defer import Deferred
from twisted.web.client import (Agent, ContentDecoderAgent, GzipDecoder,
HTTPConnectionPool)
from twisted.web.http_headers import Headers
from . import jsonrpc
class ReceiverProtocol(Protocol):
"""
Protocol for receiving the server response.
    Its only purpose is to get the HTTP request body. An instance of this
will be passed to the Response's deliverBody method.
"""
def __init__(self, finished):
"""
@type finished: t.i.d.Deferred
@param finished: Deferred to be called when we've got all the data.
"""
self.body = b''
self.finished = finished
def dataReceived(self, data):
"""
Appends data to the internal buffer.
@type data: str (bytearray, buffer?)
@param data: Data from server. 'Should' be (a part of) JSON
"""
self.body += data
def connectionLost(self, reason):
"""
        Fires the finished's callback with data we've received.
@type reason: t.p.f.Failure
@param reason: Failure, wrapping several potential reasons. It can
wrap t.w.c.ResponseDone, in which case everything is OK. It can wrap
t.w.h.PotentialDataLoss. Or it can wrap an Exception, in case of an
error.
@TODO inspect reason for failures
"""
self.finished.callback(self.body)
@implementer(IBodyProducer)
class StringProducer(object):
"""
There's no FileBodyProducer in Twisted < 12.0.0 See
http://twistedmatrix.com/documents/current/web/howto/client.html for
details about this class.
"""
# implements(IBodyProducer)
def __init__(self, body):
self.body = body
self.length = len(body)
def startProducing(self, consumer):
consumer.write(self.body)
return succeed(None)
def pauseProducing(self):
pass
def stopProducing(self):
pass
class ProxyFactory(object):
"""
A factory to create Proxy objects.
Passed parameters are used to create all proxies. Supports creating
proxies with a connection pool shared between them.
"""
def __init__(self, **kwargs):
"""
@type version: int
@param version: Which JSON-RPC version to use? The default is 1.0.
@type connectTimeout: float
@param connectTimeout: Connection timeout. Note that we don't connect
when creating this object, but in callRemote, so the timeout
will apply to callRemote.
@type credentials: twisted.cred.credentials.ICredentials
@param credentials: Credentials for basic HTTP authentication.
Supported are Anonymous and UsernamePassword classes.
If None then t.c.c.Anonymous object is used as default.
@type contextFactory: twisted.internet.ssl.ClientContextFactory
@param contextFactory: A context factory for SSL clients.
If None then Agent's default is used.
@type persistent: bool
@param persistent: Boolean indicating whether connections should be
persistent. If None then no persistent connections are created
(default behavior of t.w.c.Agent class).
@type maxPersistentPerHost: int
@param maxPersistentPerHost: The maximum number of cached persistent
connections for a host:port destination.
@type cachedConnectionTimeout: int
@param cachedConnectionTimeout: Number of seconds a cached persistent
connection will stay open before disconnecting.
@type retryAutomatically: bool
@param retryAutomatically: Boolean indicating whether idempotent
requests should be retried once if no response was received.
@type compressedHTTP: bool
@param compressedHTTP: Boolean indicating whether proxies can support
HTTP compression (actually gzip).
@type sharedPool: bool
        @param sharedPool: Share one connection pool between all created proxies.
The default is False.
"""
self._version = kwargs.get('version') or jsonrpc.VERSION_1
self._connectTimeout = kwargs.get('connectTimeout')
self._credentials = kwargs.get('credentials')
self._contextFactory = kwargs.get('contextFactory')
self._persistent = kwargs.get('persistent') or False
self._maxPersistentPerHost = kwargs.get('maxPersistentPerHost')
if self._maxPersistentPerHost is None:
self._maxPersistentPerHost = HTTPConnectionPool.maxPersistentPerHost
self._cachedConnectionTimeout = kwargs.get('cachedConnectionTimeout')
if self._cachedConnectionTimeout is None:
self._cachedConnectionTimeout = HTTPConnectionPool.cachedConnectionTimeout
self._retryAutomatically = kwargs.get('retryAutomatically')
if self._retryAutomatically is None:
self._retryAutomatically = HTTPConnectionPool.retryAutomatically
self._compressedHTTP = kwargs.get('compressedHTTP') or False
self._sharedPool = kwargs.get('sharedPool') or False
self._pool = None
if self._sharedPool:
self._pool = self._getConnectionPool()
def getProxy(self, url):
"""
Create a Proxy object by parameters passed to the factory.
@type url: str
@param url: URL of the RPC server. Supports HTTP and HTTPS for now,
more might come in the future.
@rtype: Proxy
@return: Newly created Proxy object.
"""
pool = None
if self._sharedPool:
pool = self._pool
elif self._persistent:
pool = self._getConnectionPool()
kwargs = {'version': self._version,
'connectTimeout': self._connectTimeout,
'credentials': self._credentials,
'contextFactory': self._contextFactory,
'pool': pool}
proxy = Proxy(url, **kwargs)
if self._compressedHTTP:
self._setContentDecoder(proxy)
return proxy
def _getConnectionPool(self):
pool = HTTPConnectionPool(reactor, self._persistent)
if self._persistent:
pool.maxPersistentPerHost = self._maxPersistentPerHost
pool.cachedConnectionTimeout = self._cachedConnectionTimeout
pool.retryAutomatically = self._retryAutomatically
return pool
def _setContentDecoder(self, proxy):
proxy.agent = ContentDecoderAgent(proxy.agent, [('gzip', GzipDecoder)])
class Proxy(object):
"""
A proxy to one specific JSON-RPC server.
Pass the server URL to the constructor and call
proxy.callRemote('method', *args) to call 'method' with *args.
"""
def __init__(self, url, version=jsonrpc.VERSION_1, connectTimeout=None,
credentials=None, contextFactory=None, pool=None):
"""
@type url: str
@param url: URL of the RPC server. Supports HTTP and HTTPS for now,
            more might come in the future.
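A minimal usage sketch for the classes above, following the callRemote contract stated in the Proxy docstring; the endpoint URL and method name are invented for illustration:

def printAndStop(result):
    print(result)
    reactor.stop()

factory = ProxyFactory(persistent=True)
proxy = factory.getProxy('http://localhost:7080/')  # hypothetical endpoint
proxy.callRemote('echo', 'hello').addBoth(printAndStop)
reactor.run()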
iotile/coretools | iotilesensorgraph/test/test_datastream.py | Python | gpl-3.0 | 8,473 | 0.000354
"""Tests for DataStream objects."""
import pytest
from iotile.core.exceptions import InternalError
from iotile.sg import DataStream, DataStreamSelector
def test_stream_type_parsing():
"""Make sure we can parse each type of stream."""
# Make sure parsing stream type works
stream = DataStream.FromString('buffered 1')
assert stream.stream_type == stream.BufferedType
stream = DataStream.FromString(u'buffered 1')
assert stream.stream_type == stream.BufferedType
stream = DataStream.FromString('unbuffered 1')
assert stream.stream_type == stream.UnbufferedType
stream = DataStream.FromString(u'unbuffered 1')
assert stream.stream_type == stream.UnbufferedType
stream = DataStream.FromString('counter 1')
assert stream.stream_type == stream.CounterType
stream = DataStream.FromString(u'counter 1')
assert stream.stream_type == stream.CounterType
stream = DataStream.FromString('constant 1')
assert stream.stream_type == stream.ConstantType
stream = DataStream.FromString(u'constant 1')
assert stream.stream_type == stream.ConstantType
stream = DataStream.FromString('output 1')
assert stream.stream_type == stream.OutputType
stream = DataStream.FromString(u'output 1')
assert stream.stream_type == stream.OutputType
def test_stream_id_parsing():
"""Make sure we can parse stream ids."""
stream = DataStream.FromString('buffered 1')
assert stream.stream_id == 1
stream = DataStream.FromString('buffered 0x100')
assert stream.stream_id == 0x100
stream = DataStream.FromString(u'buffered 1')
assert stream.stream_id == 1
stream = DataStream.FromString(u'buffered 0x100')
assert stream.stream_id == 0x100
def test_system_parsing():
"""Make sure we can parse the system prefix."""
stream = DataStream.FromString('buffered 1')
assert stream.system is False
stream = DataStream.FromString(u'buffered 1')
assert stream.system is False
stream = DataStream.FromString('system buffered 1')
assert stream.system is True
stream = DataStream.FromString(u'system buffered 1')
assert stream.system is True
def test_stringification():
"""Make sure we can stringify DataStream objects."""
stream1 = DataStream.FromString('system buffered 1')
stream2 = DataStream.FromString('buffered 0xF')
assert str(stream1) == str('system buffered 1')
assert str(stream2) == str('buffered 15')
def test_selector_parsing():
"""Make sure we can parse DataStreamSelector strings."""
# Make sure parsing stream type works
stream = DataStreamSelector.FromString('buffered 1')
assert stream.match_type == DataStream.BufferedType
stream = DataStreamSelector.FromString(u'buffered 1')
assert stream.match_type == DataStream.BufferedType
stream = DataStreamSelector.FromString('unbuffered 1')
assert stream.match_type == DataStream.UnbufferedType
stream = DataStreamSelector.FromString(u'unbuffered 1')
assert stream.match_type == DataStream.UnbufferedType
stream = DataStreamSelector.FromString('counter 1')
assert stream.match_type == DataStream.CounterType
stream = DataStreamSelector.FromString(u'counter 1')
assert stream.match_type == DataStream.CounterType
stream = DataStreamSelector.FromString('constant 1')
assert stream.match_type == DataStream.ConstantType
    stream = DataStreamSelector.FromString(u'constant 1')
assert stream.match_type == DataStream.ConstantType
stream = DataStreamSelector.FromString('output 1')
assert stream.match_type == DataStream.OutputType
    stream = DataStreamSelector.FromString(u'output 1')
assert stream.match_type == DataStream.OutputType
def test_stream_selector_id_parsing():
"""Make sure we can parse stream ids."""
stream = DataStreamSelector.FromString('buffered 1')
assert stream.match_id == 1
assert stream.match_spec == DataStreamSelector.MatchUserOnly
stream = DataStreamSelector.FromString('buffered 0x100')
assert stream.match_id == 0x100
assert stream.match_spec == DataStreamSelector.MatchUserOnly
stream = DataStreamSelector.FromString(u'buffered 1')
assert stream.match_id == 1
assert stream.match_spec == DataStreamSelector.MatchUserOnly
stream = DataStreamSelector.FromString(u'buffered 0x100')
assert stream.match_id == 0x100
assert stream.match_spec == DataStreamSelector.MatchUserOnly
stream = DataStreamSelector.FromString(u'system buffered 0x100')
assert stream.match_id == 0x100
assert stream.match_spec == DataStreamSelector.MatchSystemOnly
stream = DataStreamSelector.FromString(u'all buffered')
assert stream.match_id is None
assert stream.match_spec == DataStreamSelector.MatchUserAndBreaks
stream = DataStreamSelector.FromString(u'all user buffered')
assert stream.match_id is None
assert stream.match_spec == DataStreamSelector.MatchUserOnly
stream = DataStreamSelector.FromString(u'all combined buffered')
assert stream.match_id is None
assert stream.match_spec == DataStreamSelector.MatchCombined
stream = DataStreamSelector.FromString(u'all system buffered')
assert stream.match_id is None
assert stream.match_spec == DataStreamSelector.MatchSystemOnly
def test_matching():
"""Test selector stream matching."""
sel = DataStreamSelector.FromString(u'all system buffered')
assert sel.matches(DataStream.FromString('system buffered 1'))
assert not sel.matches(DataStream.FromString('buffered 1'))
assert not sel.matches(DataStream.FromString('counter 1'))
sel = DataStreamSelector.FromString(u'all user outputs')
assert sel.matches(DataStream.FromString('output 1'))
assert not sel.matches(DataStream.FromString('system output 1'))
assert not sel.matches(DataStream.FromString('counter 1'))
sel = DataStreamSelector.FromString(u'all combined outputs')
assert sel.matches(DataStream.FromString('output 1'))
assert sel.matches(DataStream.FromString('system output 1'))
assert not sel.matches(DataStream.FromString('counter 1'))
sel = DataStreamSelector.FromString(u'all outputs')
assert sel.matches(DataStream.FromString('output 1'))
assert sel.matches(DataStream.FromString('system output 1024'))
assert not sel.matches(DataStream.FromString('system output 1'))
assert not sel.matches(DataStream.FromString('counter 1'))
def test_encoding():
"""Test data stream and selector encoding."""
sel = DataStreamSelector.FromString(u'all system output')
assert sel.encode() == 0x5FFF
sel = DataStreamSelector.FromString(u'all user output')
assert sel.encode() == 0x57FF
sel = DataStreamSelector.FromString(u'all output')
assert sel.encode() == 0xD7FF
sel = DataStreamSelector.FromString(u'all combined output')
assert sel.encode() == 0xDFFF
stream = DataStream.FromString('output 1')
assert stream.encode() == 0x5001
stream = DataStream.FromString('unbuffered 10')
assert stream.encode() == 0x100a
def test_selector_from_encoded():
"""Make sure we can create a selector from an encoded value."""
sel = DataStreamSelector.FromEncoded(0x5FFF)
assert str(sel) == 'all system outputs'
sel = DataStreamSelector.FromEncoded(0xD7FF)
assert str(sel) == 'all outputs'
sel = DataStreamSelector.FromEncoded(0x100a)
assert str(sel) == 'unbuffered 10'
assert str(DataStreamSelector.FromEncoded(DataStreamSelector.FromString('all combined output').encode())) == 'all combined outputs'
def test_buffered_pluralization():
"""Make sure we don't incorrectly pluralize buffered streams."""
sel = DataStreamSelector.FromString('all buffered')
assert str(sel) == 'all buffered'
def test_important_inputs():
"""Make sure we support matching important inputs and outputs."""
imp_stream = DataStream.FromString('system input 1024')
imp_store_stream = DataStream.FromString('system input 1536')
assert imp_stream.important is True
assert imp_store_stream.important is True
assert imp_stream.associated_stream() == DataStream.FromString('system output
gwq5210/python_learn | decorator.py | Python | gpl-2.0 | 116 | 0.051724
#!/usr/bin/env python
# coding=utf-8
def now():
    print '2015-9-10'
f = now
print now.__name__
print f.__name__
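Building on the function-object idea above, a minimal decorator sketch in the same Python 2 style (not part of the original file):

def log(func):
    def wrapper(*args, **kw):
        print 'call %s():' % func.__name__
        return func(*args, **kw)
    return wrapper

@log
def tomorrow():
    print '2015-9-11'

tomorrow()  # prints "call tomorrow():" and then the date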
lumened/battmonitor | api_charger.py | Python | gpl-2.0 | 2,610 | 0.02069
# This handles all interactions needed for interpreting and controlling the charger
import time
import api_adc, config
import apigpio.api_gpio as api_gpio
# Charger Control
def init_control():
api_gpio.init_pin(27)
api_gpio.off_pin(27) # Set line to low
def deinit_control():
api_gpio.deinit_pin(27)
def line_high_control():
api_gpio.on_pin(27)
def line_low_control():
api_gpio.off_pin(27)
def line_falling_control():
line_high_control()
time.sleep(0.10)
line_low_control()
# Charger Status
def update_state(write_to_file=False):
'''
This function inputs the LED state from the charger and interprets the result
'''
config.led_volt = api_adc.get_charger_led_value()
if config.led_volt<0.7: config.led_state = config.state['off']
elif config.led_volt>1.0 and config.led_volt<1.5: config.led_state = config.state['detected']
    elif config.led_volt>2.3 and config.led_volt<2.8: config.led_state = config.state['charging']
elif config.led_volt>4.5 and config.led_volt<5.5: config.led_state = config.state['full']
# Madness
# elif config.led_volt>5.5 : config.led_state = config.state['detected']
if write_to_file:
f = open('/home/pi/touch-flux/src/battmonitor/data.txt', "a")
        if config.led_state==config.state['off']: f.write('U')
elif config.led_state == config.state['charging']: f.write('C')
else: f.write('P')
f.close()
return None
# Battery State
def update_battery(write_to_file=False):
'''
This function inputs the battery voltage and updates the shared file
'''
config.bat_volt = api_adc.get_battery_value()*3
bat_percent = 100*(config.bat_volt - config.bat_min)/(config.bat_max - config.bat_min)
if bat_percent<0 : bat_percent = 0
elif bat_percent>100 : bat_percent = 101
if write_to_file:
f = open('/home/pi/touch-flux/src/battmonitor/data.txt', "w")
# if system_state == config.charging:
# f.write("CCC")
# else:
f.write("%.3d" % bat_percent)
if config.DEBUG: print("%.3d" % bat_percent)
f.close()
return None
def check_bat_volt_high():
'''
    Returns a boolean that identifies whether the battery voltage has risen above the upper threshold (bat_max)
'''
if config.bat_volt > config.bat_max: return True
return False
def check_bat_volt_low():
'''
    Returns a boolean that identifies whether the battery voltage has fallen below the lower threshold (bat_min)
'''
if config.bat_volt < config.bat_min: return True
return False
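A worked example of the percentage formula in update_battery; the voltage limits here are hypothetical, not taken from config:

bat_min, bat_max, bat_volt = 3.0, 4.2, 3.6   # hypothetical values
bat_percent = 100 * (bat_volt - bat_min) / (bat_max - bat_min)
print("%.3d" % bat_percent)                   # -> "050"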
cactusbin/nyt | matplotlib/examples/pylab_examples/webapp_demo.py | Python | unlicense | 1,713 | 0.001751
#!/usr/bin/env python
# -*- noplot -*-
"""
This example shows how to use the agg backend directly to create
images, which may be of use to web application developers who want
full control over their code without using the pyplot interface to
manage figures, figure closing etc.
.. note::
It is not necessary to avoid using the pyplot interface in order to
create figures without a graphical front-end - simply setting
the backend to "Agg" would be sufficient.
It is also worth noting that, because matplotlib can save figures to file-like
object, matplotlib can also be used inside a cgi-script *without* needing to
write a figure to disk.
"""
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.figure import Figure
import numpy as np
def make_fig():
"""
    Make a figure and save it to "webapp.png".
"""
fig = Figure()
ax = fig.add_subplot(1, 1, 1)
ax.plot([1, 2, 3], 'ro--', markersize=12, markerfacecolor='g')
# make a translucent scatter collection
x = np.random.rand(100)
y = np.random.rand(100)
area = np.pi * (10 * np.random.rand(100)) ** 2 # 0 to 10 point radiuses
c = ax.scatter(x, y, area)
c.set_alpha(0.5)
# add some text decoration
ax.set_title('My first image')
ax.set_ylabel('Some numbers')
ax.set_xticks((.2, .4, .6, .8))
labels = ax.set_xticklabels(('Bill', 'Fred', 'Ted', 'Ed'))
# To set object properties, you can either iterate over the
    # objects manually, or define your own set command, as in setapi
# above.
for label in labels:
label.set_rotation(45)
label.set_fontsize(12)
FigureCanvasAgg(fig).print_png('webapp.png', dpi=150)
make_fig()
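As the module docstring notes, matplotlib can also render into a file-like object instead of a file on disk; a minimal in-memory sketch:

import io
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.figure import Figure

fig = Figure()
fig.add_subplot(1, 1, 1).plot([1, 2, 3])
buf = io.BytesIO()
FigureCanvasAgg(fig).print_png(buf)   # PNG bytes, never written to disk
png_bytes = buf.getvalue()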
pritha-srivastava/sm | drivers/LVHDoFCoESR.py | Python | lgpl-2.1 | 3,290 | 0.001216
#!/usr/bin/python
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# LVHDoFCoESR: LVHD over Fibre Channel over Ethernet driver
#
import SR
import LVHDoHBASR
import LVHDSR
import SRCommand
import sys
import xs_errors
import util
CAPABILITIES = ["SR_PROBE", "SR_UPDATE", "SR_METADATA", "SR_TRIM",
"VDI_CREATE", "VDI_DELETE", "VDI_ATTACH", "VDI_DETACH",
"VDI_GENERATE_CONFIG", "VDI_SNAPSHOT", "VDI_CLONE",
"VDI_RESIZE", "ATOMIC_PAUSE", "VDI_RESET_ON_BOOT/2",
"VDI_UPDATE"]
CONFIGURATION = [['SCSIid', 'The scsi_id of the destination LUN'],
['allocation', 'Valid values are thick or thin(optional,\
defaults to thick)']]
DRIVER_INFO = {
    'name': 'LVHD over FCoE',
    'description': 'SR plugin which represents disks as VHDs on Logical \
Volumes within a Volume Group created on a FCoE LUN',
'vendor': 'Citrix Systems Inc',
'copyright': '(C) 2015 Citrix Systems Inc',
'driver_version': '1.0',
'required_api_version': '1.0',
'capabilities': CAPABILITIES,
'configuration': CONFIGURATION
}
class LVHDoFCoESR(LVHDoHBASR.LVHDoHBASR):
"""LVHD over FCoE storage repository"""
def handles(type):
if __name__ == '__main__':
name = sys.argv[0]
else:
name = __name__
if name.endswith("LVMoFCoESR"):
return type == "lvmofcoe" # for the initial switch from LVM
if type == "lvhdofcoe":
return True
return False
handles = staticmethod(handles)
def load(self, sr_uuid):
driver = SR.driver('hba')
if 'type' not in self.original_srcmd.params['device_config'] or \
'type' in self.original_srcmd.params['device_config'] and \
self.original_srcmd.dconf['type'] == "any":
self.original_srcmd.dconf['type'] = "fcoe"
self.hbasr = driver(self.original_srcmd, sr_uuid)
pbd = None
try:
pbd = util.find_my_pbd(self.session, self.host_ref, self.sr_ref)
except:
pass
if not self.dconf.has_key('SCSIid') or not self.dconf['SCSIid']:
print >>sys.stderr, self.hbasr.print_devs()
raise xs_errors.XenError('ConfigSCSIid')
self.SCSIid = self.dconf['SCSIid']
self._pathrefresh(LVHDoFCoESR)
LVHDSR.LVHDSR.load(self, sr_uuid)
def vdi(self, uuid):
return LVHDoFCoEVDI(self, uuid)
class LVHDoFCoEVDI(LVHDoHBASR.LVHDoHBAVDI):
pass
if __name__ == '__main__':
SRCommand.run(LVHDoFCoESR, DRIVER_INFO)
else:
SR.registerSR(LVHDoFCoESR)
GoogleCloudPlatform/buildpacks | builders/testdata/python/functions/conflicting_dependencies/main.py | Python | apache-2.0 | 926 | 0.00324
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# GCF Python 3.7 legacy worker has additional dependencies available by default
# to user functions. This test ensures that those dependencies can be overridden
# through a user's requirements.txt.
import yarl
def testFunction(request):
if yarl.__version__ != '1.4.2':
        return 'FAIL: got %s, want %s' % (yarl.__version__, '1.4.2')
return 'PASS'
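The FAIL line above originally read `'FAIL: got %s, want %s' % yarl.__version__, '1.4.2'`; because `%` binds tighter than the comma, that expression builds a tuple and the format itself raises TypeError. A tiny illustration:

s = 'got %s, want %s' % ('1.0', '1.4.2')  # correct: one tuple argument
# 'got %s, want %s' % '1.0', '1.4.2'      # parsed as (('got %s, want %s' % '1.0'), '1.4.2')
#                                         # -> TypeError: not enough arguments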
tomvanderlee/youtube-podcaster | setup.py | Python | mit | 660 | 0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup file for youtube_podcaster.
This file was generated with PyScaffold 2.4.2, a tool that easily
puts up a scaffold for your new Python project. Learn more under:
http://pyscaffold.readthedocs.org/
"""
import sys
from setuptools import setup
def setup_package():
    needs_sphinx = {'build_sphinx', 'upload_docs'}.intersection(sys.argv)
sphinx = ['sphinx'] if needs_sphinx else []
setup(setup_requires=['six', 'pyscaffold>=2.4rc1,<2.5a0'] + sphinx,
tests_require=['pytest_cov', 'pytest'],
use_pyscaffold=True)
if __name__ == "__main__":
    setup_package()
plotly/python-api | packages/python/plotly/plotly/validators/layout/annotation/_arrowcolor.py | Python | mit | 479 | 0.002088
import _plotly_utils.basevalidators
class ArrowcolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="arrowcolor", parent_name="layout.annotation", **kwargs
):
super(ArrowcolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "arraydraw"),
role=kwargs.pop("role", "style"),
**kwargs
)
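A hypothetical usage sketch: plotly's validator classes expose a validate_coerce method that checks and normalizes a property value (treat the exact call signature as an assumption about the basevalidators API):

v = ArrowcolorValidator()
v.validate_coerce('rgba(0, 0, 255, 0.4)')  # returns the value if it is a valid color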
yaricom/brainhash | src/experiment_cA3_1_dt_th_al_ah.py | Python | gpl-3.0 | 1,954 | 0.011771
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
The experiment with 10 Hz/5Hz, wisp, attention, 70, cA 3, delta, theta, alpha low, alpha high, batch size = 1 and
balanced data set
@author: yaric
"""
import experiment as ex
import config
from time import time
experiment_name = 'cA_3_1_dt-th-a_l-a_h' # will be used as parent dir for analyzer results
# The sample records identifiers
signal_ids = ['IO_10_2', 'IO_TXT', 'IO_SKY', 'KS_10_2', 'RO_10_2']
noise_ids = ['noise']
# Setup analyzer configuration
analyzer_config = ex.defaultAnalyzerConfig()
analyzer_config['batch_size'] = 1
analyzer_config['learning_rate'] = 0.1
analyzer_config['n_hidden'] = 3
analyzer_config['training_epochs'] = 50000
analyzer_config['encoder'] = 'cA'
analyzer_config['bands'] = 'delta,theta,alpha_l,alpha_h'
start = time()
#
# Run analyzer
#
"""
print("\nStart analysis with parameters:\n%s\n" % analyzer_config)
print("Start analysis for signal records: %s" % signal_ids)
ex.runEEGAnalyzerWithIDs(ids_list=signal_ids,
experiment_name=experiment_name,
a_config=analyzer_config)
print("Start analysis for noise records: %s" % noise_ids)
ex.runEEGAnalyzerWithIDs(ids_list=noise_ids,
                         experiment_name=experiment_name,
a_config=analyzer_config)
"""
#
# Run classifiers
#
signal_dir = "%s/%s" % (config.analyzer_out_dir, experiment_name)
noise_dir = "%s/%s/%s" % (config.analyzer_out_dir, experiment_name, noise_ids[0])
out_suffix = experiment_name
print("Run classifiers over analyzed records. \nSignal dir: %s\nNoise dir: %s"
% (signal_dir, noise_dir))
ex.runClassifier(signal_dir=signal_dir,
signal_records=signal_ids,
noise_dir=noise_dir,
out_suffix=out_suffix)
print("\n\nExperiment %s took %.2f seconds.\n"
% (experiment_name, time() - start))
Jumpscale/jumpscale6_core | apps/agentcontroller/jumpscripts/core/monitoring_infogathering/info_gather_disks.py | Python | bsd-2-clause | 566 | 0.008834
from JumpScale import j
descr = """
Checks disks' status
"""
organization = "jumpscale"
name = 'check_disks'
author = "zains@codescalers.com"
license = "bsd"
version = "1.0"
category = "system.disks"
async = True
queue = 'process'
roles = []
enable = True
period=0
log=False
def action():
import JumpScale.lib.diskmanager
result = dict()
disks = j.system.platform.diskmanager.partitionsFind(mounted=True, prefix='', minsize=0, maxsize=None)
for disk in disks:
result[disk.path] = {'free': disk.free, 'size': disk.size}
return result
dhuppenkothen/clarsach | clarsach/respond.py | Python | gpl-3.0 | 10,929 | 0.001738
# Contains functionality for responses
import numpy as np
import astropy.io.fits as fits
__all__ = ["RMF", "ARF"]
class RMF(object):
def __init__(self, filename):
self._load_rmf(filename)
pass
def _load_rmf(self, filename):
"""
Load an RMF from a FITS file.
Parameters
----------
filename : str
The file name with the RMF file
Attributes
----------
n_grp : numpy.ndarray
            the array with the number of channel groups
            for each energy bin
f_chan : numpy.ndarray
The starting channel for each channel group;
If an element i in n_grp > 1, then the resulting
row entry in f_chan will be a list of length n_grp[i];
otherwise it will be a single number
n_chan : numpy.ndarray
The number of channels in each channel group. The same
logic as for f_chan applies
matrix : numpy.ndarray
The redistribution matrix as a flattened 1D vector
energ_lo : numpy.ndarray
The lower edges of the energy bins
energ_hi : numpy.ndarray
The upper edges of the energy bins
detchans : int
The number of channels in the detector
"""
# open the FITS file and extract the MATRIX extension
# which contains the redistribution matrix and
# anxillary information
hdulist = fits.open(filename)
# get all the extension names
extnames = np.array([h.name for h in hdulist])
# figure out the right extension to use
if "MATRIX" in extnames:
h = hdulist["MATRIX"]
elif "SPECRESP MATRIX" in extnames:
h = hdulist["SPECRESP MATRIX"]
data = h.data
hdr = h.header
hdulist.close()
# extract + store the attributes described in the docstring
n_grp = np.array(data.field("N_GRP"))
f_chan = np.array(data.field('F_CHAN'))
n_chan = np.array(data.field("N_CHAN"))
matrix = np.array(data.field("MATRIX"))
self.energ_lo = np.array(data.field("ENERG_LO"))
self.energ_hi = np.array(data.field("ENERG_HI"))
self.energ_unit = data.columns["ENERG_LO"].unit
self.detchans = hdr["DETCHANS"]
self.offset = self.__get_tlmin(h)
# flatten the variable-length arrays
self.n_grp, self.f_chan, self.n_chan, self.matrix = \
self._flatten_arrays(n_grp, f_chan, n_chan, matrix)
return
def __get_tlmin(self, h):
"""
Get the tlmin keyword for `F_CHAN`.
Parameters
----------
h : an astropy.io.fits.hdu.table.BinTableHDU object
The extension containing the `F_CHAN` column
Returns
-------
tlmin : int
The tlmin keyword
"""
# get the header
hdr = h.header
# get the keys of all
keys = np.array(list(hdr.keys()))
# find the place where the tlmin keyword is defined
t = np.array(["TLMIN" in k for k in keys])
# get the index of the TLMIN keyword
tlmin_idx = np.hstack(np.where(t))[0]
# get the corresponding value
tlmin = np.int(list(hdr.items())[tlmin_idx][1])
return tlmin
def _flatten_arrays(self, n_grp, f_chan, n_chan, matrix):
if not len(n_grp) == len(f_chan) == len(n_chan) == len(matrix):
raise ValueError("Arrays must be of same length!")
# find all non-zero groups
nz_idx = (n_grp > 0)
# stack all non-zero rows in the matrix
matrix_flat = np.hstack(matrix[nz_idx])
# stack all nonzero rows in n_chan and f_chan
#n_chan_flat = np.hstack(n_chan[nz_idx])
#f_chan_flat = np.hstack(f_chan[nz_idx])
# some matrices actually have more elements
# than groups in `n_grp`, so we'll only pick out
# those values that have a correspondence in
# n_grp
f_chan_new = []
n_chan_new = []
for i,t in enumerate(nz_idx):
if t:
n = n_grp[i]
f = f_chan[i]
nc = n_chan[i]
if np.size(f) == 1:
f_chan_new.append(f)
n_chan_new.append(nc)
else:
f_chan_new.append(f[:n])
n_chan_new.append(nc[:n])
n_chan_flat = np.hstack(n_chan_new)
f_chan_flat = np.hstack(f_chan_new)
# if n_chan is zero, we'll remove those as well.
nz_idx2 = (n_chan_flat > 0)
n_chan_flat = n_chan_flat[nz_idx2]
f_chan_flat = f_chan_flat[nz_idx2]
return n_grp, f_chan_flat, n_chan_flat, matrix_flat
def apply_rmf(self, spec):
"""
Fold the spectrum through the redistribution matrix.
The redistribution matrix is saved as a flattened 1-dimensional
vector to save space. In reality, for each entry in the flux
vector, there exists one or more sets of channels that this
flux is redistributed into. The additional arrays `n_grp`,
`f_chan` and `n_chan` store this information:
* `n_group` stores the number of channel groups for each
energy bin
        * `f_chan` stores the *first channel* that each channel
          set is redistributed into
* `n_chan` stores the number of channels in each channel
set
As a result, for a given energy bin i, we need to look up the
number of channel sets in `n_grp` for that energy bin. We
        then need to loop over the number of channel sets. For each
channel set, we look up the first channel into which flux
will be distributed as well as the number of channels in the
group. We then need to also loop over the these channels and
actually use the corresponding elements in the redistribution
matrix to redistribute the photon flux into channels.
All of this is basically a big bookkeeping exercise in making
sure to get the indices right.
Parameters
----------
spec : numpy.ndarray
The (model) spectrum to be folded
Returns
-------
counts : numpy.ndarray
The (model) spectrum after folding, in
counts/s/channel
"""
# get the number of channels in the data
nchannels = spec.shape[0]
# an empty array for the output counts
counts = np.zeros(nchannels)
# index for n_chan and f_chan incrementation
k = 0
# index for the response matrix incrementation
resp_idx = 0
# loop over all channels
for i in range(nchannels):
# this is the current bin in the flux spectrum to
# be folded
source_bin_i = spec[i]
# get the current number of groups
current_num_groups = self.n_grp[i]
# loop over the current number of groups
for j in range(current_num_groups):
current_num_chans = int(self.n_chan[k])
if current_num_chans == 0:
k += 1
resp_idx += current_num_chans
continue
else:
# get the right index for the start of the counts array
# to put the data into
counts_idx = int(self.f_chan[k] - self.offset)
# this is the current number of channels to use
k += 1
# add the flux to the subarray of the counts array that starts with
# counts_idx and runs over current_num_chans channels
counts[counts_idx:counts_idx +
current_num_chans] += self.matrix[resp_idx:resp_idx +
current_num_chans] * \
np.float(source_bin_i)
# iterate
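A toy, self-contained illustration of the flattened-RMF bookkeeping described in the apply_rmf docstring; all numbers are invented (two energy bins, one channel group each, offset 0):

import numpy as np

n_grp  = np.array([1, 1])        # one channel group per energy bin
f_chan = np.array([0, 1])        # first channel of each group
n_chan = np.array([2, 2])        # two channels in each group
matrix = np.array([0.7, 0.3,     # bin 0 -> channels 0-1
                   0.6, 0.4])    # bin 1 -> channels 1-2
spec   = np.array([10.0, 20.0])  # model flux per energy bin

counts = np.zeros(3)
k = resp_idx = 0
for i, flux in enumerate(spec):
    for _ in range(n_grp[i]):
        nc, fc = int(n_chan[k]), int(f_chan[k])
        counts[fc:fc + nc] += matrix[resp_idx:resp_idx + nc] * flux
        resp_idx += nc
        k += 1

print(counts)  # [ 7. 15.  8.]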
cuemacro/chartpy | chartpy_examples/xkcd_example.py | Python | apache-2.0 | 2,081 | 0.004805
__author__ = 'saeedamen' # Saeed Amen
#
# Copyright 2016 Cuemacro
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and limitations under the License.
#
# support Quandl 3.x.x
try:
import quandl as Quandl
except:
# if import fails use Quandl 2.x.x
import Quandl
from chartpy import Chart, Style
# get your own free Quandl API key from https://www.quandl.com/
try:
from chartpy.chartcred import ChartCred
cred = ChartCred()
quandl_api_key = cred.quandl_api_key
except:
quandl_api_key = "x"
# choose run_example = 0 for everything
# run_example = 1 - xkcd example
# run_example = 2 - fun xkcd example
run_example = 0
if run_example == 1 or run_example == 0:
df = Quandl.get(["FRED/A191RL1Q225SBEA"], authtoken=quandl_api_key)
df.columns = ["Real QoQ"]
# set the style of the plot
style = Style(title="US GDP", source="Quandl/Fred", xkcd=True)
# Chart object is initialised with the dataframe and our chart style
chart = Chart(df=df, chart_type='line', style=style, engine='matplotlib')
chart.plot()
if run_example == 2 or run_example == 0:
import pandas, numpy
dt = pandas.date_range(start="1 Jan 1950", end="1 Apr 2017", freq='M')
data = numpy.arange(len(dt))
df = pandas.DataFrame(index=dt, data=data, columns=['Importance'])
# set the style of the plot
style = Style(title="Importance of puns", source="@saeedamenfx", xkcd=True, x_title="Puns", y_title="Importance")
# Chart object is initialised with the dataframe and our chart style
    chart = Chart(df=df, chart_type='line', style=style, engine='matplotlib')
chart.plot()
klebercode/klebercode | klebercode/blog/migrations/0001_initial.py | Python | gpl-2.0 | 6,850 | 0.007737
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Entry'
db.create_table(u'blog_entry', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created', self.gf('django.db.models.fields.DateTimeField')()),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=200)),
('image', self.gf(u'sorl.thumbnail.fields.ImageField')(max_length=100)),
('body', self.gf('tinymce.models.HTMLField')()),
('publish', self.gf('django.db.models.fields.BooleanField')(default=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
))
db.send_create_signal(u'blog', ['Entry'])
# Adding M2M table for field categories on 'Entry'
m2m_table_name = db.shorten_name(u'blog_entry_categories')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('entry', models.ForeignKey(orm[u'blog.entry'], null=False)),
('category', models.ForeignKey(orm[u'core.category'], null=False))
))
db.create_unique(m2m_table_name, ['entry_id', 'category_id'])
def backwards(self, orm):
# Deleting model 'Entry'
db.delete_table(u'blog_entry')
# Removing M2M table for field categories on 'Entry'
db.delete_table(db.shorten_name(u'blog_entry_categories'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'blog.entry': {
'Meta': {'ordering': "['-created', 'title', 'author']", 'object_name': 'Entry'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'body': ('tinymce.models.HTMLField', [], {}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['core.Category']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': (u'sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.category': {
'Meta': {'ordering': "['order', 'name']", 'object_name': 'Category'},
'acronym': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'area': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['blog']
googleapis/python-aiplatform | google/cloud/aiplatform_v1beta1/types/tensorboard_service.py | Python | apache-2.0 | 42,035 | 0.000619
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1beta1.types import operation
from google.cloud.aiplatform_v1beta1.types import tensorboard as gca_tensorboard
from google.cloud.aiplatform_v1beta1.types import tensorboard_data
from google.cloud.aiplatform_v1beta1.types import (
tensorboard_experiment as gca_tensorboard_experiment,
)
from google.cloud.aiplatform_v1beta1.types import tensorboard_run as gca_tensorboard_run
from google.cloud.aiplatform_v1beta1.types import (
tensorboard_time_series as gca_tensorboard_time_series,
)
from google.protobuf import field_mask_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1beta1",
manifest={
"CreateTensorboardRequest",
"GetTensorboardRequest",
"ListTensorboardsRequest",
"ListTensorboardsResponse",
"UpdateTensorboardRequest",
"DeleteTensorboardRequest",
"CreateTensorboardExperimentRequest",
"GetTensorboardExperimentRequest",
"ListTensorboardExperimentsRequest",
"ListTensorboardExperimentsResponse",
"UpdateTensorboardExperimentRequest",
"DeleteTensorboardExperimentRequest",
"BatchCreateTensorboardRunsRequest",
"BatchCreateTensorboardRunsResponse",
"CreateTensorboardRunRequest",
"GetTensorboardRunRequest",
"ReadTensorboardBlobDataRequest",
"ReadTensorboardBlobDataResponse",
"ListTensorboardRunsRequest",
"ListTensorboardRunsResponse",
"UpdateTensorboardRunRequest",
"DeleteTensorboardRunRequest",
"BatchCreateTensorboardTimeSeriesRequest",
"BatchCreateTensorboardTimeSeriesResponse",
"CreateTensorboardTimeSeriesRequest",
"GetTensorboardTimeSeriesRequest",
"ListTensorboardTimeSeriesRequest",
"ListTensorboardTimeSeriesResponse",
"UpdateTensorboardTimeSeriesRequest",
"DeleteTensorboardTimeSeriesRequest",
"BatchReadTensorboardTimeSeriesDataRequest",
"BatchReadTensorboardTimeS
|
eriesDataResponse",
"ReadTensorbo
|
ardTimeSeriesDataRequest",
"ReadTensorboardTimeSeriesDataResponse",
"WriteTensorboardExperimentDataRequest",
"WriteTensorboardExperimentDataResponse",
"WriteTensorboardRunDataRequest",
"WriteTensorboardRunDataResponse",
"ExportTensorboardTimeSeriesDataRequest",
"ExportTensorboardTimeSeriesDataResponse",
"CreateTensorboardOperationMetadata",
"UpdateTensorboardOperationMetadata",
},
)
class CreateTensorboardRequest(proto.Message):
r"""Request message for
[TensorboardService.CreateTensorboard][google.cloud.aiplatform.v1beta1.TensorboardService.CreateTensorboard].
Attributes:
parent (str):
Required. The resource name of the Location to create the
Tensorboard in. Format:
``projects/{project}/locations/{location}``
tensorboard (google.cloud.aiplatform_v1beta1.types.Tensorboard):
Required. The Tensorboard to create.
"""
parent = proto.Field(proto.STRING, number=1,)
tensorboard = proto.Field(
proto.MESSAGE, number=2, message=gca_tensorboard.Tensorboard,
)
class GetTensorboardRequest(proto.Message):
r"""Request message for
[TensorboardService.GetTensorboard][google.cloud.aiplatform.v1beta1.TensorboardService.GetTensorboard].
Attributes:
name (str):
Required. The name of the Tensorboard resource. Format:
``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
"""
name = proto.Field(proto.STRING, number=1,)
class ListTensorboardsRequest(proto.Message):
r"""Request message for
[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1beta1.TensorboardService.ListTensorboards].
Attributes:
parent (str):
Required. The resource name of the Location to list
Tensorboards. Format:
``projects/{project}/locations/{location}``
filter (str):
Lists the Tensorboards that match the filter
expression.
page_size (int):
The maximum number of Tensorboards to return.
The service may return fewer than this value. If
unspecified, at most 100 Tensorboards will be
returned. The maximum value is 100; values above
100 will be coerced to 100.
page_token (str):
A page token, received from a previous
[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1beta1.TensorboardService.ListTensorboards]
call. Provide this to retrieve the subsequent page.
When paginating, all other parameters provided to
[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1beta1.TensorboardService.ListTensorboards]
must match the call that provided the page token.
order_by (str):
Field to use to sort the list.
read_mask (google.protobuf.field_mask_pb2.FieldMask):
Mask specifying which fields to read.
"""
parent = proto.Field(proto.STRING, number=1,)
filter = proto.Field(proto.STRING, number=2,)
page_size = proto.Field(proto.INT32, number=3,)
page_token = proto.Field(proto.STRING, number=4,)
order_by = proto.Field(proto.STRING, number=5,)
read_mask = proto.Field(proto.MESSAGE, number=6, message=field_mask_pb2.FieldMask,)
class ListTensorboardsResponse(proto.Message):
r"""Response message for
[TensorboardService.ListTensorboards][google.cloud.aiplatform.v1beta1.TensorboardService.ListTensorboards].
Attributes:
tensorboards (Sequence[google.cloud.aiplatform_v1beta1.types.Tensorboard]):
            The Tensorboards matching the request.
next_page_token (str):
A token, which can be sent as
[ListTensorboardsRequest.page_token][google.cloud.aiplatform.v1beta1.ListTensorboardsRequest.page_token]
to retrieve the next page. If this field is omitted, there
are no subsequent pages.
"""
@property
def raw_page(self):
return self
tensorboards = proto.RepeatedField(
proto.MESSAGE, number=1, message=gca_tensorboard.Tensorboard,
)
next_page_token = proto.Field(proto.STRING, number=2,)
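    # Editor's note (illustrative, not part of the generated file): callers
    # typically drive pagination with the standard loop; `client` below is a
    # hypothetical TensorboardService client instance.
    #
    #   request = ListTensorboardsRequest(parent=parent)
    #   while True:
    #       response = client.list_tensorboards(request=request)
    #       for tb in response.tensorboards:
    #           handle(tb)
    #       if not response.next_page_token:
    #           break
    #       request.page_token = response.next_page_token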
class UpdateTensorboardRequest(proto.Message):
r"""Request message for
[TensorboardService.UpdateTensorboard][google.cloud.aiplatform.v1beta1.TensorboardService.UpdateTensorboard].
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. Field mask is used to specify the fields to be
overwritten in the Tensorboard resource by the update. The
fields specified in the update_mask are relative to the
resource, not the full request. A field will be overwritten
if it is in the mask. If the user does not provide a mask
then all fields will be overwritten if new values are
specified.
tensorboard (google.cloud.aiplatform_v1beta1.types.Tensorboard):
Required. The Tensorboard's ``name`` field is used to
identify the Tensorboard to be updated. Format:
``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
"""
update_mask = proto.Field(
proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask,
)
tensorboard = proto.Field(
proto.MESSAGE, number=2, message=gca_tensorboard.Tensorboard,
    )
|
explosion/srsly
|
srsly/tests/test_pickle_api.py
|
Python
|
mit
| 870
| 0.004598
|
from .._pickle_api import pickle_dumps, pickle_loads
def test_pickle_dumps():
data = {"hello": "world", "test": 123}
expected = [
b"\x80\x04\x95\x1e\x00\x00\x00\x00\x00\x00\x00}\x94(\x8c\x05hello\x94\x8c\x05world\x94\x8c\x04test\x94K{u.",
b"\x80\x04\x95\x1e\x00\x00\x00\x00\x00\x00\x00}\x94(\x8c\x04test\x94K{\x8c\x05hello\x94\x8c\x05world\x94u.",
b"\x80\x02}q\x00(X\x04\x00\x00\x00testq\x01K{X\x05\x00\x00\x00helloq\x02X\x05\x00\x00\x00worldq\x03u.",
b"\x8
|
0\x05\x95\x1e\x00\x00\x00\x00\x00\x00\x00}\x94(\x8c\x05hello\x94\x8c\x05world\x94\x8c\x04test\x94K{u.",
]
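    # Editor's note: several byte strings are accepted because the exact
    # pickle stream depends on the pickle protocol and on dict key order.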
msg = pickle_dumps(data)
assert msg in expected
def test_pickle_loads():
msg = pickle_dumps({"hello": "world", "test": 123})
data = pickle_loads(msg)
assert len(data) == 2
assert data["hello"] ==
|
"world"
assert data["test"] == 123
|
sserrot/champion_relationships
|
venv/Lib/site-packages/PIL/PSDraw.py
|
Python
|
mit
| 6,735
| 0.000148
|
#
# The Python Imaging Library
# $Id$
#
# simple postscript graphics interface
#
# History:
# 1996-04-20 fl Created
# 1999-01-10 fl Added gsave/grestore to image method
# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge)
#
# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved.
# Copyright (c) 1996 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
import sys
from . import EpsImagePlugin
##
# Simple Postscript graphics interface.
class PSDraw:
"""
Sets up printing to the given file. If **fp** is omitted,
:py:attr:`sys.stdout` is assumed.
"""
def __init__(self, fp=None):
if not fp:
fp = sys.stdout
self.fp = fp
def _fp_write(self, to_write):
if self.fp == sys.stdout:
self.fp.write(to_write)
else:
self.fp.write(bytes(to_write, "UTF-8"))
def begin_document(self, id=None):
"""Set up printing of a document. (Write Postscript DSC header.)"""
# FIXME: incomplete
self._fp_write(
"%!PS-Adobe-3.0\n"
"save\n"
"/showpage { } def\n"
"%%EndComments\n"
"%%BeginDocument\n"
)
# self._fp_write(ERROR_PS) # debugging!
self._fp_write(EDROFF_PS)
self._fp_write(VDI_PS)
self._fp_write("%%EndProlog\n")
self.isofont = {}
def end_document(self):
"""Ends printing. (Write Postscript DSC footer.)"""
self._fp_write("%%EndDocument\nrestore showpage\n%%End\n")
if hasattr(self.fp, "flush"):
self.fp.flush()
def setfont(self, font, size):
"""
Selects which font to use.
:param font: A Postscript font name
:param size: Size in points.
"""
if font not in self.isofont:
# reencode font
self._fp_write("/PSDraw-{} ISOLatin1Encoding /{} E\n".format(font, font))
self.isofont[font] = 1
# rough
self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font))
def line(self, xy0, xy1):
"""
Draws a line between the two points. Coordinates are given in
Postscript point coordinates (72 points per inch, (0, 0) is the lower
left corner of the page).
"""
xy = xy0 + xy1
self._fp_write("%d %d %d %d Vl\n" % xy)
def rectangle(self, box):
"""
Draws a rectangle.
:param box: A 4-tuple of integers whose order and function is currently
undocumented.
Hint: the tuple is passed into this format string:
.. code-block:: python
%d %d M %d %d 0 Vr\n
"""
self._fp_write("%d %d M %d %d 0 Vr\n" % box)
def text(self, xy, text):
"""
Draws text at the given position. You must use
:py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method.
"""
text = "\\(".join(text.split("("))
text = "\\)".join(text.split(")"))
xy = xy + (text,)
self._fp_write("%d %d M (%s) S\n" % xy)
def image(self, box, im, dpi=None):
"""Draw a PIL image, centered in the given box."""
# default resolution depends on mode
if not dpi:
if im.mode == "1":
dpi = 200 # fax
else:
dpi = 100 # greyscale
# image size (on paper)
x = im.size[0] * 72 / dpi
y = im.size[1] * 72 / dpi
# max allowed size
xmax = float(box[2] - box[0])
ymax = float(box[3] - box[1])
if x > xmax:
y = y * xmax / x
x = xmax
if y > ymax:
x = x * ymax / y
y = ymax
dx = (xmax - x) / 2 + box[0]
dy = (ymax - y) / 2 + box[1]
self._fp_write("gsave\n{:f} {:f} translate\n".format(dx, dy))
if (x, y) != im.size:
# EpsImagePlugin._save prints the image at (0,0,xsize,ysize)
sx = x / im.size[0]
sy = y / im.size[1]
self._fp_write("{:f} {:f} scale\n".format(sx, sy))
EpsImagePlugin._save(im, self.fp, None, 0)
self._fp_write("\ngrestore\n")
# --------------------------------------------------------------------
# Postscript driver
#
# EDROFF.PS -- Postscript driver for Edroff 2
#
# History:
# 94-01-25 fl: created (edroff 2.04)
#
# Copyright (c) Fredrik Lundh 1994.
#
EDROFF_PS = """\
/S { show } bind def
/P { moveto show } bind def
/M { moveto } bind def
/X { 0 rmoveto } bind def
/Y { 0 exch rmoveto } bind def
/E { findfont
dup maxlength dict begin
{
1 index /FID ne { def } { pop pop } ifelse
} forall
/Encoding exch def
dup /FontName exch def
currentdict end definefont pop
} bind def
/F { findfont exch scalefont dup setfont
[ exch /setfont cvx ] cvx bind def
} bind def
"""
#
# VDI.PS -- Postscript driver for VDI meta commands
#
# History:
# 94-01-25 fl: created (edroff 2.04)
#
# Copyright (c) Fredrik Lundh 1994.
#
VDI_PS = """\
/Vm { moveto } bind def
/Va { newpath arcn stroke } bind def
/Vl { moveto lineto stroke } bind def
/Vc { newpath 0 360 arc closepath } bind def
/Vr { exch dup 0 rlineto
exch dup neg 0 exch rlineto
exch neg 0 rlineto
0 exch rlineto
100 div setgray fill 0 setgray } bind def
/Tm matrix def
/Ve { Tm currentmatrix pop
translate scale newpath 0 0 .5 0 360 arc closepath
Tm setmatrix
} bind def
/Vf { currentgray exch setgray fill setgray } bind def
"""
#
# ERROR.PS -- Error handler
#
# History:
# 89-11-21 fl: created (pslist 1.10)
#
ERROR_PS = """\
/landscape false def
/errorBUF 200 string def
/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def
errordict begin /handleerror {
initmatrix /Courier findfont 10 scalefont setfont
newpath 72 720 moveto $error begin /newerror false def
(PostScript Error) show errorNL errorNL
(Error: ) show
/errorname load errorBUF cvs show errorNL errorNL
(Command: ) show
/command load dup type /stringtype ne { errorBUF cvs } if show
errorNL errorNL
(VMstatus: ) show
vmstatus errorBUF cvs show ( bytes available, ) show
errorBUF cvs show ( bytes used at level ) show
errorBUF cvs show errorNL errorNL
(Operand stack: ) show errorNL /ostack load {
dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
} forall errorNL
(Execution stack: ) show errorNL /estack load {
dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL
} forall
end showpage
} def end
"""
|
karst87/ml
|
01_openlibs/tensorflow/02_tfgirls/TensorFlow-and-DeepLearning-Tutorial-master/Season1/1-3/run.py
|
Python
|
mit
| 3,407
| 0.026719
|
# encoding: utf-8
# For Python 3 compatibility, in case you are using Python 2.7
from __future__ import print_function, division
import tensorflow as tf
print('Loaded TF version', tf.__version__, '\n\n')
# In mathematics, "Tensor" means exactly that: a tensor.
# Scalars, vectors, tensors -- understood simply:
# a scalar represents a value,
# a vector represents a position (a point in space),
# a tensor represents the whole space.
# A one-dimensional array is a vector;
# a multi-dimensional array is a tensor, and a matrix is a tensor too.
# Four important types
# @Variable  a variable in the computation graph
# @Tensor    a multi-dimensional matrix, with many methods
# @Graph     a computation graph
# @Session   used to run a computation graph
# Three important functions
# Variable
# tf.Variable.__init__(
# initial_value=None, @Tensor
# trainable=True,
# collections=None,
# validate_shape=True,
# caching_device=None,
# name=None,
# variable_def=None,
# dtype=None)
# Note: Variable is a class, and Tensor is also a class
# Constant
# tf.constant(value, dtype=None, shape=None, name='Const')
# return: a constant @Tensor
# Placeholder  a temporary variable?
# tf.placeholder(dtype, shape=None, name=None)
# return: a @Tensor that does not exist yet
# Let's implement some simple functions with the computation graph
# + - * /  the four basic arithmetic operations
def basic_operation():
v1 = tf.Variable(10)
v2 = tf.Variable(5)
addv = v1 + v2
print(addv)
print(type(addv))
print(type(v1))
c1 = tf.constant(10)
c2 = tf.constant(5)
addc = c1 + c2
print(addc)
print(type(addc))
print(type(c1))
    # The object/instance used to run the computation graph
    # session is a runtime
    sess = tf.Session()
    # Variable -> initialize -> a Tensor holding a value
    tf.initialize_all_variables().run(session=sess)
    print('Variables need to be initialized')
    print('addition(v1, v2) = ', addv.eval(session=sess))
    print('addition(v1, v2) = ', sess.run(addv))
    print('addition(c1, c2) = ', addc.eval(session=sess))
    print('\n\n')
# This pattern of defining operations first and executing them later is called
# "symbolic programming" (Symbolic Programming).
# tf.Graph.__init__()
# Creates a new, empty Graph.
graph = tf.Graph()
with graph.as_default():
value1 = tf.constant([1,2])
value2 = tf.Variable([3,4])
mul = value1 / value2
with tf.Session(graph=graph) as mySess:
tf.initialize_all_variables().run()
        print('element-wise division(value1, value2) = ', mySess.run(mul))
        print('element-wise division(value1, value2) = ', mul.eval())
# tensor.eval(session=sess)
    # sess.run(tensor)
    # Saving memory? placeholder is the way to go
def use_placeholder():
graph = tf.Graph()
with graph.as_default():
value1 = tf.placeholder(dtype=tf.float64)
value2 = tf.Variable([3, 4], dtype=tf.float64)
mul = value1 * value2
with tf.Session(graph=graph) as mySess:
tf.initialize_all_variables().run()
        # Imagine this data being loaded from a remote source:
        # a file, or the network
        # pretend it is 10 GB
        value = load_from_remote()
        for partialValue in load_partial(value, 2):
            # runResult = mySess.run(mul, feed_dict={value1: partialValue})
            evalResult = mul.eval(feed_dict={value1: partialValue})
            print('multiplication(value1, value2) = ', evalResult)
# cross validation
def load_from_remote():
return [-x for x in range(1000)]
# A custom iterator
# yield, generator function
def load_partial(value, step):
index = 0
while index < len(value):
yield value[index:index+step]
index += step
return
if __name__ == '__main__':
basic_operation()
    use_placeholder()
|
Teamxrtc/webrtc-streaming-node
|
third_party/webrtc/src/chromium/src/build/android/pylib/base/test_collection.py
|
Python
|
mit
| 2,343
| 0.011524
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import threading
class TestCollection(object):
"""A threadsafe collection of tests.
Args:
tests: List of tests to put in the collection.
"""
def __init__(self, tests=None):
if not tests:
tests = []
self._lock = threading.Lock()
self._tests = []
self._tests_in_progress = 0
# Used to signal that an item is available or all items have been handled.
self._item_available_or_all_done = threading.Event()
for t in tests:
self.add(t)
def _pop(self):
"""Pop a test from the collection.
Waits until a test is available or all tests have been handled.
Returns:
A test or None if all tests have been handled.
"""
while True:
# Wait for a test to be available or all tests to have been handled.
self._item_available_or_all_done.wait()
with self._lock:
# Check which of the two conditions triggered the signal.
if self._tests_in_progress == 0:
return None
try:
return self._tests.pop(0)
except IndexError:
# Another thread beat us to the available test, wait again.
self._item_available_or_all_done.clear()
def add(self, test):
"""Add a test to the collection.
Args:
test: A test to add.
"""
with self._lock:
self._tests.append(test)
self._item_available_or_all_done.set()
      self._tests_in_progress += 1
def test_completed(self):
"""Indicate that a test has been fully handled."""
with self._lock:
self._tests_in_progress -= 1
if self._tests_in_progress == 0:
# All tests have been handled, signal all waiting threads.
self._item_available_or_all_done.set()
  def __iter__(self):
"""Iterate through tests in the collection until all have been handled."""
while True:
r = self._pop()
if r is None:
break
yield r
def __len__(self):
"""Return the number of tests currently in the collection."""
return len(self._tests)
def test_names(self):
"""Return a list of the names of the tests currently in the collection."""
with self._lock:
return list(t.test for t in self._tests)
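# Editor's sketch (not part of the original file): draining the collection
# from two worker threads. The namedtuple below stands in for real test
# objects, which only need a `test` attribute (see test_names above).
if __name__ == '__main__':
  import collections
  Test = collections.namedtuple('Test', ['test'])
  collection = TestCollection([Test('a'), Test('b'), Test('c')])
  results = []
  def _worker():
    for t in collection:
      results.append(t.test)       # "run" the test
      collection.test_completed()  # mark it as fully handled
  workers = [threading.Thread(target=_worker) for _ in range(2)]
  for w in workers:
    w.start()
  for w in workers:
    w.join()
  print(sorted(results))  # -> ['a', 'b', 'c']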
|
addition-it-solutions/project-all
|
openerp/addons/base/tests/test_osv.py
|
Python
|
agpl-3.0
| 4,654
| 0.004297
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 OpenERP S.A. http://www.openerp.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import unittest
from openerp.osv.query import Query
class QueryTestCase(unittest.TestCase):
def test_basic_query(self):
query = Query()
query.tables.extend(['"product_product"', '"product_template"'])
query.where_clause.append("product_product.template_id = product_template.id")
query.add_join(("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False) # add normal join
query.add_join(("prod
|
uct_product", "res_user", "user_id", "id", "user_id"), implicit=False, outer=True) # outer join
        self.assertEquals(query.get_sql()[0].strip(),
""""product_product" LEFT JOIN "res_user" as "product_product__user_id" ON ("product_product"."user_id" = "product_product__user_id"."id"),"product_template" JOIN "product_category" as "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") """.strip())
self.assertEquals(query.get_sql()[1].strip(), """product_product.template_id = product_template.id""".strip())
def test_query_chained_explicit_joins(self):
query = Query()
query.tables.extend(['"product_product"', '"product_template"'])
query.where_clause.append("product_product.template_id = product_template.id")
query.add_join(("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False) # add normal join
query.add_join(("product_template__categ_id", "res_user", "user_id", "id", "user_id"), implicit=False, outer=True) # CHAINED outer join
self.assertEquals(query.get_sql()[0].strip(),
""""product_product","product_template" JOIN "product_category" as "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" as "product_template__categ_id__user_id" ON ("product_template__categ_id"."user_id" = "product_template__categ_id__user_id"."id")""".strip())
self.assertEquals(query.get_sql()[1].strip(), """product_product.template_id = product_template.id""".strip())
def test_mixed_query_chained_explicit_implicit_joins(self):
query = Query()
query.tables.extend(['"product_product"', '"product_template"'])
query.where_clause.append("product_product.template_id = product_template.id")
query.add_join(("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False) # add normal join
query.add_join(("product_template__categ_id", "res_user", "user_id", "id", "user_id"), implicit=False, outer=True) # CHAINED outer join
query.tables.append('"account.account"')
query.where_clause.append("product_category.expense_account_id = account_account.id") # additional implicit join
self.assertEquals(query.get_sql()[0].strip(),
""""product_product","product_template" JOIN "product_category" as "product_template__categ_id" ON ("product_template"."categ_id" = "product_template__categ_id"."id") LEFT JOIN "res_user" as "product_template__categ_id__user_id" ON ("product_template__categ_id"."user_id" = "product_template__categ_id__user_id"."id"),"account.account" """.strip())
self.assertEquals(query.get_sql()[1].strip(), """product_product.template_id = product_template.id AND product_category.expense_account_id = account_account.id""".strip())
def test_raise_missing_lhs(self):
query = Query()
query.tables.append('"product_product"')
self.assertRaises(AssertionError, query.add_join, ("product_template", "product_category", "categ_id", "id", "categ_id"), implicit=False, outer=False)
|
Tankobot/mechalature
|
core/identify.py
|
Python
|
gpl-3.0
| 735
| 0
|
from core import MechalatureError
import shelve
__all__ = [
'MechalatureEvent',
'get_info'
]
word_bank = shelve.open('bin/word_bank')
class MechalatureEvent:
def __init__(self, name: str):
self.name = name
self._tags = set()
def tag(self, terms: set):
        self._tags |= terms
def check(self):
get_info(self)
class TagError(MechalatureError):
def __init__(self, msg: str, tags: tuple):
self.tags = tags
super().__init__(msg)
possible_tags = (
'noun',
'adjective',
'verb',
'plural',
'singular'
)
def get_info(event: MechalatureEvent):
try:
        tags = word_bank[event.name]
except KeyError:
tags = ()
# TODO
|
googleapis/python-os-config
|
google/cloud/osconfig_v1/types/__init__.py
|
Python
|
apache-2.0
| 4,373
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .inventory import (
GetInventoryRequest,
Inventory,
ListInventoriesRequest,
ListInventoriesResponse,
InventoryView,
)
from .os_policy import OSPolicy
from .os_policy_assignment_reports import (
GetOSPolicyAssignmentReportRequest,
ListOSPolicyAssignmentReportsRequest,
ListOSPolicyAssignmentReportsResponse,
OSPolicyAssignmentReport,
)
from .os_policy_assignments import (
CreateOSPolicyAssignmentRequest,
DeleteOSPolicyAssignmentRequest,
GetOSPolicyAssignmentRequest,
ListOSPolicyAssignmentRevisionsRequest,
ListOSPolicyAssignmentRevisionsResponse,
ListOSPolicyAssignmentsRequest,
ListOSPolicyAssignmentsResponse,
OSPolicyAssignment,
OSPolicyAssignmentOperationMetadata,
UpdateOSPolicyAssignmentRequest,
)
from .osconfig_common import FixedOrPercent
from .patch_deployments import (
CreatePatchDeploymentRequest,
DeletePatchDeploymentRequest,
GetPatchDeploymentRequest,
ListPatchDeploymentsRequest,
ListPatchDeploymentsResponse,
MonthlySchedule,
OneTimeSchedule,
PatchDeployment,
PausePatchDeploymentRequest,
RecurringSchedule,
ResumePatchDeploymentRequest,
UpdatePatchDeploymentRequest,
WeekDayOfMonth,
WeeklySchedule,
)
from .patch_jobs import (
AptSettings,
CancelPatchJobRequest,
ExecStep,
ExecStepConfig,
ExecutePatchJobRequest,
GcsObject,
GetPatchJobRequest,
GooSettings,
Instance,
ListPatchJobInstanceDetailsRequest,
ListPatchJobInstanceDetailsResponse,
ListPatchJobsRequest,
ListPatchJobsResponse,
PatchConfig,
PatchInstanceFilter,
PatchJob,
PatchJobInstanceDetails,
PatchRollout,
WindowsUpdateSettings,
YumSettings,
ZypperSettings,
)
from .vulnerability import (
CVSSv3,
GetVulnerabilityReportRequest,
ListVulnerabilityReportsRequest,
ListVulnerabilityReportsResponse,
VulnerabilityReport,
)
__all__ = (
"GetInventoryRequest",
"Inventory",
"ListInventoriesRequest",
"ListInventoriesResponse",
"InventoryView",
"OSPolicy",
"GetOSPolicyAssignmentReportRequest",
"ListOSPolicyAssignmentReportsRequest",
"ListOSPolicyAssignmentReportsResponse",
"OSPolicyAssignmentReport",
"CreateOSPolicyAssignmentRequest",
"DeleteOSPolicyAssignmentRequest",
"GetOSPolicyAssignmentRequest",
"ListOSPolicyAssignmentRevisionsRequest",
"ListOSPolicyAssignmentRevisionsResponse",
"ListOSPolicyAssignmentsRequest",
"ListOSPolicyAssignmentsResponse",
"OSPolicyAssignment",
"OSPolicyAssignmentOperationMetadata",
"UpdateOSPolicyAssignmentRequest",
"FixedOrPercent",
"CreatePatchDeploymentRequ
|
est",
"DeletePatchDeploymentRequest",
"GetPatchDeploymentRequest",
"ListPatchDeploymentsRequest",
"ListPatchDeploymentsResponse",
"MonthlySchedule",
"OneTimeSchedule",
"PatchDeployment",
"PausePatchDeploymentRequest",
"RecurringSchedule",
"ResumePatchDeploymentRequest",
"Update
|
PatchDeploymentRequest",
"WeekDayOfMonth",
"WeeklySchedule",
"AptSettings",
"CancelPatchJobRequest",
"ExecStep",
"ExecStepConfig",
"ExecutePatchJobRequest",
"GcsObject",
"GetPatchJobRequest",
"GooSettings",
"Instance",
"ListPatchJobInstanceDetailsRequest",
"ListPatchJobInstanceDetailsResponse",
"ListPatchJobsRequest",
"ListPatchJobsResponse",
"PatchConfig",
"PatchInstanceFilter",
"PatchJob",
"PatchJobInstanceDetails",
"PatchRollout",
"WindowsUpdateSettings",
"YumSettings",
"ZypperSettings",
"CVSSv3",
"GetVulnerabilityReportRequest",
"ListVulnerabilityReportsRequest",
"ListVulnerabilityReportsResponse",
"VulnerabilityReport",
)
|
LeotisBuchanan/olpc-datavisualization-
|
models.py
|
Python
|
gpl-2.0
| 341
| 0.005865
|
from olpc import db
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(80))
email = db.Column(db.String(120), unique=True)
def __init__(self, name, email):
self.name = name
self.email = email
def __repr__(self):
        return '<Name %r>' % self.name
|
alex8866/cinder
|
cinder/tests/zonemanager/test_cisco_fc_zone_client_cli.py
|
Python
|
apache-2.0
| 9,495
| 0
|
# (c) Copyright 2014 Cisco Systems Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Unit tests for Cisco fc zone client cli."""
from mock import patch
from cinder import exception
from cinder.openstack.common import processutils
from cinder import test
from cinder.zonemanager.drivers.cisco.cisco_fc_zone_client_cli \
import CiscoFCZoneClientCLI
import cinder.zonemanager.drivers.cisco.fc_zone_constants as ZoneConstant
nsshow = '20:1a:00:05:1e:e8:e3:29'
switch_data = ['VSAN 303\n',
'----------------------------------------------------------\n',
'FCID TYPE PWWN (VENDOR) FC4-TYPE:FEATURE\n',
'----------------------------------------------------------\n',
'0x030001 N 20:1a:00:05:1e:e8:e3:29 (Cisco) ipfc\n',
'0x030101 NL 10:00:00:00:77:99:60:2c (Interphase)\n',
'0x030200 NL 10:00:00:49:c9:28:c7:01\n']
cfgactv = ['zoneset name OpenStack_Cfg vsan 303\n',
'zone name openstack50060b0000c26604201900051ee8e329 vsan 303\n',
'pwwn 50:06:0b:00:00:c2:66:04\n',
'pwwn 20:19:00:05:1e:e8:e3:29\n']
active_zoneset = {
'zones': {
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29']},
'active_zone_config': 'OpenStack_Cfg'}
zoning_status_data_basic = [
'VSAN: 303 default-zone: deny distribute: active only Interop: default\n',
' mode: basic merge-control: allow\n',
' session: none\n',
' hard-zoning: enabled broadcast: unsupported\n',
' smart-zoning: disabled\n',
' rscn-format: fabric-address\n',
'Default zone:\n',
' qos: none broadcast: unsupported ronly: unsupported\n',
'Full Zoning Database :\n',
' DB size: 220 bytes\n',
' Zonesets:2 Zones:2 Aliases: 0\n',
'Active Zoning Database :\n',
' DB size: 80 bytes\n',
' Name: test-zs-test Zonesets:1 Zones:1\n',
'Status:\n']
zoning_status_basic = {'mode': 'basic', 'session': 'none'}
zoning_status_data_enhanced_nosess = [
'VSAN: 303 default-zone: deny distribute: active only Interop: default\n',
' mode: enhanced merge-control: allow\n',
' session: none\n',
' hard-zoning: enabled broadcast: unsupported\n',
' smart-zoning: disabled\n',
' rscn-format: fabric-address\n',
'Default zone:\n',
' qos: none broadcast: unsupported ronly: unsupported\n',
'Full Zoning Database :\n',
' DB size: 220 bytes\n',
' Zonesets:2 Zones:2 Aliases: 0\n',
'Active Zoning Database :\n',
' DB size: 80 bytes\n',
' Name: test-zs-test Zonesets:1 Zones:1\n',
'Status:\n']
zoning_status_enhanced_nosess = {'mode': 'enhanced', 'session': 'none'}
zoning_status_data_enhanced_sess = [
'VSAN: 303 default-zone: deny distribute: active only Interop: default\n',
' mode: enhanced merge-control: allow\n',
' session: otherthannone\n',
' hard-zoning: enabled broadcast: unsupported\n',
' smart-zoning: disabled\n',
' rscn-format: fabric-address\n',
'Default zone:\n',
' qos: none broadcast: unsupported ronly: unsupported\n',
'Full Zoning Database :\n',
' DB size: 220 bytes\n',
' Zonesets:2 Zones:2 Aliases: 0\n',
'Active Zoning Database :\n',
' DB size: 80 bytes\n',
' Name: test-zs-test Zonesets:1 Zones:1\n',
'Status:\n']
zoning_status_enhanced_sess = {'mode': 'enhanced', 'session': 'otherthannone'}
active_zoneset_multiple_zones = {
'zones': {
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29'],
'openstack50060b0000c26602201900051ee8e327':
['50:06:0b:00:00:c2:66:02', '20:19:00:05:1e:e8:e3:27']},
'active_zone_config': 'OpenStack_Cfg'}
new_zone = {'openstack10000012345678902001009876543210':
['10:00:00:12:34:56:78:90', '20:01:00:98:76:54:32:10']}
new_zones = {'openstack10000012345678902001009876543210':
['10:00:00:12:34:56:78:90', '20:01:00:98:76:54:32:10'],
'openstack10000011111111112001001111111111':
['10:00:00:11:11:11:11:11', '20:01:00:11:11:11:11:11']}
zone_names_to_delete = 'openstack50060b0000c26604201900051ee8e329'
class TestCiscoFCZoneClientCLI(CiscoFCZoneClientCLI, test.TestCase):
def setUp(self):
super(TestCiscoFCZoneClientCLI, self).setUp()
self.fabric_vsan = '303'
# override some of the functions
def __init__(self, *args, **kwargs):
test.TestCase.__init__(self, *args, **kwargs)
    @patch.object(CiscoFCZoneClientCLI, '_get_switch_info')
def test_get_active_zone_set(self, get_switch_info_mock):
cmd_list = [ZoneConstant.GET_ACTIVE_ZONE_CFG, self.fabric_vsan,
' | no-more']
get_switch_info_mock.return_value = cfgactv
active_zoneset_returned = self.get_active_zone_set()
get_switch_info_mock.assert_called_once_with(cmd_list)
self.assertDictMatch(active_zoneset_returned, active_zoneset)
    @patch.object(CiscoFCZoneClientCLI, '_run_ssh')
def test_get_active_zone_set_ssh_error(self, run_ssh_mock):
run_ssh_mock.side_effect = processutils.ProcessExecutionError
self.assertRaises(exception.CiscoZoningCliException,
self.get_active_zone_set)
@patch.object(CiscoFCZoneClientCLI, '_get_switch_info')
def test_get_zoning_status_basic(self, get_zoning_status_mock):
cmd_list = [ZoneConstant.GET_ZONE_STATUS, self.fabric_vsan]
get_zoning_status_mock.return_value = zoning_status_data_basic
zoning_status_returned = self.get_zoning_status()
get_zoning_status_mock.assert_called_once_with(cmd_list)
self.assertDictMatch(zoning_status_returned, zoning_status_basic)
@patch.object(CiscoFCZoneClientCLI, '_get_switch_info')
def test_get_zoning_status_enhanced_nosess(self, get_zoning_status_mock):
cmd_list = [ZoneConstant.GET_ZONE_STATUS, self.fabric_vsan]
get_zoning_status_mock.return_value =\
zoning_status_data_enhanced_nosess
zoning_status_returned = self.get_zoning_status()
get_zoning_status_mock.assert_called_once_with(cmd_list)
self.assertDictMatch(zoning_status_returned,
zoning_status_enhanced_nosess)
@patch.object(CiscoFCZoneClientCLI, '_get_switch_info')
def test_get_zoning_status_enhanced_sess(self, get_zoning_status_mock):
cmd_list = [ZoneConstant.GET_ZONE_STATUS, self.fabric_vsan]
get_zoning_status_mock.return_value = zoning_status_data_enhanced_sess
zoning_status_returned = self.get_zoning_status()
get_zoning_status_mock.assert_called_once_with(cmd_list)
self.assertDictMatch(zoning_status_returned,
zoning_status_enhanced_sess)
@patch.object(CiscoFCZoneClientCLI, '_get_switch_info')
def test_get_nameserver_info(self, get_switch_info_mock):
ns_info_list = []
ns_info_list_expected = ['20:1a:00:05:1e:e8:e3:29']
get_switch_info_mock.return_value = (switch_data)
ns_info_list = self.get_nameserver_info()
self.assertEqual(ns_info_list, ns_info_list_expected)
@patch.object(CiscoFCZoneClientCLI, '_run_ssh')
def test_get_nameserver_info_ssh_error(self, run_ssh_mock):
run_ssh_mock.side_effect = processutils.ProcessExecutionError
self.assertRaises(exception.CiscoZoningCliException,
self.get_nameserver_info)
@
|
humw/algorithms_in_python
|
merge_sort/merge_sort.py
|
Python
|
gpl-2.0
| 667
| 0
|
def merge(a, b):
"""
    input: two sorted lists
output: a merged sorted list
for example:
merge([2,3], [1,4])
--> [1,2,3,4]
"""
merged = []
    while a or b:
if a and b:
if a[0] < b[0]:
merged.append(a.pop(0))
else:
merged.append(b.pop(0))
else:
merged += a + b
break
return merged
def merge_sort(one_list):
# divide
    if len(one_list) <= 1:
        return one_list
    middle = int(len(one_list)/2)
left = merge_sort(one_list[:middle])
right = merge_sort(one_list[middle:])
# conquer
return merge(left, right)
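# Editor's sketch (not in the original file): a quick demonstration.
if __name__ == '__main__':
    print(merge_sort([5, 2, 9, 1, 5, 6]))  # -> [1, 2, 5, 5, 6, 9]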
|
ocefpaf/cartopy
|
lib/cartopy/tests/conftest.py
|
Python
|
lgpl-3.0
| 1,159
| 0
|
# (C) British Crown Copyright 2020, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
def pytest_configure(config):
# Register additional markers.
    config.addinivalue_line('markers',
                            'natural_earth: mark tests that use Natural Earth '
'data, and the network, if not cached.')
config.addinivalue_line('markers',
'network: mark tests that use the network.')
|
normalnorway/normal.no
|
django/core/views.py
|
Python
|
gpl-3.0
| 733
| 0.0191
|
"""
Global views -- i.e., views not tied to any app/model.
"""
from core.shortcuts import render_to
@render_to ('index.html')
def index (request):
return {}
## /newsletter/
from utils.mailchimp import MailChimp
from django.conf import settings
#mailchimp = None
#if settings.MAILCHIMP_API_KEY:
# mailchimp = MailChimp (settings.MAILCHIMP_API_KEY)
mailchimp = MailChimp (settings.MAILCHIMP_API_KEY) if settings.MAILCHIMP_API_KEY else None
# or use config.mailchimp in core/context_processors.py (like piwik) ?
# or: MailChimp(None) => all operations is nop
# @todo log/warn missing api-key?
@render_to ('newsletter.html')
def newsletter (request):
    return {'campaigns': mailchimp.get_campaigns if mailchimp else None}
|
colour-science/colour
|
colour/utilities/tests/test_data_structures.py
|
Python
|
bsd-3-clause
| 17,079
| 0.000586
|
"""Defines the unit tests for the :mod:`colour.utilities.data_structures` module."""
import numpy as np
import operator
import pickle
import unittest
from colour.utilities import (
Structure,
Lookup,
CaseInsensitiveMapping,
LazyCaseInsensitiveMapping,
Node,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright 2013 Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"TestStructure",
"TestLookup",
"TestCaseInsensitiveMapping",
"TestLazyCaseInsensitiveMapping",
"TestNode",
]
class TestStructure(unittest.TestCase):
"""
Define :class:`colour.utilities.data_structures.Structure` class unit
tests methods.
"""
def test_Structure(self):
"""Test :class:`colour.utilities.data_structures.Structure` class."""
structure = Structure(John="Doe", Jane="Doe")
self.assertIn("John", structure)
self.assertTrue(hasattr(structure, "John"))
setattr(structure, "John", "Nemo")
self.assertEqual(structure["John"], "Nemo")
structure["John"] = "Vador"
self.assertEqual(structure["John"], "Vador")
del structure["John"]
self.assertNotIn("John", structure)
self.assertFalse(hasattr(structure, "John"))
structure.John = "Doe"
self.assertIn("John", structure)
self.assertTrue(hasattr(structure, "John"))
del structure.John
self.assertNotIn("John", structure)
self.assertFalse(hasattr(structure, "John"))
structure = Structure(John=None, Jane=None)
self.assertIsNone(structure.John)
self.assertIsNone(structure["John"])
structure.update(**{"John": "Doe", "Jane": "Doe"})
self.assertEqual(structure.John, "Doe")
self.assertEqual(structure["John"], "Doe")
def test_Structure_pickle(self):
"""
Test :class:`colour.utilities.data_structures.Structure` class
pickling.
"""
structure = Structure(John="Doe", Jane="Doe")
data = pickle.dumps(structure)
data = pickle.loads(data)
self.assertEqual(structure, data)
data = pickle.dumps(structure, pickle.HIGHEST_PROTOCOL)
data = pickle.loads(data)
self.assertEqual(structure, data)
self.assertEqual(sorted(dir(data)), ["Jane", "John"])
class TestLookup(unittest.TestCase):
"""
Define :class:`colour.utilities.data_structures.Lookup` class unit tests
methods.
"""
def test_required_methods(self):
"""Test the presence of required methods."""
required_methods = ("keys_from_value", "first_key_from_value")
for method in required_methods:
self.assertIn(method, dir(Lookup))
def test_keys_from_value(self):
"""
Test :meth:`colour.utilities.data_structures.Lookup.keys_from_value`
method.
"""
lookup = Lookup(John="Doe", Jane="Doe", Luke="Skywalker")
self.assertListEqual(
["Jane", "John"], sorted(lookup.keys_from_value("Doe"))
)
lookup = Lookup(
A=np.array([0, 1, 2]), B=np.array([0, 1, 2]), C=np.array([1, 2, 3])
        )
        self.assertListEqual(
["A", "B"], sorted(lookup.keys_from_value(np.array([0, 1, 2])))
)
def test_first_key_from_value(self):
"""
Test :meth:`colour.utilities.data_structures.\
Lookup.first_key_from_value` method.
"""
lookup = Lookup(first_name="John", last_name="Doe", gender="male")
self.assertEqual("first_name", lookup.first_key_from_value("John"))
lookup = Lookup(
A=np.array([0, 1, 2]), B=np.array([1, 2, 3]), C=np.array([2, 3, 4])
)
self.assertEqual("A", lookup.first_key_from_value(np.array([0, 1, 2])))
def test_raise_exception_first_key_from_value(self):
"""
Test :meth:`colour.utilities.data_structures.\
Lookup.first_key_from_value` method raised exception.
"""
self.assertRaises(IndexError, Lookup().first_key_from_value, "John")
class TestCaseInsensitiveMapping(unittest.TestCase):
"""
Define :class:`colour.utilities.data_structures.CaseInsensitiveMapping`
class unit tests methods.
"""
def test_required_attributes(self):
"""Test the presence of required attributes."""
required_attributes = ("data",)
for attribute in required_attributes:
self.assertIn(attribute, dir(CaseInsensitiveMapping))
def test_required_methods(self):
"""Test the presence of required methods."""
required_methods = (
"__init__",
"__repr__",
"__setitem__",
"__getitem__",
"__delitem__",
"__contains__",
"__iter__",
"__len__",
"__eq__",
"__ne__",
"copy",
"lower_items",
)
for method in required_methods:
self.assertIn(method, dir(CaseInsensitiveMapping))
def test_data(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.data` property.
"""
self.assertDictEqual(
CaseInsensitiveMapping({"John": "Doe", "Jane": "Doe"}).data,
{"jane": ("Jane", "Doe"), "john": ("John", "Doe")},
)
def test__repr__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__repr__` method.
"""
mapping = CaseInsensitiveMapping()
mapping["John"] = "Doe"
self.assertEqual(
repr(mapping), "CaseInsensitiveMapping({'John': 'Doe'})"
)
def test__setitem__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__setitem__` method.
"""
mapping = CaseInsensitiveMapping()
mapping["John"] = "Doe"
self.assertEqual(mapping["John"], "Doe")
self.assertEqual(mapping["john"], "Doe")
def test__getitem__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__getitem__` method.
"""
mapping = CaseInsensitiveMapping(John="Doe", Jane="Doe")
self.assertEqual(mapping["John"], "Doe")
self.assertEqual(mapping["john"], "Doe")
self.assertEqual(mapping["Jane"], "Doe")
self.assertEqual(mapping["jane"], "Doe")
mapping = CaseInsensitiveMapping({1: "Foo", 2: "Bar"})
self.assertEqual(mapping[1], "Foo")
def test__delitem__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__delitem__` method.
"""
mapping = CaseInsensitiveMapping(John="Doe", Jane="Doe")
del mapping["john"]
self.assertNotIn("John", mapping)
del mapping["Jane"]
self.assertNotIn("jane", mapping)
self.assertEqual(len(mapping), 0)
def test__contains__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__contains__` method.
"""
mapping = CaseInsensitiveMapping(John="Doe", Jane="Doe")
self.assertIn("John", mapping)
self.assertIn("john", mapping)
self.assertIn("Jane", mapping)
self.assertIn("jane", mapping)
def test__iter__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__iter__` method.
"""
mapping = CaseInsensitiveMapping(John="Doe", Jane="Doe")
self.assertListEqual(
sorted(item for item in mapping), ["Jane", "John"]
)
def test__len__(self):
"""
Test :meth:`colour.utilities.data_structures.\
CaseInsensitiveMapping.__len__` method.
"""
self.assertEqual(len(CaseInsensitiveMapping()), 0)
self.assertEqual(
len(CaseInsensitiveMapping(John="Doe", Jane="Doe")), 2
)
def test__eq__(self):
"""
Test :meth:`co
|
joequant/pyswagger
|
pyswagger/tests/v2_0/test_circular.py
|
Python
|
mit
| 3,163
| 0.002845
|
from pyswagger import SwaggerApp, utils, primitives, errs
from ..utils import get_test_data_folder
from ...scanner import CycleDetector
from ...scan import Scanner
import unittest
import os
import six
class CircularRefTestCase(unittest.TestCase):
""" test for circular reference guard """
def test_path_item_prepare_with_cycle(self):
app = SwaggerApp.load(get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'path_item')
))
# should raise nothing
app.prepare()
def test_path_item(self):
folder = get_test_data_folder(
version='2.0',
            which=os.path.join('circular', 'path_item')
)
def _pf(s):
return six.moves.urllib.parse.urlunparse((
'file',
'',
folder,
'',
'',
s))
app = SwaggerApp.create(folder)
        s = Scanner(app)
        c = CycleDetector()
s.scan(root=app.raw, route=[c])
self.assertEqual(sorted(c.cycles['path_item']), sorted([[
_pf('/paths/~1p1'),
_pf('/paths/~1p2'),
_pf('/paths/~1p3'),
_pf('/paths/~1p4'),
_pf('/paths/~1p1')
]]))
def test_schema(self):
folder = get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'schema')
)
def _pf(s):
return six.moves.urllib.parse.urlunparse((
'file',
'',
folder,
'',
'',
s))
app = SwaggerApp.load(folder)
app.prepare(strict=False)
s = Scanner(app)
c = CycleDetector()
s.scan(root=app.raw, route=[c])
self.maxDiff = None
self.assertEqual(sorted(c.cycles['schema']), sorted([
[_pf('/definitions/s10'), _pf('/definitions/s11'), _pf('/definitions/s9'), _pf('/definitions/s10')],
[_pf('/definitions/s5'), _pf('/definitions/s5')],
[_pf('/definitions/s1'), _pf('/definitions/s2'), _pf('/definitions/s3'), _pf('/definitions/s4'), _pf('/definitions/s1')],
[_pf('/definitions/s12'), _pf('/definitions/s13'), _pf('/definitions/s12')],
[_pf('/definitions/s6'), _pf('/definitions/s7'), _pf('/definitions/s6')],
[_pf('/definitions/s14'), _pf('/definitions/s15'), _pf('/definitions/s14')]
]))
def test_deref(self):
app = SwaggerApp.create(get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'schema'),
),
strict=False
)
s = app.resolve('#/definitions/s1')
self.assertRaises(errs.CycleDetectionError, utils.deref, s)
def test_primfactory(self):
app = SwaggerApp.create(get_test_data_folder(
version='2.0',
which=os.path.join('circular', 'schema'),
),
strict=False
)
s = app.resolve('#/definitions/s1')
self.assertRaises(errs.CycleDetectionError, app.prim_factory.produce, s, {})
|
jenshnielsen/hemelb
|
Tools/hemeTools/parsers/geometry/__init__.py
|
Python
|
lgpl-3.0
| 1,408
| 0.035511
|
#
# Copyright (C) University College London, 2007-2012, all rights reserved.
#
# This file is part of HemeLB and is provided to you under the terms of
# the GNU LGPL. Please see LICENSE in the top level directory for full
# details.
#
"""Regarding indices, a few conventions:
1) Broadly there are two types of index:
- Three-dimensional indices into a 3D array, these are typically a
numpy.ndarray (with shape == (3,)) and are named with a suffix of
'Idx'.
   - One-dimensional indices into a flattened array. These are just
integers and have the suffix 'Ijk'.
2) Indices can refer to a number of things and have additional naming:
- b : Index of a block
- sg : Index of a site in the whole domain (site global)
- sl : Index of a site in the block (site local)
"""
import numpy as np
GeometryMagicNumber = 0x676d7904
MooreNeighbourhoodDirections = np.array(
[[-1,-1,-1],
[-1,-1, 0],
[-1,-1,+1],
[-1, 0,-1],
    [-1, 0, 0],
[-1, 0,+1],
[-1,+1,-1],
[-1,+1, 0],
[-1,+1,+1],
[ 0,-1,-1],
[ 0,-1, 0],
[ 0,-1,+1],
[ 0, 0,-1],
#[ 0, 0, 0], <= the null displacement is not part of the Moore N'hood
[ 0, 0,+1],
[ 0,+1,-1],
[ 0,+1, 0],
[ 0,+1,+1],
[+1,-1,-1],
[+1,-1, 0],
[+1,-1,+1],
[+1, 0,-1],
[+1, 0, 0],
[+1, 0,+1],
[+1,+1,-1],
[+1,+1, 0],
[+1,+1,+1]]
)
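# Editor's sketch (not part of the original module): converting between a
# three-dimensional site-local index ('Idx') and its flattened form ('Ijk')
# for a cubic block of side `blockSize`, assuming row-major (C) ordering.
# The helper names are illustrative, not HemeLB API.
def siteIdxToIjk(idx, blockSize):
    return (idx[0] * blockSize + idx[1]) * blockSize + idx[2]

def siteIjkToIdx(ijk, blockSize):
    return np.array([ijk // blockSize ** 2,
                     (ijk // blockSize) % blockSize,
                     ijk % blockSize])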
|
ayoubg/gem5-graphics
|
gem5-gpu/tests/quick/se_gpu/20.bh/test.py
|
Python
|
bsd-3-clause
| 1,653
| 0
|
# Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Joel Hestness
options.clusters = 4
options.cmd = 'gem5_gpu_bh'
options.options = '1024 1 0'
|
googlefonts/fontbakery
|
Lib/fontbakery/sphinx_extensions/profile.py
|
Python
|
apache-2.0
| 24,923
| 0.003491
|
from typing import Any, List, Tuple, Dict #cast
from sphinx.application import Sphinx
# from sphinx.ext.autodoc import Documenter
from sphinx.ext.autodoc import ModuleLevelDocumenter
from sphinx.pycode import ModuleAnalyzer, PycodeError
#from sphinx.domains.python import PythonDomain
from sphinx.locale import __
from sphinx.domains.python import PyObject
from sphinx import addnodes
from sphinx.util.inspect import signature as Signature
from sphinx.util.inspect import stringify_signature
import logging
logger = logging.getLogger(__name__)
# we can get source code first line numbers with this module for object
import inspect
from fontbakery.callable import (
FontbakeryCallable
, FontBakeryCondition
, FontBakeryCheck
, Disabled
, FontBakeryExpectedValue
)
# mute the style checks for unused names
# will be removed eventually
if False: #pylint: disable=using-constant-test
FontbakeryCallable
FontBakeryCondition
FontBakeryCheck
Disabled
FontBakeryExpectedValue
__version__ = '0.0.1'
# ModuleLevelDocumenter(Documenter): Specialized Documenter subclass for objects on module level (functions,
# classes, data/constants). Implements: resolve_name
# https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/autodoc/__init__.py#L850
# Documenter
class FontBakeryCallableDocumenter(ModuleLevelDocumenter):
"""
Specialized Documenter subclass for instances of FontBakeryCheck.
"""
objtype = 'fontbakerycallable'
can_doc_cls = FontbakeryCallable
member_order = 30
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
# type: (Any, str, bool, Any) -> bool
return isinstance(member, cls.can_doc_cls)
def format_args(self): # pylint: disable=arguments-differ # I am really not sure what went wrong here...
# type: () -> str
# We use the original signature from the wrapped _function
has_retval = isinstance(self.object, FontBakeryCondition)
if not hasattr(self.object, '_func'):
            # FIXME! I don't know what this is.
return None
sig = Signature(self.object._func, bound_method=False, has_retval=has_retval)
args = stringify_signature(sig)
# escape backslashes for reST
args = args.replace('\\', '\\\\')
return args
def format_name(self):
# I'm using this to inject some new info into the check
# search for the separator ":::" in this document to see where
# the info is received. This is not a clean solution!
#
# in https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/autodoc/__init__.py#L374
# it says:
# > This normally should be something that can be parsed by the generated
# > directive, but doesn't need to be (Sphinx will display it unparsed
# > then).
# See below in `handle_signature`
    # where that ipdb debugger is started, usually that exception would be
    # dropped and we drop out of signature building. (RAISED here in `_handle_signature`
# The ValueError when the regex doesn't match...)
    # seems like the slash (/) is killing most of the header!
# Otherwise the ids display fine, the dots are fine.
# Also, in any case of name change, the [source] view is killed (removed!)
    # the document and also genindex.html anchor works so far (with 7 instead of /)
#
res = super().format_name()
if self.objtype == 'fontbakerycheck':
        # A bit hackish, splitting somewhere else by ::: to retrieve the check id
# we can get the source file first line number of self.object:
lineno = inspect.getsourcelines(self.object)[1]
res = self.object.id + ':::' + f'{lineno}' + ':::' + res#.replace('/', '7')
# else:
# res = super().format_name()
# print('formatted name:', res)
# > formatted name: com.google.fonts/check/xavgcharwidth:::59:::com_google_fonts_check_xavgcharwidth
# > formatted name: bold_wght_coord
return res
# handle_signature: com_google_fonts_check_post_table_version(ttFont, is_ttf) <desc_signature first="False"/>
# sig signature: com_google_fonts_check_post_table_version(ttFont, is_ttf)
# result: ('com_google_fonts_check_post_table_version', None) signode: <desc_signature class="" first="False" fullname="com_google_fonts_check_post_table_version" module="fontbakery.profiles.post"><desc_annotation xml:space="preserve">FontBakeryCheck </desc_annotation><desc_addname xml:space="preserve">fontbakery.profiles.post.</desc_addname><desc_name xml:space="preserve">com_google_fonts_check_post_table_version</desc_name><desc_parameterlist xml:space="preserve"><desc_parameter xml:space="preserve">ttFont</desc_parameter><desc_parameter xml:space="preserve">is_ttf</desc_parameter></desc_parameterlist></desc_signature>
def generate(self, more_content=None, real_modname=None,
check_module=False, all_members=False):
# type: (Any, str, bool, bool) -> None
"""Generate reST for the object given by *self.name*, and possibly for
its members.
If *more_content* is given, include that content. If *real_modname* is
given, use that module name to find attribute docs. If *check_module* is
True, only generate if the object is defined in the module name it is
imported from. If *all_members* is True, document all members.
"""
# print('generate', more_content, real_modname, check_module, all_members)
# print(self.name)
# print('---------------------')
# > generate None fontbakery.profiles.post True True
# > fontbakery.profiles.post::com_google_fonts_check_post_table_version
# > ---------------------
#
# > generate None fontbakery.profiles.shared_conditions True True
# > fontbakery.profiles.shared_conditions::glyph_metrics_stats
# > ---------------------
if not self.parse_name():
# need a module to import
logger.warning(
__('don\'t know which module to import for autodocumenting '
'%r (try placing a "module" or "currentmodule" directive '
'in the document, or giving an explicit module name)') %
self.name, type='autodoc')
return
# now, import the module and get object to document
if not self.import_object():
return
# doesn't do anything!
# if self.objtype == 'fontbakerycheck':
# self.name = self.object.id
# If there is no real module defined, figure out which to use.
# The real module is used in the module analyzer to look up the module
# where the attribute documentation would actually be found in.
# This is used for situations where you have a module that collects the
# functions and classes of internal submodules.
self.real_modname = real_modname or self.get_real_modname() # type: str
# try to also get a source code analyzer for attribute docs
try:
self.analyzer = ModuleAnalyzer.for_module(self.real_modname)
# parse right now, to get PycodeErrors on parsing (results will
# be cached anyway)
self.analyzer.find_attr_docs()
except PycodeError as err:
logger.debug('[autodoc] module analyzer failed: %s', err)
# no source file -- e.g. for builtin and C modules
self.analyzer = None
# at least add the module.__file__ as a dependency
if hasattr(self.module, '__file__') and self.module.__file__:
self.directive.filename_set.add(self.module.__file__)
else:
self.directive.filename_set.add(self.analyzer.srcname)
# check __module__ of object (for members not given explicitly)
if check_module:
if not self.ch
|
tktrungna/leetcode
|
Python/dungeon-game.py
|
Python
|
mit
| 2,261
| 0.006192
|
"""
QUESTION:
The demons had captured the princess (P) and imprisoned her in the bottom-right corner of a dungeon. The dungeon
consists of M x N rooms laid out in a 2D grid. Our valiant knight (K) was initially positioned in the top-left room
and must fight his way through the dungeon to rescue the princess.
The knight has an initial health point represented by a positive integer. If at any point his health point drops to 0
or below, he dies immediately.
Some of the rooms are guarded by demons, so the knight loses health (negative integers) upon entering these rooms;
other rooms are either empty (0's) or contain magic orbs that increase the knight's health (positive integers).
In order to reach the princess as quickly as possible, the knight decides to move only rightward or downward in each
step.
Write a function to determine the knight's minimum initial health so that he is able to rescue the princess.
For example, given the dungeon below, the initial health of the knight must be at least 7 if he follows the optimal
path RIGHT -> RIGHT -> DOWN -> DOWN.
-2 (K) -3 3
-5 -10 1
10 30 -5 (P)
Notes:
The knight's health has no upper bound.
Any room can contain threats or power-ups, even the first room the knight enters and the bottom-right room where the
princess is imprisoned.
ANSWER:
DP
"""
class Solution:
# @param dungeon, a list of lists of integers
    # @return an integer
def calculateMinimumHP(self, dungeon):
w = len(dungeon[0])
h = len(dungeon)
hp = [[0] * w for x in range(h)]
hp[h - 1][w - 1] = max(0, -dungeon[h - 1][w - 1]) + 1
for x in range(h - 1, -1, -1):
for y in range(w - 1, -1, -1):
down = None
if x + 1 < h:
down = max(1, hp[x + 1][y] - dungeon[x][y])
right = None
if y + 1 < w:
right = max(1, hp[x][y + 1] - dungeon[x][y])
if down and right:
hp[x][y] = min(down, right)
elif down:
hp[x][y] = down
elif right:
hp[x][y] = right
return hp[0][0]
if __name__ == '__main__':
    print Solution().calculateMinimumHP([[-2, -3, 3], [-5, -10, 1], [10, 30, -5]])  # expected: 7
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_10_01/aio/operations/_security_rules_operations.py
|
Python
|
mit
| 22,275
| 0.004893
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class SecurityRulesOperations:
"""SecurityRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'securityRuleName': self._serialize.url("security_rule_name", security_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}'} # type: ignore
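    # A hedged usage sketch (not part of the generated file): one plausible way to
    # drive begin_delete through a service client. The client and credential class
    # names below are assumptions for illustration.
    #
    #   from azure.identity.aio import DefaultAzureCredential
    #   from azure.mgmt.network.v2017_10_01.aio import NetworkManagementClient
    #
    #   async def delete_rule() -> None:
    #       async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
    #           poller = await client.security_rules.begin_delete(
    #               resource_group_name="my-rg",
    #               network_security_group_name="my-nsg",
    #               security_rule_name="deny-all",
    #           )
    #           await poller.result()  # resolves once the LRO reports completion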
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
**kwargs: Any
) -> "_models.SecurityRule":
|
spcui/virt-test
|
virttest/utils_misc.py
|
Python
|
gpl-2.0
| 59,810
| 0.000686
|
"""
Virtualization test utility functions.
:copyright: 2008-2009 Red Hat Inc.
"""
import time
import string
import random
import socket
import os
import signal
import re
import logging
import commands
import fcntl
import sys
import inspect
import tarfile
import shutil
import getpass
from autotest.client import utils, os_dep
from autotest.client.shared import error, logging_config
from autotest.client.shared import git
import data_dir
try:
from staging import utils_koji
except ImportError:
from autotest.client.shared import utils_koji
import platform
ARCH = platform.machine()
class UnsupportedCPU(error.TestError):
pass
# TODO: remove this import when log_last_traceback is moved to autotest
import traceback
# TODO: this function is being moved into autotest. For compatibility
# reasons keep it here too but new code should use the one from base_utils.
def log_last_traceback(msg=None, log=logging.error):
"""
@warning: This function is being moved into autotest and your code should
use autotest.client.shared.base_utils function instead.
Writes last traceback into specified log.
:param msg: Override the default message. ["Original traceback"]
:param log: Where to log the traceback [logging.error]
"""
if not log:
log = logging.error
if msg:
log(msg)
exc_type, exc_value, exc_traceback = sys.exc_info()
if not exc_traceback:
log('Requested log_last_traceback but no exception was raised.')
return
log("Original " +
"".join(traceback.format_exception(exc_type, exc_value,
exc_traceback)))
def lock_file(filename, mode=fcntl.LOCK_EX):
f = open(filename, "w")
fcntl.lockf(f, mode)
return f
def unlock_file(f):
fcntl.lockf(f, fcntl.LOCK_UN)
f.close()
# Utility functions for dealing with external processes
def unique(llist):
"""
    Return a list of the elements in llist, but without duplicates.
    :param llist: List with values.
    :return: List with non-duplicate elements.
"""
n = len(llist)
if n == 0:
return []
u = {}
try:
for x in llist:
u[x] = 1
except TypeError:
return None
else:
return u.keys()
def find_command(cmd):
"""
Try to find a command in the PATH, paranoid version.
:param cmd: Command to be found.
:raise: ValueError in case the command was not found.
"""
common_bin_paths = ["/usr/libexec", "/usr/local/sbin", "/usr/local/bin",
"/usr/sbin", "/usr/bin", "/sbin", "/bin"]
try:
path_paths = os.environ['PATH'].split(":")
    except KeyError:
path_paths = []
path_paths = unique(common_bin_paths + path_paths)
for dir_path in path_paths:
cmd_path = os.path.join(dir_path, cmd)
if os.path.isfile(cmd_path):
return os.path.abspath(cmd_path)
raise ValueError('Missing command: %s' % cmd)
def pid_exists(pid):
"""
Return True if a given PID exists.
:param pid: Process ID number.
"""
try:
os.kill(pid, 0)
return True
except Exception:
return False
def safe_kill(pid, signal):
"""
Attempt to send a signal to a given process that may or may not exist.
    :param pid: Process ID number.
    :param signal: Signal number.
"""
try:
os.kill(pid, signal)
return True
except Exception:
return False
def kill_process_tree(pid, sig=signal.SIGKILL):
"""Signal a process and all of its children.
If the process does not exist -- return.
:param pid: The pid of the process to signal.
:param sig: The signal to send to the processes.
"""
if not safe_kill(pid, signal.SIGSTOP):
return
children = commands.getoutput("ps --ppid=%d -o pid=" % pid).split()
for child in children:
kill_process_tree(int(child), sig)
safe_kill(pid, sig)
safe_kill(pid, signal.SIGCONT)
# The following are utility functions related to ports.
def is_port_free(port, address):
"""
Return True if the given port is available for use.
    :param port: Port number
    :param address: Address to check; "localhost" is checked by binding,
                    any other address by attempting to connect.
"""
try:
s = socket.socket()
#s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if address == "localhost":
s.bind(("localhost", port))
free = True
else:
s.connect((address, port))
free = False
except socket.error:
if address == "localhost":
free = False
else:
free = True
s.close()
return free
def find_free_port(start_port, end_port, address="localhost"):
"""
Return a host free port in the range [start_port, end_port].
:param start_port: First port that will be checked.
:param end_port: Port immediately after the last one that will be checked.
"""
for i in range(start_port, end_port):
if is_port_free(i, address):
return i
return None
def find_free_ports(start_port, end_port, count, address="localhost"):
"""
Return count of host free ports in the range [start_port, end_port].
    :param count: Number of free ports to find.
:param start_port: First port that will be checked.
:param end_port: Port immediately after the last one that will be checked.
"""
ports = []
i = start_port
while i < end_port and count > 0:
if is_port_free(i, address):
ports.append(i)
count -= 1
i += 1
return ports
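# A hedged usage sketch: how the port helpers above might be combined in a test
# (the port range and count are invented for illustration):
#
#   port = find_free_port(5000, 6000)             # one free port, or None
#   ports = find_free_ports(5000, 6000, count=3)  # up to three free ports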
# An easy way to log lines to files when the logging system can't be used
_open_log_files = {}
_log_file_dir = "/tmp"
def log_line(filename, line):
"""
Write a line to a file. '\n' is appended to the line.
:param filename: Path of file to write to, either absolute or relative to
the dir set by set_log_file_dir().
:param line: Line to write.
"""
global _open_log_files, _log_file_dir
path = get_path(_log_file_dir, filename)
if path not in _open_log_files:
        # First, let's close the log files opened in old directories
close_log_file(filename)
# Then, let's open the new file
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
_open_log_files[path] = open(path, "w")
timestr = time.strftime("%Y-%m-%d %H:%M:%S")
    _open_log_files[path].write("%s: %s\n" % (timestr, line))
_open_log_files[path].flush()
def set_log_file_dir(directory):
"""
Set the base directory for log files created by log_line().
    :param directory: Directory for log files.
"""
global _log_file_dir
_log_file_dir = directory
def close_log_file(filename):
global _open_log_files, _log_file_dir
remove = []
for k in _open_log_files:
if os.path.basename(k) == filename:
f = _open_log_files[k]
f.close()
remove.append(k)
if remove:
for key_to_remove in remove:
_open_log_files.pop(key_to_remove)
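# A hedged usage sketch for the log helpers above (the directory and file name
# are invented):
#
#   set_log_file_dir("/tmp/kvm_test_logs")
#   log_line("serial_console.log", "guest booted")   # opens the file on first use
#   close_log_file("serial_console.log")             # flush and release the handle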
# The following are miscellaneous utility functions.
def get_path(base_path, user_path):
"""
Translate a user specified path to a real path.
If user_path is relative, append it to base_path.
If user_path is absolute, return it as is.
:param base_path: The base path of relative user specified paths.
:param user_path: The user specified path.
"""
if os.path.isabs(user_path):
return user_path
else:
return os.path.join(base_path, user_path)
def generate_random_string(length, ignore_str=string.punctuation,
convert_str=""):
"""
Return a random string using alphanumeric characters.
:param length: Length of the string that will be generated.
:param ignore_str: Characters that will not include in generated string.
:param convert_str: Characters that need to be escaped (prepend "\\").
:return: The generated random string.
"""
r = random.SystemRandom()
sr = ""
chars = string.letters + string.digits + string.punctuation
if not ignore_str:
ignore_str = ""
for i in igno
|
grengojbo/st2
|
st2client/st2client/client.py
|
Python
|
apache-2.0
| 5,863
| 0.003752
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
from st2client import models
from st2client.models.core import ResourceManager
from st2client.models.core import ActionAliasResourceManager
from st2client.models.core import LiveActionResourceManager
from st2client.models.core import TriggerInstanceResourceManager
LOG = logging.getLogger(__name__)
# Default values for the options not explicitly specified by the user
DEFAULT_API_PORT = 9101
DEFAULT_AUTH_PORT = 9100
DEFAULT_BASE_URL = 'http://localhost'
DEFAULT_API_VERSION = 'v1'
class Client(object):
def __init__(self, base_url=None, auth_url=None, api_url=None, api_version=None, cacert=None,
debug=False, token=None):
# Get CLI options. If not given, then try to get it from the environment.
self.endpoints = dict()
# Populate the endpoints
if base_url:
self.endpoints['base'] = base_url
else:
self.endpoints['base'] = os.environ.get('ST2_BASE_URL', DEFAULT_BASE_URL)
api_version = api_version or os.environ.get('ST2_API_VERSION', DEFAULT_API_VERSION)
if api_url:
self.endpoints['api'] = api_url
else:
self.endpoints['api'] = os.environ.get(
'ST2_API_URL', '%s:%s/%s' % (self.endpoints['base'], DEFAULT_API_PORT, api_version))
if auth_url:
self.endpoints['auth'] = auth_url
else:
self.endpoints['auth'] = os.environ.get(
'ST2_AUTH_URL', '%s:%s' % (self.endpoints['base'], DEFAULT_AUTH_PORT))
if cacert:
self.cacert = cacert
else:
self.cacert = os.environ.get('ST2_CACERT', None)
if self.cacert and not os.path.isfile(self.cacert):
raise ValueError('CA cert file "%s" does not exist.' % (self.cacert))
self.debug = debug
        # Note: This is a nasty hack for now, but we need to get rid of the decorator abuse
if token:
os.environ['ST2_AUTH_TOKEN'] = token
self.token = token
# Instantiate resource managers and assign appropriate API endpoint.
self.managers = dict()
self.managers['Token'] = ResourceManager(
models.Token, self.endpoints['auth'], cacert=self.cacert, debug=self.debug)
self.managers['RunnerType'] = ResourceManager(
models.RunnerType, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['Action'] = ResourceManager(
models.Action, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['ActionAlias'] = ActionAliasResourceManager(
models.ActionAlias, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['LiveAction'] = LiveActionResourceManager(
models.LiveAction, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['Policy'] = ResourceManager(
models.Policy, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['PolicyType'] = ResourceManager(
models.PolicyType, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['Rule'] = ResourceManager(
models.Rule, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['Sensor'] = ResourceManager(
models.Sensor, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['TriggerType'] = ResourceManager(
models.TriggerType, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['Trigger'] = ResourceManager(
models.Trigger, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['TriggerInstance'] = TriggerInstanceResourceManager(
models.TriggerInstance, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['KeyValuePair'] = ResourceManager(
models.KeyValuePair, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
self.managers['Webhook'] = ResourceManager(
models.Webhook, self.endpoints['api'], cacert=self.cacert, debug=self.debug)
@property
def actions(self):
return self.managers['Action']
@property
def keys(self):
return self.managers['KeyValuePair']
@property
def liveactions(self):
return self.managers['LiveAction']
@property
def policies(self):
return self.managers['Policy']
@property
def policytypes(self):
return self.managers['PolicyType']
@property
def rules(self):
return self.managers['Rule']
@property
def runners(self):
return self.managers['RunnerType']
@property
def sensors(self):
return self.managers['Sensor']
@property
def tokens(self):
return self.managers['Token']
@property
def triggertypes(self):
return self.managers['TriggerType']
@property
def triggerinstances(self):
return self.managers['TriggerInstance']
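# A hedged usage sketch: constructing a client against a default local install
# and listing actions. The token value is invented; get_all() is assumed to be
# the ResourceManager listing call.
#
#   client = Client(base_url='http://localhost', token='<auth-token>')
#   for action in client.actions.get_all():
#       print(action.name)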
|
aroth-arsoft/arsoft-python
|
python3/arsoft/ldap/slapd/action_module.py
|
Python
|
gpl-3.0
| 2,292
| 0.004799
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# kate: space-indent on; indent-width 4; mixedindent off; indent-mode python;
import argparse
import string
import ldap
import ldap.modlist as modlist
from action_base import *
class action_module(action_base):
def __init__(self, app, args):
action_base.__init__(self, app, args)
parser = argparse.ArgumentParser(description='configure the loaded modules')
parser.add_argument('-a', '--add', dest='add', type=str, nargs='+', help='adds the specified module.')
parser.add_argument('-r', '--remove', dest='remove', type=str, nargs='+', help='removes the specified module.')
pargs = parser.parse_args(args)
self._add = pargs.add
self._remove = pargs.remove
self._selected_modulelist_dn = None
def run(self):
if self._add is None and self._remove is None:
            self._select_modulelist(add_modulelist_if_not_available=False)
ret = self._list()
else:
self._select_modulelist(add_modulelist_if_not_available=True)
mod_attrs = []
if self._add is not None:
for mod in self._add:
if mod not in self._modules.values():
mod_attrs.append( (ldap.MOD_ADD, 'olcModuleLoad', mod) )
if self._remove is not None:
                for mod in self._remove:
found = False
for (modidx, modname) in self._modules.items():
if modname == mod:
found = True
mod_attrs.append( (ldap.MOD_DELETE, 'olcModuleLoad', '{' + str(modidx) + '}' + mod) )
break
if self._modify_direct(self._selected_modulelist_dn, mod_attrs):
ret = 0
else:
ret = 1
return ret
def _list(self):
print("Modulepath: " + (self._modulepath if self._modulepath is not None else '<default>'))
if len(self._modules) > 0:
print("Modules:")
for modidx in sorted(self._modules.keys()):
modname = self._modules[modidx]
print(' ' + modname)
else:
print("Modules: <none>")
return 0
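    # A hedged usage sketch: the surrounding slapd tool constructs the action with
    # its app context and the remaining CLI arguments. The module name is an example.
    #
    #   action = action_module(app, ['--add', 'memberof'])
    #   rc = action.run()   # 0 on success, 1 on a failed modify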
|
adini121/oneanddone
|
oneanddone/users/urls.py
|
Python
|
mpl-2.0
| 1,193
| 0.006706
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf.urls import patterns, url
from oneanddone.users import views
urlpatterns = patterns('',
url(r'^login/$', views.LoginView.as_view(), name='users.login'),
url(r'^profile/new/$', views.CreateProfileView.as_view(), name='users.profile.create'),
url(r'^profile/edit/$', views.UpdateProfileView.as_view(), name='users.profile.update'),
url(r'^profile/delete/$', views.DeleteProfileView.as_view(), name='users.profile.delete'),
url(r'^profile/$', views.MyProfileDetailsView.as_view(), name='users.profile.mydetails'),
url(r'^profile/(?P<id>\d+)/$', views.ProfileDetailsView.as_view(), name='users.profile.details'),
url(r'^profile/(?P<username>[^/\\]+)/$', views.ProfileDetailsView.as_view(), name='users.profile.details'),
# API URL's for interacting with User objects
url(r'^api/v1/user/$', views.UserListAPI.as_view(), name='api-user'),
    url(r'^api/v1/user/(?P<email>[^/\\]+)/$', views.UserDetailAPI.as_view(), name='api-user-detail'),
)
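# A hedged usage sketch: resolving one of the named routes above from view or
# template code (the path shown assumes the patterns are mounted at the site root):
#
#   from django.core.urlresolvers import reverse
#   reverse('users.profile.details', kwargs={'id': 42})   # -> '/profile/42/'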
|
Mariaanisimova/pythonintask
|
BITs/2014/KOSTAREV_A_I/task_4_13.py
|
Python
|
apache-2.0
| 1,201
| 0.024691
|
# Task No. 4, Variant 13
# Write a program that prints the name behind which Jean-Baptiste Poquelin hides. Additionally, print the person's area of interest and place of birth, the years of birth and death (if the person has died), and compute the age at the present moment (or at the moment of death).
# Kostarev A. I.
# 14.03.16
print ("Jean-Baptiste Poquelin is better known as the French comic playwright, actor, man of the theatre and reformer of stage art Jean-Baptiste Moliere.")
MR= "Paris, France"
GR= 1622
V= 1673-GR
OI= "French comic playwright of the 17th century"
print ('Place of birth: ' + MR)
print ('Year of birth:' , GR)
print ('Age:' , V)
print ('Area of interest: ' + OI)
input ("Press Enter to exit.")
|
sauli6692/ibc-server
|
rte/serializers/route.py
|
Python
|
mit
| 337
| 0
|
from rest_framework import serializers
from ..models import Route
class RouteSerializer(serializers.ModelSerializer):
class Meta:
model = Route
fields = (
'pk',
'name',
'description',
'direction_main',
'direction_extra',
'zone_map'
)
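# A hedged usage sketch: serializing a model instance into primitive data
# (the queryset call assumes at least one Route exists):
#
#   route = Route.objects.first()
#   RouteSerializer(route).data   # dict with the fields declared above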
| |
lattrelr7/cse881-pcap
|
ip_structs.py
|
Python
|
mit
| 4,002
| 0.006497
|
from ctypes import *
# Ether types that we handle
# These are types that will be found in the frame header
ET_ARP = 0x0806
ET_REV_ARP = 0x8035
ET_IPv4 = 0x0800
ET_IPv6 = 0x86DD
# IP types that we handle
# These types are found in the ipv4 header
IPT_ICMP = 0x01
IPT_TCP = 0x06
IPT_UDP = 0x11
IPT_IPv6 = 0x29
IPT_IGMP = 0x02
# ICMP types that we handle
# These are found in the ICMP header
ICMPT_ECHO_REPLY = 0
ICMPT_ECHO_REQUEST = 8
ICMPT_DEST_UNREACHABLE = 3
ICMPT_REDIRECT = 5
ICMPT_ROUTER_AD = 9
ICMPT_ROUTER_DISC = 10
ICMPT_TIMEOUT = 11
# TCP control bit masks
TCP_NS = 0x0100
TCP_CWR = 0x0080
TCP_ECE = 0x0040
TCP_URG = 0x0020
TCP_ACK = 0x0010
TCP_PSH = 0x0008
TCP_RST = 0x0004
TCP_SYN = 0x0002
TCP_FIN = 0x0001
# IPv4 header masks
IP_MORE_FRAGMENTS = 0x2000
# Struct sizes
IPV4_HDR_LEN = 20
IPV6_HDR_LEN = 40
UDP_HDR_LEN = 8
TCP_HDR_LEN = 20
ICMP_HDR_LEN = 8
FRAME_HDR_LEN = 14
ARP_HDR_LEN = 28
#Pcap structures
class timeval_t(Structure):
_fields_ = [('tv_sec', c_long),
('tv_usec', c_long)]
class pcap_pkthdr_t(Structure):
_fields_ = [('ts', timeval_t),
('caplen', c_uint32),
('len', c_uint32)]
#IP structures
class ipv4_header_t(BigEndianStructure):
_pack_ = 1
_fields_ = [("version_ihl", c_uint8),
("dscp_ecn", c_uint8),
("length", c_uint16),
("id", c_uint16),
("flags_frag_offset", c_uint16),
("ttl", c_uint8),
("protocol", c_uint8),
("checksum", c_uint16),
("src_ip", c_uint32),
("dst_ip", c_uint32)]
class ipv6_header_t(BigEndianStructure):
_pack_ = 1
_fields_ = [("vers_class_flow_1", c_uint16),
("flow_2", c_uint16),
("length", c_uint16),
("next_header", c_uint8),
("hop_limit", c_uint8),
("src_addr_1", c_ulonglong),
("src_addr_2", c_ulonglong),
("dst_addr_1", c_ulonglong),
("dst_addr_2", c_ulonglong),]
class icmp_header_t(BigEndianStructure):
_pack_ = 1
    _fields_ = [("type", c_uint8),
                ("code", c_uint8),
                ("checksum", c_uint16),
("rest_of_header", c_uint32)]
class udp_header_t(BigEndianStructure):
_pack_ = 1
_fields_ = [("src_port", c_uint16),
("dst_port", c_uint16),
("length", c_uint16),
("checksum", c_uint16)]
class tcp_header_t(BigEndianStructure):
_pack_ = 1
_fields_ = [("src_port", c_uint16),
("dst_port", c_uint16),
("sn", c_uint32),
("ack_num", c_uint32),
("offset_type", c_uint16),
("window_size", c_uint16),
("checksum", c_uint16),
("urgent", c_uint16)]
class arp_message_t(BigEndianStructure):
_pack_ = 1
_fields_ = [("htype", c_uint16),
("ptype", c_uint16),
("h_addr_len", c_uint8),
("p_addr_len", c_uint8),
("oper", c_uint16),
("mac_src_1", c_uint16),
("mac_src_2", c_uint16),
("mac_src_3", c_uint16),
("src_p_addr", c_uint32),
("mac_dst_1", c_uint16),
("mac_dst_2", c_uint16),
("mac_dst_3", c_uint16),
("dst_p_addr", c_uint32)]
class eth_frame_header_t(BigEndianStructure):
_pack_ = 1
_fields_ = [("mac_src_1", c_uint16),
("mac_src_2", c_uint16),
("mac_src_3", c_uint16),
("mac_dst_1", c_uint16),
("mac_dst_2", c_uint16),
("mac_dst_3", c_uint16),
("ethertype", c_uint16)]
|
codenginebd/django-paypal-driver
|
paypal/views.py
|
Python
|
gpl-2.0
| 7,535
| 0.01075
|
# -*- coding: utf-8 -*-
from decimal import Decimal, ROUND_UP
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.conf import settings
from django.shortcuts import render_to_response
from django.template import RequestContext
from paypal.driver import PayPal
from paypal.models import PayPalResponse
from paypal.utils import process_payment_request, process_refund_request
def setcheckout(request, return_url, cancel_url, error_url, template = "paypal/setcheckout.html", currency = "USD"):
"""
Django view to process PayPal SetExpressCheckout API call.
If response 'Success' or 'SuccessWithWarning' comes, redirects user to the PayPal website to continue checkout process.
If response 'Failed' or 'FailedWithWarning' comes, shows the error and redirects user to the 'payment page' to choose another POS or PayPal again.
"""
#############################################################################
# ROUTINES #
    # 1) Parse and validate POST data                                           #
# 2) Call Paypal driver #
# 3) Execute the relevant method #
    # 4) According to the method response, redirect user to the given urls      #
#############################################################################
if request.POST:
# normalize the given amount
amount = request.POST.get("amount")
try:
amount = Decimal(amount)
amount = str(amount.quantize(Decimal(".01"), rounding = ROUND_UP))
except:
if request.user.is_authenticated():
request.user.message_set.create(message = _("No given valid amount. Please check the amount that will be charged."))
return HttpResponseRedirect(error_url)
num_cart_items = request.POST.get('num_cart_items', None)
cart_items = None
if num_cart_items:
cart_items = []
for i in range(0, int(num_cart_items)):
item = {
'NAME': request.POST.get('cart_items[%s][NAME]' % i),
'NUMBER': request.POST.get('cart_items[%s][NUMBER]' % i),
'DESC': request.POST.get('cart_items[%s][DESC]' % i),
'AMT': request.POST.get('cart_items[%s][AMT]' % i),
'QTY': request.POST.get('cart_items[%s][QTY]' % i)
}
cart_items.append(item)
# call the PayPal driver (2)
driver = PayPal()
# call the relevant API method (3)
result = driver.SetExpressCheckout(amount, currency, return_url, cancel_url, cart_items)
# perform the response (4)
if not result:
print driver.apierror
# show the error message (comes from PayPal API) to the user and redirect him/her to the error page
if request.user.is_authenticated():
request.user.message_set.create(message = _(driver.setexpresscheckouterror))
return HttpResponseRedirect(error_url)
# send him/her to the PayPal website to check his/her order details out
        redirect_url = driver.paypal_url()
        return HttpResponseRedirect(redirect_url)
    return render_to_response(template,
                              {'currency': currency,
'return_url': return_url,
'cancel_url': cancel_url,
'error_url' : error_url,
}, context_instance = RequestContext(request))
def docheckout(request, error_url, success_url, template = "paypal/docheckout.html", currency = "USD"):
"""
Django view to do the actual payment (charges actual money)
It performs the relevant API method DoExpressCheckoutPayment
"""
if request.POST:
# normalize the given amount
amount = request.POST.get("amount")
try:
amount = Decimal(amount)
amount = str(amount.quantize(Decimal(".01"), rounding = ROUND_UP))
except:
if request.user.is_authenticated():
request.user.message_set.create(message = _("No given valid amount. Please check the amount that will be charged."))
return HttpResponseRedirect(error_url)
# perform GET
token = request.GET.get("token")
payerid = request.GET.get("PayerID")
# charge from PayPal
result, response = process_payment_request(amount, currency, token, payerid)
# process the result
if not result:
# show the error message (comes from PayPal API) and redirect user to the error page
if request.user.is_authenticated():
request.user.message_set.create(message = _("Amount %s has not been charged, server error is '%s'" % (amount, response.error)))
return HttpResponseRedirect(error_url)
        # Now we are done, redirect user to the success page
if request.user.is_authenticated():
request.user.message_set.create(message = _("Amount %s has been successfully charged, your transaction id is '%s'" % (amount, response.trans_id)))
return HttpResponseRedirect(success_url)
return render_to_response(template,
{'error_url': error_url,
'success_url': success_url,
}, context_instance = RequestContext(request))
def dorefund(request, error_url, success_url, template = "paypal/dorefund.html"):
if request.POST:
# normalize the given amount
amount = request.POST.get("amount")
trans_id = request.POST.get("transactionid")
try:
amount = Decimal(amount)
amount = str(amount.quantize(Decimal(".01"), rounding = ROUND_UP))
except:
if request.user.is_authenticated():
request.user.message_set.create(message = _("No given valid amount. Please check the amount that will be charged."))
return HttpResponseRedirect(error_url)
response_obj = get_object_or_404(PayPalResponse, trans_id = trans_id)
# charge from PayPal
result, response = process_refund_request(response_obj, amount)
# process the result
if not result:
# show the error message (comes from PayPal API) and redirect user to the error page
if request.user.is_authenticated():
request.user.message_set.create(message = _("Amount %s has not been charged, server error is '%s'" % (amount, response.error)))
return HttpResponseRedirect(error_url)
        # Now we are done, redirect user to the success page
if request.user.is_authenticated():
request.user.message_set.create(message = _("Amount %s has been successfully refunded, your transaction id is '%s'" % (amount, response.trans_id)))
return HttpResponseRedirect(success_url)
return render_to_response(template,
{'error_url': error_url,
'success_url': success_url,
}, context_instance = RequestContext(request))
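# A hedged usage sketch: wiring the views into an (old-style) project urlconf,
# matching the render_to_response/RequestContext idiom used above. URL names and
# redirect targets are invented.
#
#   from django.conf.urls import patterns, url
#   urlpatterns = patterns('',
#       url(r'^paypal/setcheckout/$', 'paypal.views.setcheckout',
#           {'return_url': '/paypal/return/', 'cancel_url': '/cart/',
#            'error_url': '/payment/error/'}),
#   )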
|
amitgroup/parts-net
|
scripts/cifar/train_and_test_cifar.py
|
Python
|
bsd-3-clause
| 1,777
| 0.006753
|
from __future__ import division, print_function, absolute_import
import amitgroup as ag
import pnet
import pnet.cifar
import numpy as np
ag.set_verbose(True)
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('parts',metavar='<parts file>',
type=argparse.FileType('rb'),
help='Filename of parts file')
args = parser.parse_args()
feat_net = pnet.Layer.load(args.parts)
training_seed = 0
layers = feat_net.layers
S = 13
layers += [
pnet.PoolingLayer(final_shape=(2, 2), operation='avg'),
#pnet.PoolingLayer(shape=(29, 29), strides=(29, 29), operation='sum'),
#pnet.PoolingLayer(shape=(S, S), strides=(S, S), operation='sum'),
#pnet.GMMClassificationLayer(n_components=1,
#settings=dict(
#min_covariance=0.01,
#covariance_type='diag',
#),)
        pnet.SVMClassificationLayer(C=None, settings=dict(standardize=True)),
]
net = pnet.PartsNet(layers)
limit = None
error_rate, conf_mat = pnet.cifar.train_and_test(net,
samples_per_class=None,
seed=0, limit=limit)
print('Error rate: {:.02f}'.format(error_rate * 100))
np.set_printoptions(precision=2, suppress=True)
print('Confusion matrix:')
norm_conf = conf_mat / np.apply_over_axes(np.sum, conf_mat, [1])
print(norm_conf)
print('Column sums')
print(norm_conf.sum(0))
from vzlog.default import vz
vz.output(net)
if pnet.parallel.main(__name__):
main()
|
bengranett/syncat
|
syncat/methods/gmm.py
|
Python
|
mit
| 6,003
| 0.002999
|
""" synrcat
gaussian mixture model
"""
import sys
import os
import numpy as np
import logging
from collections import OrderedDict
from astropy.table import Table
from pypeline import pype, add_param, depends_on
from syn import Syn
from syncat.errors import NoPoints
import syncat.misc as misc
import syncat.fileio as fileio
import time
@add_param('cat_model', metavar='filename', default='out/syn.pickle', type=str,
help='file with catalogue model to load')
@add_param('hints_file', metavar='filename', default='in/syn_hints.txt', type=str,
help='give hints about parameter distributions')
@depends_on(Syn)
class GaussianMixtureModel(pype):
""" SynCat mode
|
to generate random catalogue by sampling from a gaussian mixture model.
Parameters
----------
mask : minimask.Mask instance
        mask describing survey geometry to sample from. If None, sample from full-sky.
cat_model : str
path to file with catalogue model to load
hints_file : str
path to file with hints about parameter distributions
"""
def __init__(self, config={}, mask=None, **kwargs):
""" """
self._parse_config(config, **kwargs)
self._setup_logging()
self.load_hints()
self.mask = mask
self.syn = None
def sample_sky(self, zone=None, nside=None, order=None):
""" Sample sky coordinates.
Parameters
----------
zone : int, list
optional healpix zone index or list of indices from which to sample. Otherwise sample from all zones.
nside : int
healpix nside for zone pixelization
order : str
healpix ordering for zone pixelization
"""
return np.transpose(self.mask.draw_random_position(density=self.config['density'], n=self.config['count'],
cell=zone, nside=nside))
def load_hints(self):
""" Load the hints file.
The hints file contains information about the parameter distributions.
"""
self.hints = {}
if os.path.exists(self.config['hints_file']):
for line in file(self.config['hints_file']):
line = line.strip()
if line == "":
continue
if line.startswith("#"):
continue
words = line.split()
instruction = None
low = None
high = None
name = words.pop(0)
if len(words) > 0:
instruction = words.pop(0)
if len(words) > 0:
low = float(words.pop(0))
if len(words) > 0:
high = float(words.pop(0))
if instruction not in self.hints:
self.hints[instruction] = []
self.hints[instruction].append((name, low, high))
self.logger.info("got hint for '%s': instruction is %s with range: %s, %s", name, instruction, low, high)
return self.hints
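    # A hedged illustration of the hints file parsed above: one "name instruction
    # low high" entry per line. The column names, the 'uniform' keyword and the
    # ranges are invented for illustration.
    #
    #   # in/syn_hints.txt
    #   redshift  uniform  0.0   2.0
    #   mag_r     uniform  15.0  24.5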
def fit(self, filename=None, add_columns=True):
""" Fit a Gaussian mixture model to the input catalogue.
Parameters
----------
filename : str
path to input catalogue.
"""
if filename is None:
filename = self.config['in_cat']
if os.path.exists(self.config['cat_model']) and not self.config['overwrite']:
self.logger.info("reading %s", self.config['cat_model'])
self.syn = Syn(self.config['cat_model'])
self.labels = self.syn.labels
return
hints = self.load_hints()
self.logger.info("loading %s", filename)
table = fileio.read_catalogue(filename, format=self.config['input_format'], columns=self.config['input_columns'], quick=self.config['quick'])
table_dtype = table.dtype
table = misc.remove_columns(table, self.config['skip'])
properties = list(table.dtype.names)
if self.logger.isEnabledFor(logging.INFO):
mesg = ""
for i, p in enumerate(properties):
mesg += "\n{:>3} {}".format(1 + i, p)
self.logger.info("got these %i columns:%s", len(properties), mesg)
self.syn = Syn(labels=properties, hints=hints, config=self.config)
dtype = table.dtype
if add_columns:
dtype = misc.append_dtypes(dtype, self.config['add_columns'], table_dtype)
if self.config['sample_sky'] and self.config['skycoord_name'] not in dtype.names:
skycoord_name = self.config['skycoord_name']
alpha, delta = skycoord_name
skycoord_dtype = np.dtype([(alpha, np.float64), (delta, np.float64)])
dtype = misc.concatenate_dtypes([dtype, skycoord_dtype])
self.syn.fit(table, dtype=dtype)
# store column names
self.labels = properties
# save catalogue model
self.syn.save(self.config['cat_model'])
def sample(self):
""" Sample from the Gaussian mixture model.
Returns
-------
numpy strucarray : random catalogue
"""
if self.syn is None:
if not os.path.exists(self.config['cat_model']):
raise Exception("Cannot load catalogue model. Files does not exist: %s"%self.config['cat_model'])
self.syn = Syn(self.config['cat_model'])
if self.config['sample_sky']:
skycoord = self.sample_sky()
count = len(skycoord)
else:
count = self.config['count']
if count == 0:
raise NoPoints
randoms = self.syn.sample(n=count)
if self.config['sample_sky']:
skycoord_name = self.config['skycoord_name']
for i in range(len(skycoord_name)):
randoms[skycoord_name[i]] = skycoord[:,i]
return randoms
|
LT12/LTPsi
|
basis/b631Gs.py
|
Python
|
gpl-2.0
| 2,590
| 0.018533
|
basis_set = \
{
"H": [
[
"S",
[
[
18.731137,
0.0334946
],
[
2.8253937,
0.23472695
],
[
0.6401217,
0.81375733
]
]
],
[
"S",
[
[
0.1612778,
1.0
]
]
]
],
"O": [
[
"S",
[
[
                    5484.6717,
0.0018311
],
[
                    825.23495,
                    0.0139501
],
[
188.04696,
0.0684451
],
[
52.9645,
0.2327143
],
[
16.89757,
0.470193
],
[
5.7996353,
0.3585209
]
]
],
[
"S",
[
[
15.539616,
-0.1107775
],
[
3.5999336,
-0.1480263
],
[
1.0137618,
1.130767
]
]
],
[
"P",
[
[
15.539616,
0.0708743
],
[
3.5999336,
0.3397528
],
[
1.0137618,
0.7271586
]
]
],
[
"S",
[
[
0.2700058,
1.0
]
]
],
[
"P",
[
[
0.2700058,
1.0
]
]
],
[
"D",
[
[
0.8,
1.0
]
]
]
]
}
def getOrbs(atom):
try:
return basis_set[atom]
except KeyError:
raise NameError('Element not supported by basis set!')
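# A hedged usage sketch: fetching the contracted shells for hydrogen.
#
#   shells = getOrbs("H")   # two S shells: a 3-primitive contraction and a single primitive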
|
HuuHoangNguyen/Python_learning
|
Tuples.py
|
Python
|
mit
| 3,657
| 0.004375
|
#!/usr/bin/python
# Lists are enclosed in brackets ([]) and their elements
# and size can be changed, while tuples are enclosed in parentheses
# ( () ) and cannot be updated. Tuples can be thought of as
# read-only lists.
aTuple = ( 'abcd', 786, 2.23, 'John', 70.2)
bTuple = ( 123, 'Vien')
print aTuple           # Print the complete tuple
print aTuple[0]        # Print the first element of the tuple
print aTuple[1:3]      # Print elements from the 2nd up to the 3rd
print aTuple[2:]       # Print elements starting from the 3rd element
print bTuple           # Print the second tuple
print aTuple + bTuple  # Print the concatenated tuples
val_tuple = ('abcd', 786, 2.23, 'John', 70.2 )
val_list = ['abcd', 786, 2.23, 'John', 70.2]
#
print "\nPrint the list and tuple before change: "
print val_tuple
print val_list
#val_tuple[2] = 100.2
val_list[2] = 100.2
print "\nPrint the list and tuple after change"
print val_tuple
print val_list
print "============================
|
======================"
tuple1 = ('physics', 'schematic', 1997, 2000)
tuple2 = (1,2, 3, 4, 5, 6, 7)
print "tuple1[0]: ", tuple1[0]
print "tuple2[1:5]: ", tuple2[1:5]
print "=================================================="
print "Updating Tuples"
print "Tuples are immutable which means you can not update or change the value of tuples element."
print "You can able to take portions of existing tuples to creatr new tuples as the following example demonstrates"
tuple1 = (12, 34, 56)
tuple2 = ('abc', 'xyz')
#Following action is not valid for tuples
#tuple1[0] = 100
#so let's create a new tuple as follows
tuple3 = tuple1 + tuple2
print "Tuple3 is: ", tuple3
print "=================================================="
print "Delete Tuple Elements"
print "Removing individual tuple elements is not possible. There is, of course, nothing"
print "wrong with ptting together another tuple with the undesired element discarded"
tup = ('physics', 'schemistry', 1997, 2000)
print "The tuple is: ", tup
del tup
print "After deleting tup: "
#print tup
print "=================================================="
print "cmp(tuple1, tuple2): Compares elements of both tuples"
tuple1, tuple2 = (123, 'xyz'), (456, 'abc')
print "The tuple1 is: ", tuple1
print "The tuple2 is: ", tuple2
print "cmp(tuple1, tuple2): ", cmp(tuple1, tuple2)
print "cmp(tuple2, tuple1): ", cmp(tuple2, tuple1)
tuple3 = tuple2 + (678, )
print "The tuple3 is: ", tuple3
print "cmp(tuple2, tuple3): ", cmp(tuple2, tuple3)
print "=================================================="
print "len(tuple): Given the total length of the tuple"
tuple1, tuple2 = (123, 'zara', 'xyz'), (456, 'abc')
print "The Tuple1 is: ", tuple1
print "The Tuple2 is: ", tuple2
print "First tuple length is: ", len(tuple1)
print "Second tuple length is: ", len(tuple2)
print "=================================================="
print "max(tuple) or min(tuple): Returns item from the tuple with max or min value"
tuple1, tuple2 = (123, 'xyz', 'zara', 'abc'), (456, 700, 200)
print "The tuple1 is: ", tuple1
print "The tuple2 is: ", tuple2
print "Max value element of tupple1 is : ", max(tuple1)
print "Max value element of tupple2 is : ", max(tuple2)
print "Min value element of tupple1 is : ", min(tuple1)
print "Min value element of tupple2 is : ", min(tuple2)
print "=================================================="
print "tuple(seq): converts a list into tuple"
aList = [123, 'xyz', 'zara', 'abc']
aTuple = tuple(aList)
print "The aList is: ", aList
print "The aTuple is: ", aTuple
del aList[1]
print"The aList after delete aList[1]: ", aList
|
diogocs1/comps
|
web/addons/hw_scanner/__init__.py
|
Python
|
apache-2.0
| 1,075
| 0.002791
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
moonboy13/brew-journal
|
brew_journal/recipies/tests.py
|
Python
|
apache-2.0
| 21,233
| 0.003909
|
import json
from datetime import datetime
from django.test import TestCase, Client
from authentication.models import Account
from recipies.models import Recipe, RecipeSteps
from recipies.serializers import RecipeSerializer, RecipeStepsSerializer
class Utility(TestCase):
"""Utility class para testing"""
@staticmethod
def checkElement(test_instance, model, data):
"""Helper Function. Either check two values against on another or call correct helper function"""
# IF the type is a list or dict, call the correct function to check its elements. ELSE directly
# compare the elements
if type(model) is list:
Utility.checkArrayModel(test_instance, model, data)
elif type(model) is dict:
Utility.checkDictModel(test_instance, model, data)
else:
test_instance.assertEqual(model, data)
@staticmethod
def checkArrayModel(test_instance, model, data):
"""Helper function. Check an array to see if the model data is present in the data array"""
for i in range(len(model)):
            Utility.checkElement(test_instance, model[i], data[i])
@staticmethod
def checkDictModel(test_instance, model, data):
"""Helper function. Check a model dictionary against a data dictionary key by key"""
for key in model.keys():
Utility.checkElement(test_instance, model.get(key), data.__dict__.get(key))
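    # A hedged illustration: checkElement recurses through lists and dicts until
    # scalars remain, so a fixture dict can be compared field by field against a
    # model instance. The values below are invented.
    #
    #   Utility.checkElement(self, dict(hop_name="Citra"), hop_instance)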
# Create your tests here.
class TestRecipeModel(TestCase):
"""Test the custom method
|
attached to the Recipe model"""
def setUp(self):
self.recipe_data = dict(
recipe_name="Test Recipe",
recipe_style="Kolsch",
recipe_notes="This is my first test recipe submited from a unit test.",
last_brew_date=datetime.now()
)
self.malts_data = [
dict(
malt_brand="ABrand",
malt_type="Extra Light",
amount_by_weight=3.3,
),
dict(
malt_brand="BBrand",
malt_type="Crystal",
amount_by_weight=1.5,
malt_extract=False
),
dict(
malt_brand="CBrand",
malt_type="Light",
amount_by_weight=3,
dry_malt=True,
),
]
self.hops_data = [
dict(
hop_name="Amarillo",
alpha_acid_content=12.3,
beta_acid_content=7.9,
hop_weight=1.5,
hop_weight_uom="g",
),
dict(
hop_name="Cascade",
alpha_acid_content=8.8,
hop_weight=0.5,
hop_weight_uom="oz",
),
dict(
hop_name="Citra",
alpha_acid_content=7.9,
beta_acid_content=4.6,
hop_weight=1.0,
hop_weight_uom="oz",
dry_hops=True,
),
]
self.user = Account.objects.create_user('test', 'foo')
def tearDown(self):
self.recipe_data=None
self.malts_data=None
self.hops_data=None
self.user.delete()
def test_RecipeManager_CreateValidRecipe(self):
recipe = Recipe.objects.create_recipe(self.user, self.recipe_data, malts_data=self.malts_data, hops_data=self.hops_data)
self.assertIsInstance(recipe, Recipe)
Utility.checkElement(self, self.hops_data, recipe.recipe_hops.order_by("hop_name"))
Utility.checkElement(self, self.malts_data, recipe.recipe_malts.order_by("malt_brand"))
Utility.checkElement(self, self.recipe_data, recipe)
def test_RecipeManager_FailNoRecipeData(self):
with self.assertRaises(ValueError) as err:
Recipe.objects.create_recipe(self.user, None, self.malts_data, self.hops_data)
self.assertEqual(err.exception.message, 'Recipe information is required to create a recipe.')
def test_RecipeManager_FailInactiveUser(self):
self.user.is_active=False
with self.assertRaises(ValueError) as err:
Recipe.objects.create_recipe(self.user, self.recipe_data, malts_data=self.malts_data, hops_data=self.hops_data)
self.assertEqual(err.exception.message, 'Account must be active to create a recipe.')
def test_RecipeManager_FailNotLoggedIn(self):
with self.assertRaises(ValueError) as err:
Recipe.objects.create_recipe(None, self.recipe_data, malts_data=self.malts_data, hops_data=self.hops_data)
self.assertEqual(err.exception.message, 'Need to be logged in to create a recipe.')
class TestRecipeStepModel(TestCase):
def setUp(self):
self.recipe_data = dict(
recipe_name="Test Recipe",
recipe_style="Kolsch",
recipe_notes="This is my first test recipe submited from a unit test.",
last_brew_date=datetime.now()
)
self.user = Account.objects.create_user('test', 'foo')
self.recipe = Recipe.objects.create_recipe(self.user, self.recipe_data)
def tearDown(self):
self.recipe_data = None
self.user.delete()
self.recipe.delete()
def test_RecipeStepsManager_CreateValidStep(self):
# Collect a reference to the recipe so that its id can be retrieved
recipe_obj = Recipe.objects.get(recipe_name=self.recipe_data['recipe_name'])
step_data = dict(
step='This is a step',
step_order=1
)
step_obj = RecipeSteps.objects.save_step(step_data, recipe_obj.id)
self.assertIsInstance(step_obj, RecipeSteps)
Utility.checkElement(self, step_data, step_obj)
class TestRecipeSerializer(TestCase):
"""Test the serializers for the recipe class"""
def setUp(self):
self.json_data = open('recipies/testRecipe.json','r').read()
self.data = self.retrieveRecipeData()
# Extract just the date portion from the datetime object
my_datetime = datetime.today()
self.data['last_brew_date'] = datetime.date(my_datetime)
self.account = Account.objects.create(username='foot',password='bar2')
def tearDown(self):
self.json_data = None
self.data = None
self.account.delete()
def retrieveRecipeData(self):
"""Retrieve a new decoding of the JSON recipe data"""
return json.loads(self.json_data)
def createRecipe(self, user, data):
"""Create a recipe for use with the update unit test"""
hops = data.pop("recipe_hops")
malts = data.pop("recipe_malts")
return Recipe.objects.create_recipe(user, data, malts, hops)
def test_RecipeSerializer_Create_ValidData(self):
serialized_data = RecipeSerializer(data=self.data)
self.assertTrue(serialized_data.is_valid())
recipe = serialized_data.save(user=self.account)
Utility.checkElement(self, self.data.pop('recipe_hops'), recipe.recipe_hops.order_by("hop_name"))
Utility.checkElement(self, self.data.pop('recipe_malts'), recipe.recipe_malts.order_by("malt_brand"))
Utility.checkElement(self, self.data, recipe)
def test_RecipeSerializer_Update_ValidData(self):
premade_recipe = self.createRecipe(self.account, self.data)
recipe_data = self.retrieveRecipeData()
# Add another hop
self.data['recipe_hops'] = list()
self.data['recipe_hops'].append(dict(
hop_name="Tettang",
alpha_acid_content=8.8,
beta_acid_content=6.4,
hop_weight=3.4,
hop_weight_uom="oz",
dry_hops=True,
))
# Change the malt
self.data['recipe_malts'] = list()
self.data['recipe_malts'].append(dict(
malt_brand="Fruity_Tooty",
malt_type="Crystal",
malt_extract=False,
amount_by_weight=7.0,
))
# Update the notes
self.data['recipe_notes'] = "Added this crystal to spice it up."
serializer = RecipeSerializer(instance=prema
|
google/llvm-propeller
|
lldb/test/API/functionalities/target-new-solib-notifications/TestModuleLoadedNotifys.py
|
Python
|
apache-2.0
| 4,739
| 0.004009
|
"""
Test how many times newly loaded binaries are notified;
they should be delivered in batches instead of one-by-one.
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ModuleLoadedNotifysTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
# DynamicLoaderDarwin should batch up notifications about
# newly added/removed libraries. Other DynamicLoaders may
# not be written this way.
@skipUnlessDarwin
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number('main.cpp', '// breakpoint')
def test_launch_notifications(self):
"""Test that lldb broadcasts newly loaded libraries in batches."""
self.build()
exe = self.getBuildArtifact("a.out")
self.dbg.SetAsync(False)
listener = self.dbg.GetListener()
listener.StartListeningForEventClass(
self.dbg,
lldb.SBTarget.GetBroadcasterClassName(),
lldb.SBTarget.eBroadcastBitModulesLoaded | lldb.SBTarget.eBroadcastBitModulesUnloaded)
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
# break on main
breakpoint = target.BreakpointCreateByName('main', 'a.out')
event = lldb.SBEvent()
# CreateTarget() generated modules-loaded events; consume them & toss
while listener.GetNextEvent(event):
True
error = lldb.SBError()
flags = target.GetLaunchInfo().GetLaunchFlags()
process = target.Launch(listener,
None, # argv
None, # envp
None, # stdin_path
None, # stdout_path
None, # stderr_path
None, # working directory
flags, # launch flags
False, # Stop at entry
error) # error
self.assertTrue(
process.GetState() == lldb.eStateStopped,
PROCESS_STOPPED)
total_solibs_added = 0
total_solibs_removed = 0
total_modules_added_events = 0
total_modules_removed_events = 0
while listener.GetNextEvent(event):
if lldb.SBTarget.EventIsTargetEvent(event):
if event.GetType() == lldb.SBTarget.eBroadcastBitModulesLoaded:
solib_count = lldb.SBTarget.GetNumModulesFromEvent(event)
total_modules_added_events += 1
total_solibs_added += solib_count
if self.TraceOn():
# print all of the binaries that have been added
added_files = []
i = 0
while i < solib_count:
module = lldb.SBTarget.GetModuleAtIndexFromEvent(i, event)
added_files.append(module.GetFileSpec().GetFilename())
i = i + 1
print("Loaded files: %s" % (', '.join(added_files)))
if event.GetType() == lldb.SBTarget.eBroadcastBitModulesUnloaded:
solib_count = lldb.SBTarget.GetNumModulesFromEvent(event)
total_modules_removed_events += 1
                    total_solibs_removed += solib_count
if self.TraceOn():
# print all of the binaries that have been removed
removed_files = []
i = 0
while i < solib_count:
module = lldb.SBTarget.GetModuleAtIndexFromEvent(i, event)
                            removed_files.append(module.GetFileSpec().GetFilename())
                            i = i + 1
print("Unloaded files: %s" % (', '.join(removed_files)))
# This is testing that we get back a small number of events with the loaded
# binaries in batches. Check that we got back more than 1 solib per event.
# In practice on Darwin today, we get back two events for a do-nothing c
# program: a.out and dyld, and then all the rest of the system libraries.
avg_solibs_added_per_event = int(float(total_solibs_added) / float(total_modules_added_events))
self.assertGreater(avg_solibs_added_per_event, 1)
|
FreddieShoreditch/image_folder_organiser
|
venv/lib/python2.7/site-packages/PIL/TiffTags.py
|
Python
|
mit
| 9,273
| 0.000324
|
#
# The Python Imaging Library.
# $Id$
#
# TIFF tags
#
# This module provides clear-text names for various well-known
# TIFF tags. the TIFF codec works just fine without it.
#
# Copyright (c) Secret Labs AB 1999.
#
# See the README file for information on usage and redistribution.
#
##
# This module provides constants and clear-text names for various
# well-known TIFF tags.
##
from collections import namedtuple
class TagInfo(namedtuple("_TagInfo", "value name type length enum")):
__slots__ = []
def __new__(cls, value=None, name="unknown", type=4, length=0, enum=None):
return super(TagInfo, cls).__new__(
cls, value, name, type, length, enum or {})
    def cvt_enum(self, value):
return self.enum.get(value, value)
##
# Map tag numbers to tag info.
#
# id: (Name, Type, Length, enum_values)
#
TAGS_V2 = {
254: ("NewSubfileType", 4, 1),
255: ("SubfileType", 3, 1),
256: ("ImageWidth", 4, 1),
257: ("ImageLength", 4, 1),
258: ("BitsPerSample", 3, 0),
259: ("Compression", 3, 1,
{"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4,
"LZW": 5, "JPEG": 6, "PackBits": 32773}),
262: ("PhotometricInterpretation", 3, 1,
{"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RBG Palette": 3,
"Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8,
"CFA": 32803, # TIFF/EP, Adobe DNG
"LinearRaw": 32892}), # Adobe DNG
263: ("Thresholding", 3, 1),
264: ("CellWidth", 3, 1),
265: ("CellHeight", 3, 1),
266: ("FillOrder", 3, 1),
269: ("DocumentName", 2, 1),
270: ("ImageDescription", 2, 1),
271: ("Make", 2, 1),
272: ("Model", 2, 1),
273: ("StripOffsets", 4, 0),
274: ("Orientation", 3, 1),
277: ("SamplesPerPixel", 3, 1),
278: ("RowsPerStrip", 4, 1),
279: ("StripByteCounts", 4, 0),
280: ("MinSampleValue", 4, 0),
281: ("MaxSampleValue", 3, 0),
282: ("XResolution", 5, 1),
283: ("YResolution", 5, 1),
284: ("PlanarConfiguration", 3, 1, {"Contigous": 1, "Separate": 2}),
285: ("PageName", 2, 1),
286: ("XPosition", 5, 1),
287: ("YPosition", 5, 1),
288: ("FreeOffsets", 4, 1),
289: ("FreeByteCounts", 4, 1),
290: ("GrayResponseUnit", 3, 1),
291: ("GrayResponseCurve", 3, 0),
292: ("T4Options", 4, 1),
293: ("T6Options", 4, 1),
296: ("ResolutionUnit", 3, 1, {"inch": 1, "cm": 2}),
297: ("PageNumber", 3, 2),
301: ("TransferFunction", 3, 0),
305: ("Software", 2, 1),
306: ("DateTime", 2, 1),
315: ("Artist", 2, 1),
316: ("HostComputer", 2, 1),
317: ("Predictor", 3, 1),
318: ("WhitePoint", 5, 2),
319: ("PrimaryChromaticies", 3, 6),
320: ("ColorMap", 3, 0),
321: ("HalftoneHints", 3, 2),
322: ("TileWidth", 4, 1),
323: ("TileLength", 4, 1),
324: ("TileOffsets", 4, 0),
325: ("TileByteCounts", 4, 0),
332: ("InkSet", 3, 1),
333: ("InkNames", 2, 1),
334: ("NumberOfInks", 3, 1),
336: ("DotRange", 3, 0),
337: ("TargetPrinter", 2, 1),
338: ("ExtraSamples", 1, 0),
339: ("SampleFormat", 3, 0),
340: ("SMinSampleValue", 12, 0),
341: ("SMaxSampleValue", 12, 0),
342: ("TransferRange", 3, 6),
# obsolete JPEG tags
512: ("JPEGProc", 3, 1),
513: ("JPEGInterchangeFormat", 4, 1),
514: ("JPEGInterchangeFormatLength", 4, 1),
515: ("JPEGRestartInterval", 3, 1),
517: ("JPEGLosslessPredictors", 3, 0),
518: ("JPEGPointTransforms", 3, 0),
519: ("JPEGQTables", 4, 0),
520: ("JPEGDCTables", 4, 0),
521: ("JPEGACTables", 4, 0),
529: ("YCbCrCoefficients", 5, 3),
530: ("YCbCrSubSampling", 3, 2),
531: ("YCbCrPositioning", 3, 1),
532: ("ReferenceBlackWhite", 4, 0),
33432: ("Copyright", 2, 1),
# FIXME add more tags here
34665: ("ExifIFD", 3, 1),
# MPInfo
45056: ("MPFVersion", 7, 1),
45057: ("NumberOfImages", 4, 1),
45058: ("MPEntry", 7, 1),
45059: ("ImageUIDList", 7, 0),
45060: ("TotalFrames", 4, 1),
45313: ("MPIndividualNum", 4, 1),
45569: ("PanOrientation", 4, 1),
45570: ("PanOverlap_H", 5, 1),
45571: ("PanOverlap_V", 5, 1),
45572: ("BaseViewpointNum", 4, 1),
45573: ("ConvergenceAngle", 10, 1),
45574: ("BaselineLength", 5, 1),
45575: ("VerticalDivergence", 10, 1),
45576: ("AxisDistance_X", 10, 1),
45577: ("AxisDistance_Y", 10, 1),
45578: ("AxisDistance_Z", 10, 1),
45579: ("YawAngle", 10, 1),
45580: ("PitchAngle", 10, 1),
45581: ("RollAngle", 10, 1),
50741: ("MakerNoteSafety", 3, 1, {"Unsafe": 0, "Safe": 1}),
50780: ("BestQualityScale", 5, 1),
50838: ("ImageJMetaDataByteCounts", 4, 1),
50839: ("ImageJMetaData", 7, 1)
}
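# A hedged usage sketch: resolving a tag id to its metadata, e.g. tag 259
# (Compression), whose enum maps clear-text names to on-disk codes.
#
#   info = TAGS_V2[259]
#   info[0]           # 'Compression'
#   info[3]['LZW']    # 5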
# Legacy Tags structure
# these tags aren't included above, but were in the previous versions
TAGS = {347: 'JPEGTables',
700: 'XMP',
# Additional Exif Info
33434: 'ExposureTime',
33437: 'FNumber',
33723: 'IptcNaaInfo',
34377: 'PhotoshopInfo',
34675: 'ICCProfile',
34850: 'ExposureProgram',
34852: 'SpectralSensitivity',
34853: 'GPSInfoIFD',
34855: 'ISOSpeedRatings',
34856: 'OECF',
34864: 'SensitivityType',
34865: 'StandardOutputSensitivity',
34866: 'RecommendedExposureIndex',
34867: 'ISOSpeed',
34868: 'ISOSpeedLatitudeyyy',
34869: 'ISOSpeedLatitudezzz',
36864: 'ExifVersion',
36867: 'DateTimeOriginal',
36868: 'DateTImeDigitized',
37121: 'ComponentsConfiguration',
37122: 'CompressedBitsPerPixel',
37377: 'ShutterSpeedValue',
37378: 'ApertureValue',
37379: 'BrightnessValue',
37380: 'ExposureBiasValue',
37381: 'MaxApertureValue',
37382: 'SubjectDistance',
37383: 'MeteringMode',
37384: 'LightSource',
37385: 'Flash',
37386: 'FocalLength',
37396: 'SubjectArea',
37500: 'MakerNote',
37510: 'UserComment',
37520: 'SubSec',
37521: 'SubSecTimeOriginal',
37522: 'SubsecTimeDigitized',
40960: 'FlashPixVersion',
40961: 'ColorSpace',
40962: 'PixelXDimension',
40963: 'PixelYDimension',
40964: 'RelatedSoundFile',
40965: 'InteroperabilityIFD',
41483: 'FlashEnergy',
41484: 'SpatialFrequencyResponse',
41486: 'FocalPlaneXResolution',
41487: 'FocalPlaneYResolution',
41488: 'FocalPlaneResolutionUnit',
41492: 'SubjectLocation',
41493: 'ExposureIndex',
41495: 'SensingMethod',
41728: 'FileSource',
41729: 'SceneType',
41730: 'CFAPattern',
41985: 'CustomRendered',
41986: 'ExposureMode',
41987: 'WhiteBalance',
41988: 'DigitalZoomRatio',
41989: 'FocalLengthIn35mmFilm',
41990: 'SceneCaptureType',
41991: 'GainControl',
41992: 'Contrast',
41993: 'Saturation',
41994: 'Sharpness',
41995: 'DeviceSettingDescription',
41996: 'SubjectDistanceRange',
42016: 'ImageUniqueID',
42032: 'CameraOwnerName',
42033: 'BodySerialNumber',
42034: 'LensSpecification',
42035: 'LensMake',
42036: 'LensModel',
42037: 'LensSerialNumber',
42240: 'Gamma',
# Adobe DNG
50706: 'DNGVersion',
50707: 'DNGBackwardVersion',
50708: 'UniqueCameraModel',
50709: 'LocalizedCameraModel',
50710: 'CFAPlaneColor',
50711: 'CFALayout',
50712: 'LinearizationTable',
50713: 'BlackLevelRepeatDim',
50714: 'BlackLevel',
50715: 'BlackLevelDeltaH',
50716: 'BlackLevelDeltaV',
50717: 'WhiteLevel',
50718: 'DefaultScale',
50719: 'DefaultCropOrigin',
50720: 'DefaultCropSize',
50721: 'ColorMatrix1',
50722: 'ColorMatrix2',
50723: 'CameraCalibration1',
50724: 'CameraCalibration2',
50725: 'ReductionMatrix1',
50726: 'ReductionMatrix2',
50727: 'AnalogBalance',
50728: 'AsShotNeutr
|
fgaudin/aemanager
|
notification/migrations/0002_populate_users.py
|
Python
|
agpl-3.0
| 4,216
| 0.00759
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
for user in orm['auth.user'].objects.all():
notification = orm.Notification()
notification.user = user
notification.save()
def backwards(self, orm):
orm['notification.notification'].objects.all().delete()
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'notification.notification': {
'Meta': {'object_name': 'Notification'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notify_bug_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'notify_invoices_to_send': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'notify_late_invoices': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['notification']
|
shiblon/pytour
|
3/tutorials/while_loops.py
|
Python
|
apache-2.0
| 1,756
| 0.008542
|
# vim:tw=50
""""While" Loops
Recursion is powerful, but not always convenient
or efficient for processing sequences. That's why
Python has **loops**.
A _loop_ is just what it sounds like: you do
something, then you go round and do it again, like
a track: you run around, then you run around again.
Loops let you do repetitive things, like printing
all of the elements of a list, or adding them all
together, without using recursion.
Python supports two kinds. We'll start with
**while loops**.
A |while| statement is like an |if| statement, in
that it executes the indented block if its condition is
|True| (nonzero). But, unlike |if|, it *keeps on
doing it* until the condition becomes |False| or
it hits a |break| statement. Forever.
The code window shows a while loop that prints
every element of a list. There's another one that
adds all of the elements. It does this
without recursion. Check it out.
Exercises
- Look at |print_all|. Why does it eventually
stop? What is the value of |i| when it does?
- Why does |slicing_print_all| stop? How does it
work?
"""
__doc__ = """Use while loops to do things repetitively."""
def print_all(seq):
"""Print all elements of seq."""
i = 0
while i < len(seq):
print("item", i, seq[i])
i = i + 1 # This is also spelled 'i += 1'
def slicing_print_all(seq):
"""Another way of using while - less efficient."""
while seq:
print(seq[0])
seq = seq[1:]
def add_all(seq):
"""Add all of the elements of seq."""
i = 0
s = 0
while i < len(seq):
s += seq[i]
i += 1
return s
print("Using indices:")
print_all([1, 5, 8, "hello", 9])
print("Using slices:")
slicing_print_all(range(3))
print("Summing:")
print("sum of all:", add_all(range(1,12))) # Should be 66
|
markstoehr/phoneclassification
|
local/CExtractPatches_from_spec.py
|
Python
|
gpl-3.0
| 8,937
| 0.013651
|
from __future__ import division
import numpy as np
import argparse, itertools
from scipy.io import wavfile
from template_speech_rec import configParserWrapper
from TestSVMBernoulli import get_bernoulli_templates
from scipy.ndimage.filters import maximum_filter
from amitgroup.stats import bernoullimm
import matplotlib.pyplot as plt
from matplotlib.colors import colorConverter
from mpl_toolkits.axes_grid1 import ImageGrid
import matplotlib.cm as cm
import spectral_features.filters.filterbank as fb
from phoneclassification.transforms import spectrogram, preemphasis, process_wav, smooth_log_spectrogram
from template_speech_rec.get_train_data import get_edge_features_use_config
import template_speech_rec.edge_signal_proc as esp
def get_maximal_patches(X,S,patch_radius=2,min_count=50):
"""
"""
k = 2*patch_radius+1
edge_sub_shape=(k,k,8)
edge_view_shape=tuple(np.subtract(X.shape,edge_sub_shape)+1)[:-1]+edge_sub_shape
edge_arr_view= np.lib.stride_tricks.as_strided(X,edge_view_shape,X.strides[:-1] + X.strides )
edge_arr_sums = edge_arr_view.sum(-1).sum(-1).sum(-1)
edge_local_maxes = maximum_filter(edge_arr_sums,size=(k,k),cval=0,mode='constant')
    local_max_patches = edge_arr_view[(edge_arr_sums >= edge_local_maxes) * (edge_arr_sums >= min_count)]  # use the min_count parameter (was a hard-coded 50)
spec_sub_shape = (k,k)
spec_view_shape=tuple(np.subtract(S.shape,spec_sub_shape)+1)+spec_sub_shape
spec_arr_view=np.lib.stride_tricks.as_strided(S,spec_view_shape,S.strides *2 )
return local_max_patches, spec_arr_view[edge_arr_sums >= edge_local_maxes]
def main(args):
"""
    For each label and component construct a positive and negative
training set and train a linear SVM to separate them
"""
config_d = configParserWrapper.load_settings(open(args.config,'r'))
true_examples = []
false_examples = []
mean = 0
total = 0
num_less_than_eq = np.zeros(20)
fls = np.loadtxt(args.fls_txt, dtype=str)
all_X_patches = []
all_S_patches = []
htemp, dhtemp, ddhtemp, tttemp = fb.hermite_window(
args.winsize,
args.num_tapers,
args.win_half_time_support)
run_transform = lambda x, winlength : esp.get_spectrogram_features(x,
16000,
winlength,
80,
2**(int(np.ceil(np.log2(winlength)))),
4000,
7,
)
X_patches = []
S_patches = []
for fl_id, fl_path in enumerate(fls):
if len(X_patches) > 100000: break
S = run_transform(wavfile.read(fl_path)[1], args.winsize)
# spectrogram(,
# 16000,
# 3200,
# args.winsize,
# 2**int(np.ceil(np.log2(args.winsize))),
# 2,
# htemp)
if args.do_exp_weighted_divergence:
Sold = S.copy()
S *=np.exp(S)
X = get_edge_features_use_config(S.T,config_d['EDGES'])
cur_X_patches, cur_S_patches = get_maximal_patches(X,S,patch_radius=2)
X_patches.extend(cur_X_patches)
S_patches.extend(cur_S_patches)
num_new_patches = len(X_patches)
X = np.array(X_patches)
S = np.array(S_patches)
data_shape = X.shape[1:]
X = X.reshape(X.shape[0],np.prod(data_shape))
bmm = bernoullimm.BernoulliMM(n_components=args.n_components,
n_init= 50,
n_iter= 500,
random_state=0,
verbose=args.v, tol=1e-6)
bmm.fit(X)
# check above 30
use_means = bmm.predict_proba(X).sum(0) > 30
print use_means.sum()
try:
np.save(args.save_parts,bmm.means_.reshape(*( (bmm.n_components,)+data_shape))[use_means])
except:
import pdb; pdb.set_trace()
S_shape = S.shape[1:]
import pdb; pdb.set_trace()
S_clusters = bmm.cluster_underlying_data(S.reshape(len(S),np.prod(S_shape)),X).reshape(
*( (bmm.n_components,) + S_shape))[use_means]
np.save(args.spec_save_parts,S_clusters)
ncols = int(np.sqrt(args.n_components))
nrows = int(np.ceil(args.n_components/ncols))
if args.viz_spec_parts is not None:
plt.close('all')
fig = plt.figure(1, (6, 6))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols = (nrows,ncols ), # creates 2x2 grid of axes
axes_pad=0.001, # pad between axes in inch.
)
for i in xrange(S_clusters.shape[0]):
try:
grid[i].imshow(S_clusters[i],cmap=cm.binary,interpolation='nearest')
grid[i].spines['bottom'].set_color('red')
grid[i].spines['top'].set_color('red')
grid[i].spines['left'].set_color('red')
grid[i].spines['right'].set_color('red')
for a in grid[i].axis.values():
a.toggle(all=False)
except:
import pdb; pdb.set_trace()
for i in xrange(S_clusters.shape[0],nrows*ncols):
try:
grid[i].spines['bottom'].set_color('red')
except: import pdb; pdb.set_trace()
grid[i].spines['top'].set_color('red')
        grid[i].spines['left'].set_color('red')
grid[i].spines['right'].set_color('red')
        for a in grid[i].axis.values():
            a.toggle(all=False)
plt.savefig('%s' % args.viz_spec_parts
,bbox_inches='tight')
if __name__=="__main__":
parser = argparse.ArgumentParser("""For each component and model
we construct a positive and negative data subset and then
train an SVM""")
parser.add_argument('-v',
action='store_true',
help='verbosity flag')
parser.add_argument('--config',
type=str,
default='conf/main.config',
help='configuration file')
parser.add_argument('--data',
type=str,
nargs='+',
help='paths to where the data are kept, in order of phone ids')
parser.add_argument('--data_spec',
type=str,
nargs='+',
help='paths to where the spec data are kept, in order of phone ids')
parser.add_argument('--save_parts',
type=str,
help='paths to where the data are kept, in order of phone ids')
parser.add_argument('--spec_save_parts',
type=str,
help='paths to where the spec data are kept, in order of phone ids')
parser.add_argument('--viz_spec_parts',
type=str,
default=None,
help='paths to where the spec data are kept, in order of phone ids')
parser.add_argument('--n_components',
type=int,
default=50,
help='number of components')
parser.add_argument('--num_tapers',
type=int,
default=5,
help='number of tapers for signal processing')
parser.add_argument('--winsize',
type=int,
default=256,
help='window length for processing')
parser.add_argument('--win_half_time_support',
type=int,
default=4,
help='half time support for tapers')
parser.add_argument('--fls_txt',
|
Domatix/stock-logistics-workflow
|
stock_picking_whole_scrap/__manifest__.py
|
Python
|
agpl-3.0
| 641
| 0
|
# Copyright 2018 Tecnativa - Sergio Teruel
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Stock Picking Whole Scrap',
'summary': 'Create whole scrap from a picking for move lines',
'version': '11.0.1.0.0',
'development_status': 'Beta',
'category': 'Warehouse',
'website': 'https://github.com/OCA/stock-logistics-workflow',
'author': 'Tecnativa, Odoo Community Association (OCA)',
'license': 'AGPL-3',
'installable': True,
'depends': [
'stock',
],
'data': [
        'wizards/stock_picking_whole_scrap.xml',
'views/stock_picking_views.xml',
],
}
|
bstroebl/QGIS
|
python/plugins/sextante/parameters/ParameterVector.py
|
Python
|
gpl-2.0
| 4,042
| 0.003216
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ParameterVector.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from sextante.parameters.ParameterDataObject import ParameterDataObject
from sextante.core.QGisLayers import QGisLayers
from qgis.core import *
from sextante.core.LayerExporter import LayerExporter
class ParameterVector(ParameterDataObject):
VECTOR_TYPE_POINT = 0
VECTOR_TYPE_LINE = 1
VECTOR_TYPE_POLYGON = 2
VECTOR_TYPE_ANY = -1
def __init__(self, name="", description="", shapetype=-1, optional=False):
ParameterDataObject.__init__(self, name, description)
self.optional = optional
self.shapetype = shapetype
self.value = None
self.exported = None
def setValue(self, obj):
self.exported = None
if obj == None:
if self.optional:
self.value = None
return True
else:
return False
if isinstance(obj, QgsVectorLayer):
self.value = unicode(obj.source())
return True
else:
self.value = unicode(obj)
layers = QGisLayers.getVectorLayers(self.shapetype)
for layer in layers:
if layer.name() == self.value:
self.value = unicode(layer.source())
return True
return True
def getSafeExportedLayer(self):
'''Returns not the value entered by the user, but a string with a filename which
contains the data of this layer, but saved in a standard format (currently always
a shapefile) so that it can be opened by most external applications.
        If there is a selection and SEXTANTE is configured to use just the selection, it exports
the layer even if it is already in a suitable format.
Works only if the layer represented by the parameter value is currently loaded in QGIS.
Otherwise, it will not perform any export and return the current value string.
If the current value represents a layer in a suitable format, it does not export at all
and returns that value.
The layer is exported just the first time the method is called. The method can be called
several times and it will always return the same file, performing the export only the first time.'''
if self.exported:
return self.exported
layer = QGisLayers.getObjectFromUri(self.value, False)
if layer:
self.exported = LayerExporter.exportVectorLayer(layer)
else:
self.exported = self.value
return self.exported
def serialize(self):
        return self.__module__.split(".")[-1] + "|" + self.name + "|" + self.description +\
                    "|" + str(self.shapetype) + "|" + str(self.optional)
def deserialize(self, s):
tokens = s.split("|")
        return ParameterVector(tokens[0], tokens[1], int(tokens[2]), str(True) == tokens[3])
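    # Note (added for clarity): serialize() prepends the class-name token, but
    # deserialize() indexes from the parameter name, so the caller is expected
    # to strip the leading 'ParameterVector' token first. Illustrative round
    # trip: deserialize('INPUT|Input layer|-1|False')
    #   -> ParameterVector('INPUT', 'Input layer', -1, optional=False)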
def getAsScriptCode(self):
return "##" + self.name + "=vector"
|
hayderimran7/tempest
|
tempest/api/compute/servers/test_virtual_interfaces_negative.py
|
Python
|
apache-2.0
| 1,691
| 0
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest_lib import exceptions as lib_exc
from tempest.api.compute import base
from tempest import test
class VirtualInterfacesNegativeTestJSON(base.BaseV2ComputeTest):
@classmethod
def setup_credentials(cls):
# For this test no network resources are needed
cls.set_network_resources()
        super(VirtualInterfacesNegativeTestJSON, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(VirtualInterfacesNegativeTestJSON, cls).setup_clients()
cls.client = cls.servers_client
@test.attr(type=['negative'])
@test.idempotent_id('64ebd03c-1089-4306-93fa-60f5eb5c803c')
@test.services('network')
def test_list_virtual_interfaces_invalid_server_id(self):
# Negative test: Should not be able to GET virtual interfaces
        # for an invalid server_id
invalid_server_id = str(uuid.uuid4())
self.assertRaises(lib_exc.NotFound,
self.client.list_virtual_interfaces,
invalid_server_id)
|
squirrelo/qiita
|
qiita_db/test/test_base.py
|
Python
|
bsd-3-clause
| 5,463
| 0.000183
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import TestCase, main
from qiita_core.exceptions import IncompetentQiitaDeveloperError
from qiita_core.util import qiita_test_checker
from qiita_core.qiita_settings import qiita_config
import qiita_db as qdb
@qiita_test_checker()
class QiitaBaseTest(TestCase):
"""Tests that the base class functions act correctly"""
def setUp(self):
# We need an actual subclass in order to test the equality functions
self.tester = qdb.artifact.Artifact(1)
self.portal = qiita_config.portal
def tearDown(self):
qiita_config.portal = self.portal
def test_init_base_error(self):
"""Raises an error when instantiating a base class directly"""
with self.assertRaises(IncompetentQiitaDeveloperError):
qdb.base.QiitaObject(1)
def test_init_error_inexistent(self):
"""Raises an error when instantiating an object that does not exists"""
with self.assertRaises(qdb.exceptions.QiitaDBUnknownIDError):
qdb.artifact.Artifact(10)
def test_check_subclass(self):
"""Nothing happens if check_subclass called from a subclass"""
self.tester._check_subclass()
def test_check_subclass_error(self):
"""check_subclass raises an error if called from a base class"""
# Checked through the __init__ call
with self.assertRaises(IncompetentQiitaDeveloperError):
qdb.base.QiitaObject(1)
with self.assertRaises(IncompetentQiitaDeveloperError):
qdb.base.QiitaStatusObject(1)
def test_check_id(self):
"""Correctly checks if an id exists on the database"""
self.assertTrue(self.tester._check_id(1))
self.assertFalse(self.tester._check_id(100))
def test_check_portal(self):
"""Correctly checks if object is accessable in portal given"""
qiita_config.portal = 'QIITA'
tester = qdb.analysis.Analysis(1)
self.assertTrue(tester._check_portal(1))
qiita_config.portal = 'EMP'
self.assertFalse(tester._check_portal(1))
self.assertTrue(self.tester._check_portal(1))
def test_equal_self(self):
"""Equality works with the same object"""
self.assertEqual(self.tester, self.tester)
def test_equal(self):
"""Equality works with two objects pointing to the same instance"""
new = qdb.artifact.Artifact(1)
self.assertEqual(self.tester, new)
def test_not_equal(self):
"""Not equals works with object of the same type"""
sp1 = qdb.study.StudyPerson(1)
sp2 = qdb.study.StudyPerson(2)
self.assertNotEqual(sp1, sp2)
def test_not_equal_type(self):
"""Not equals works with object of different type"""
new = qdb.study.Study(1)
self.assertNotEqual(self.tester, new)
@qiita_test_checker()
class QiitaStatusObjectTest(TestCase):
    """Tests that the QiitaStatusObject class functions act correctly"""
def setUp(self):
# We need an actual subclass in order to test the equality functions
        self.tester = qdb.analysis.Analysis(1)
def test_status(self):
"""Correctly returns the status of the object"""
self.assertEqual(self.tester.status, "in_construction")
def test_check_status_single(self):
"""check_status works passing a single status"""
self.assertTrue(self.tester.check_status(["in_construction"]))
self.assertFalse(self.tester.check_status(["queued"]))
def test_check_status_exclude_single(self):
"""check_status works passing a single status and the exclude flag"""
self.assertTrue(self.tester.check_status(["public"], exclude=True))
self.assertFalse(self.tester.check_status(["in_construction"],
exclude=True))
def test_check_status_list(self):
"""check_status work passing a list of status"""
self.assertTrue(self.tester.check_status(
["in_construction", "queued"]))
self.assertFalse(self.tester.check_status(
["public", "queued"]))
def test_check_status_exclude_list(self):
"""check_status work passing a list of status and the exclude flag"""
self.assertTrue(self.tester.check_status(
["public", "queued"], exclude=True))
self.assertFalse(self.tester.check_status(
["in_construction", "queued"], exclude=True))
def test_check_status_unknown_status(self):
"""check_status raises an error if an invalid status is provided"""
with self.assertRaises(ValueError):
self.tester.check_status(["foo"])
with self.assertRaises(ValueError):
self.tester.check_status(["foo"], exclude=True)
def test_check_status_unknown_status_list(self):
"""check_status raises an error if an invalid status list is provided
"""
with self.assertRaises(ValueError):
self.tester.check_status(["foo", "bar"])
with self.assertRaises(ValueError):
self.tester.check_status(["foo", "bar"], exclude=True)
if __name__ == '__main__':
main()
|
algorithmiaio/algorithmia-python
|
Algorithmia/util.py
|
Python
|
mit
| 1,473
| 0.002716
|
import re
import hashlib
FNAME_MATCH = re.compile(r'/([^/]+)$') # From the last slash to the end of the string
PREFIX = re.compile(r'([^:]+://)(/)?(.+)') # Check for a prefix like data://
def getParentAndBase(path):
match = PREFIX.match(path)
if match is None:
if path.endswith('/'):
stripped_path = path[:-1]
else:
stripped_path = path
base = FNAME_MATCH.search(stripped_path)
if base is None:
raise ValueError('Invalid path')
        parent = FNAME_MATCH.sub('', stripped_path)
return parent, base.group(1)
else:
prefix, leading_slash, uri = match.groups()
parts = uri.split('/')
parent_path = '/'.join(parts[:-1])
if leading_slash is not None:
parent_path = '{prefix}/{uri}'.format(prefix=prefix, uri='/'.join(parts[:-1]))
else:
parent_path = '{prefix}{uri}'.format(prefix=prefix, uri='/'.join(parts[:-1]))
return parent_path, parts[-1]
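# Worked examples (illustrative values, derived from the regexes above):
#   getParentAndBase('data://.my/collection/file.txt')
#     -> ('data://.my/collection', 'file.txt')   # prefixed-URI branch
#   getParentAndBase('/tmp/foo/bar.txt')
#     -> ('/tmp/foo', 'bar.txt')                 # plain-path branch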
def pathJoin(parent, base):
if parent.endswith('/'):
return parent + base
return parent + '/' + base
def md5_for_file(fname):
hash_md5 = hashlib.md5()
with open(fname, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return str(hash_md5.hexdigest())
def md5_for_str(content):
hash_md5 = hashlib.md5()
hash_md5.update(content.encode())
return str(hash_md5.hexdigest())
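# Usage sketch (hypothetical path): both helpers return the hex digest as a
# str, so for a file containing exactly b'hello' the results agree:
#   md5_for_file('/tmp/hello.txt') == md5_for_str('hello')
# md5_for_file hashes in 4096-byte chunks, so large files are never read
# into memory at once.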
|
klebercode/rhape
|
rha/settings.py
|
Python
|
mit
| 2,677
| 0
|
"""
Django settings for rha project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# import os
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))
from decouple import config
from dj_database_url import parse as db_url
from unipath import Path
BASE_DIR = Path(__file__).parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [
'.localhost',
'127.0.0.1',
'.herokuapp.com',
'.rha.com.br',
'.ow7.com.br',
]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'bootstrap3',
'rha.core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'rha.urls'
WSGI_APPLICATION = 'rha.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
    'default': config(
'DATABASE_URL',
default='sqlite:///' + BASE_DIR.child('db.sqlite3'),
cast=db_url),
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Recife'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = BASE_DIR.child('staticfiles')
STATIC_URL = '/static/'
MEDIA_ROOT = BASE_DIR.child('media')
MEDIA_URL = '/media/'
# EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DEFAULT_FROM_EMAIL = 'RHAPE <no-reply@rhape.com.br>'
EMAIL_USE_TLS = True
EMAIL_HOST = config('EMAIL_HOST')
EMAIL_HOST_USER = config('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = config('EMAIL_HOST_PASSWORD')
EMAIL_PORT = 587
|
wolfe-pack/moro
|
public/javascripts/brat/server/src/tag.py
|
Python
|
bsd-2-clause
| 6,002
| 0.001833
|
#!/usr/bin/env python
# -*- Mode: Python; tab-width: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# vim:set ft=python ts=4 sw=4 sts=4 autoindent:
'''
Functionality for invoking tagging services.
Author: Pontus Stenetorp
Version: 2011-04-22
'''
from __future__ import with_statement
from httplib import HTTPConnection, HTTPSConnection
from os.path import join as path_join
from socket import error as SocketError
from urlparse import urlparse
from annotation import TextAnnotations, TextBoundAnnotationWithText
from annotator import _json_from_ann, ModificationTracker
from common import ProtocolError
from document import real_directory
from jsonwrap import loads
from message import Messager
from projectconfig import ProjectConfiguration
### Constants
QUERY_TIMEOUT = 30
###
class UnknownTaggerError(ProtocolError):
def __init__(self, tagger):
self.tagger = tagger
def __str__(self):
return ('Tagging request received for '
'an unknown tagger "%s"') % self.tagger
def json(self, json_dic):
json_dic['exception'] = 'unknownTaggerError'
class InvalidConnectionSchemeError(ProtocolError):
def __init__(self, tagger, scheme):
self.tagger = tagger
self.scheme = scheme
def __str__(self):
        return ('The tagger "%s" uses the unsupported scheme "%s"'
                % (self.tagger, self.scheme, ))
def json(self, json_dic):
json_dic['exception'] = 'unknownTaggerError'
class InvalidTaggerResponseError(ProtocolError):
def __init__(self, tagger, response):
self.tagger = tagger
self.response = response
def __str__(self):
return (('The tagger "%s" returned an invalid JSON response, please '
                'contact the tagger service maintainer. Response: "%s"')
% (self.tagger, self.response, ))
def json(self, json_dic):
json_dic['exception'] = 'unknownTaggerError'
class TaggerConnectionError(ProtocolError):
def __init__(self, tagger, error):
self.tagger = tagger
self.error = error
def __str__(self):
return ('Tagger service %s returned the error: "%s"'
% (self.tagger, self.error, ))
def json(self, json_dic):
json_dic['exception'] = 'taggerConnectionError'
def tag(collection, document, tagger):
pconf = ProjectConfiguration(real_directory(collection))
for tagger_token, _, _, tagger_service_url in pconf.get_annotator_config():
if tagger == tagger_token:
break
else:
raise UnknownTaggerError(tagger)
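    # Note (added for clarity): the else above is Python's for/else -- it runs
    # only when the loop finishes without break, i.e. when no configured
    # annotator matched the requested tagger name.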
doc_path = path_join(real_directory(collection), document)
with TextAnnotations(path_join(real_directory(collection),
document)) as ann_obj:
url_soup = urlparse(tagger_service_url)
if url_soup.scheme == 'http':
Connection = HTTPConnection
elif url_soup.scheme == 'https':
Connection = HTTPSConnection
else:
raise InvalidConnectionSchemeError(tagger_token, url_soup.scheme)
conn = None
try:
conn = Connection(url_soup.netloc)
req_headers = {
'Content-type': 'text/plain; charset=utf-8',
'Accept': 'application/json',
}
# Build a new service URL since the request method doesn't accept
# a parameters argument
service_url = url_soup.path + (
'?' + url_soup.query if url_soup.query else '')
try:
data = ann_obj.get_document_text().encode('utf-8')
req_headers['Content-length'] = len(data)
# Note: Trout slapping for anyone sending Unicode objects here
conn.request('POST',
# As per: http://bugs.python.org/issue11898
# Force the url to be an ascii string
                             str(service_url),
data,
headers=req_headers)
except SocketError, e:
raise TaggerConnectionError(tagger_token, e)
resp = conn.getresponse()
# Did the request succeed?
if resp.status != 200:
                raise TaggerConnectionError(tagger_token,
'%s %s' % (resp.status, resp.reason))
# Finally, we can read the response data
resp_data = resp.read()
finally:
            if conn is not None:
conn.close()
try:
json_resp = loads(resp_data)
except ValueError:
raise InvalidTaggerResponseError(tagger_token, resp_data)
mods = ModificationTracker()
for ann_data in json_resp.itervalues():
assert 'offsets' in ann_data, 'Tagger response lacks offsets'
offsets = ann_data['offsets']
assert 'type' in ann_data, 'Tagger response lacks type'
_type = ann_data['type']
assert 'texts' in ann_data, 'Tagger response lacks texts'
texts = ann_data['texts']
# sanity
assert len(offsets) != 0, 'Tagger response has empty offsets'
assert len(texts) == len(offsets), 'Tagger response has different numbers of offsets and texts'
# Note: We do not support discontinuous spans at this point
assert len(offsets) < 2, 'Tagger response has multiple offsets (discontinuous spans not supported)'
start, end = offsets[0]
text = texts[0]
_id = ann_obj.get_new_id('T')
tb = TextBoundAnnotationWithText(((start, end),), _id, _type, text)
mods.addition(tb)
ann_obj.add_annotation(tb)
mod_resp = mods.json_response()
mod_resp['annotations'] = _json_from_ann(ann_obj)
return mod_resp
if __name__ == '__main__':
# Silly test, but helps
tag('/BioNLP-ST_2011_ID_devel', 'PMC1874608-01-INTRODUCTION', 'random')
|
dww100/sct
|
python/bin/sctify.py
|
Python
|
apache-2.0
| 1,761
| 0.000568
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sctify: converts a CHARMM PSF/PDB pair to a SCT compatible PDB
"""
# Copyright 2014 University College London
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import argparse
import sct
def parse_arguments():
"""Parse command line arguments and ensure correct combinations present"""
parser = argparse.ArgumentParser(
        description='Convert a CHARMM PSF/PDB pair to a SCT compatible PDB\n')
    parser.add_argument('-i', '--input_pdb', nargs='?', type=str,
dest='pdb_path', help='Path to the input PDB file',
required=True)
parser.add_argument('-p', '--input_psf', nargs='?', type=str,
dest='psf_path', help='Path to the input PSF file',
required=True)
parser.add_argument('-o', '--output_pdb', nargs='?', type=str,
dest='pdb_out', default=None,
                        help='Path to the output PDB file')
    return parser.parse_args()
def main():
args = parse_arguments()
atoms = sct.pdb.process_pdb_psf(args.psf_path, args.pdb_path)
sct.pdb.write_pdb(atoms, args.pdb_out)
if __name__ == "__main__":
main()
|
atiberghien/makerscience-server
|
makerscience_catalog/migrations/0004_auto__add_field_makerscienceresource_level__add_field_makersciencereso.py
|
Python
|
agpl-3.0
| 5,685
| 0.00686
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'MakerScienceResource.level'
db.add_column(u'makerscience_catalog_makerscienceresource', 'level',
self.gf('django.db.models.fields.CharField')(default=2, max_length=1),
keep_default=False)
# Adding field 'MakerScienceResource.duration'
db.add_column(u'makerscience_catalog_makerscienceresource', 'duration',
self.gf('django.db.models.fields.CharField')(default='1', max_length=30),
keep_default=False)
# Adding field 'MakerScienceResource.cost'
db.add_column(u'makerscience_catalog_makerscienceresource', 'cost',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'MakerScienceResource.level'
db.delete_column(u'makerscience_catalog_makerscienceresource', 'level')
# Deleting field 'MakerScienceResource.duration'
db.delete_column(u'makerscience_catalog_makerscienceresource', 'duration')
# Deleting field 'MakerScienceResource.cost'
db.delete_column(u'makerscience_catalog_makerscienceresource', 'cost')
models = {
u'makerscience_catalog.makerscienceproject': {
'Meta': {'object_name': 'MakerScienceProject'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"})
},
u'makerscience_catalog.makerscienceresource': {
'Meta': {'object_name': 'MakerScienceResource'},
'cost': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'duration': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"})
},
u'projects.project': {
'Meta': {'object_name': 'Project'},
'baseline': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'begin_date': ('django.db.models.fields.DateField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scout.PostalAddress']", 'null': 'True', 'blank': 'True'}),
'progress': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectProgress']", 'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'projects.projectprogress': {
'Meta': {'ordering': "['order']", 'object_name': 'ProjectProgress'},
'icon': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'progress_range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectProgressRange']"})
},
u'projects.projectprogressrange': {
'Meta': {'object_name': 'ProjectProgressRange'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'})
},
u'scout.postaladdress': {
'Meta': {'object_name': 'PostalAddress'},
'address_locality': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'address_region': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post_office_box_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'street_address': ('django.db.models.fields.TextField', [], {'null': 'True'})
}
}
complete_apps = ['makerscience_catalog']
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/densitymapbox/colorbar/_dtick.py
|
Python
|
mit
| 500
| 0.002
|
import _plotly_utils.basevalidators
class DtickValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(
self, plotly_name="dtick", parent_name="densitymapbox.colorbar", **kwargs
):
super(DtickValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
implied_edits=kwargs.pop("implied_edits", {"tickmode": "linear"}),
**kwargs
)
|
glaudsonml/kurgan-ai
|
tools/sqlmap/tamper/space2comment.py
|
Python
|
apache-2.0
| 1,319
| 0.001516
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.LOW
def dependencies():
pass
def tamper(payload, **kwargs):
"""
Replaces space character (' ') with comments '/**/'
Tested against:
* Microsoft SQL Server 2005
* MySQL 4, 5.0 and 5.5
* Oracle 10g
* PostgreSQL 8.3, 8.4, 9.0
Notes:
* Useful to bypass weak and bespoke web application firewalls
>>> tamper('SELECT id FROM users')
'SELECT/**/id/**/FROM/**/users'
"""
retVal = payload
if payload:
retVal = ""
quote, doublequote, firstspace = False, False, False
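        # (comment added) quote/doublequote track whether the current char is
        # inside a single- or double-quoted string literal, so spaces inside
        # quoted literals are copied verbatim instead of becoming /**/.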
for i in xrange(len(payload)):
if not firstspace:
if payload[i].isspace():
firstspace = True
retVal += "/**/"
continue
            elif payload[i] == '\'':
quote = not quote
elif payload[i] == '"':
doublequote = not doublequote
elif payload[i] == " " and not doublequote and not quote:
retVal += "/**/"
continue
            retVal += payload[i]
return retVal
|
sinnwerkstatt/ecg-balancing
|
ecg_balancing/migrations/0012_auto__add_unique_company_slug.py
|
Python
|
mit
| 10,698
| 0.007758
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding unique constraint on 'Company', fields ['slug']
db.create_unique(u'ecg_balancing_company', ['slug'])
def backwards(self, orm):
# Removing unique constraint on 'Company', fields ['slug']
db.delete_unique(u'ecg_balancing_company', ['slug'])
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'ecg_balancing.company': {
'Meta': {'object_name': 'Company'},
'activities': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'employees_number': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'foundation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'industry': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'managing_directors': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'model_creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owners': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'revenue': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'zipcode': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'ecg_balancing.companybalance': {
'Meta': {'object_name': 'CompanyBalance'},
'auditor': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'common_good': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'balance'", 'to': u"orm['ecg_balancing.Company']"}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'matrix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'company_balances'", 'to': u"orm['ecg_balancing.ECGMatrix']"}),
'peer_companies': ('django.db.models.fields.related.ManyToManyField', [], {'max_length': '255', 'to': u"orm['ecg_balancing.Company']", 'null': 'True', 'symmetrical': 'False', 'blank': 'True'}),
'process_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'prospect': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.SmallIntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'})
},
u'ecg_balancing.companybalanceindicator': {
'Meta': {'object_name': 'CompanyBalanceIndicator'},
'company_balance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'company_balance'", 'to': u"orm['ecg_balancing.CompanyBalance']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'evaluation': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'company_balance'", 'to': u"orm['ecg_balancing.Indicator']"})
},
u'ecg_balancing.ecgmatrix': {
'Meta': {'object_name': 'ECGMatrix'},
'contact': ('djang
|
bendaf/diff_drive_entropy
|
EntropicRobot/main_race.py
|
Python
|
gpl-2.0
| 4,131
| 0.002905
|
import pygame
import sys
from PIL import Image # Python Imaging Library
from vector_math import Vector2
pygame.init()
# load map with PIL
image_filename = "track_new_3.bmp"
#image_filename = "empty.bmp"
class Environment:
def __init__(self):
img = Image.open(image_filename)
self.track = img.load() # for pixel information
self.width, self.height = img.size
def get_size(self):
        return self.width, self.height
def is_free(self, x, y):
return self.track[x, y] == (255, 255, 255)
def draw_pixel(self, r, g, b, x, y):
        global screen
        draw_pixel(screen, r, g, b, x, y)
class Goal:
def __init__(self, x=100, y=100, size=10):
self.pos = Vector2(x, y)
self.size = size
def draw(self):
pygame.draw.rect(screen, (255, 0, 0), (self.pos.x, self.pos.y, self.size, self.size))
def draw_pixel(surface, r, g, b, x, y):
surface.fill((r, g, b), ((x, y), (2, 2)))
environment = Environment()
# Create screen
width, height = environment.get_size()
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('Entropic AI')
background = pygame.image.load(image_filename).convert() # for display
velocity_font = pygame.font.Font(None, 30)
robot_list = list()
RED = (255, 0, 0)
YELLOW = (200, 200, 0)
from robot import Robot
myRobot = Robot(environment, 495, 147, 7, speed=0.2, color=RED)
robot_list.append(myRobot)
myRobot2 = Robot(environment, 495, 147, 7, speed=0.2, color=YELLOW)
robot_list.append(myRobot2)
"""
from robot_goal import Robot
goal = Goal(width-50, 350)
myRobot = Robot(environment, width/2, height/2, 7, g=goal.pos)
"""
walked_path = list()
clock = pygame.time.Clock()
running = True
paused = False
draw_walked_path = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
running = False
sys.exit()
elif event.key == pygame.K_p:
paused ^= True
elif event.key == pygame.K_f:
draw_walked_path ^= True
elif event.key == pygame.K_w:
myRobot.timeHorizon += 5
print("time horizon = ", myRobot.timeHorizon)
elif event.key == pygame.K_s:
myRobot.timeHorizon -= 5
print("time horizon = ", myRobot.timeHorizon)
elif event.key == pygame.K_d:
myRobot.numberOfPaths += 5
print("number of paths = ", myRobot.numberOfPaths)
elif event.key == pygame.K_a:
myRobot.numberOfPaths -= 5
print("number of paths = ", myRobot.numberOfPaths)
elif event.key == pygame.K_x:
myRobot.maxSpeed += 1.0
print("max speed = ", myRobot.maxSpeed)
elif event.key == pygame.K_z:
myRobot.maxSpeed -= 1.0
print("max speed = ", myRobot.maxSpeed)
if event.type == pygame.MOUSEBUTTONDOWN:
print(pygame.mouse.get_pos())
myRobot.pos.x, myRobot.pos.y = pygame.mouse.get_pos()
myRobot.speed = 0
walked_path.clear()
screen.blit(background, [0, 0]) # redraw clean background
# Update:
if not paused:
#myRobot.sensor()
for r in robot_list:
future_positions = r.simulate()
r.move()
#walked_path.append(r.pos.as_int())
for p in future_positions:
draw_pixel(screen, 0, 0, 255, *p)
# Draw
#goal.draw()
for r in robot_list:
r.draw(screen)
#text = velocity_font.render("v = "+str(myRobot.speed), 1, (255, 0, 0))
#screen.blit(text, (0, 0))
if len(walked_path) > 1 and draw_walked_path:
pygame.draw.lines(screen, (0, 255, 0), False, walked_path, 1)
if not myRobot.in_bounds():
running = False
pygame.display.flip()
#pygame.time.wait(30)
"""
clock.tick()
print("fps: ", clock.get_fps())
"""
pygame.quit()
|
eamuntz/Django-Tut
|
myproject/myproject/settings.py
|
Python
|
mit
| 2,048
| 0
|
"""
Django settings for myproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'lr51qxjbnu!8+yh8c^=j_2x)*7^cy5#=9(+eb$3@v1%fyqzddl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'polls'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'myproject.urls'
WSGI_APPLICATION = 'myproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
|
ProstoKSI/distributed-queue
|
distributed_queue/tests/test_backends_init.py
|
Python
|
mit
| 1,313
| 0.002285
|
import unittest
from distributed_queue import core, backends
class TestBackend(unittest.TestCase):
def test_list_backends(self):
backend_list = core.BACKEND_LIST
self.assertTrue('dummy' in backend_list)
self.assertTrue('redis' in backend_list)
def test_create_backend_fail(self):
backend = core.create_backend('error')
self.assertEqual(backend, None)
def test_create_backend_base(self):
backend = backends.BaseBackend()
self.assertRaises(NotImplementedError, backend.send, 'test', 'test')
self.assertRaises(NotImplementedError, backend.receive, ['test'])
    def test_create_backend_dummy(self):
backend = core.create_backend('dummy')
        self.assertTrue(backend is not None)
self.assertTrue(isinstance(backend, backends.BaseBackend))
self.assertTrue(getattr(backend, 'send', None) is not None)
self.assertTrue(getattr(backend, 'receive', None) is not None)
test_data = 'test 1 2 3'
backend.send('test', test_data)
item = backend.receive(['test'])
self.assertTrue(item[1] == test_data)
item = backend.receive(['other'])
self.assertEqual(item, None)
def test_create_backend_redis(self):
#TODO: Need to test redis backend
pass
|
dohop/supervisor-logstash-notifier
|
setup.py
|
Python
|
apache-2.0
| 1,635
| 0
|
#
# Copyright 2016 Dohop hf.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Setup script for building supervisor-logstash-notifier
"""
from setuptools import setup, find_packages
# 2 step 'with open' to be python2.6 compatible
with open('requirements.txt') as requirements:
with open('test_requirements.txt') as test_requirements:
setup(
name='supervisor-logstash-notifier',
version='0.2.5',
packages=find_packages(exclude=['tests']),
url='https://github.com/dohop/supervisor-logstash-notifier',
license='Apache 2.0',
author='aodj',
            author_email='alexander@dohop.com',
            description='Stream supervisor events to a logstash instance',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'logstash_notifier = logstash_notifier:main'
]
},
install_requires=requirements.read().splitlines(),
test_suite='tests',
            tests_require=test_requirements.read().splitlines(),
)
|
SecHackLabs/WebHackSHL
|
modules/tplmap/burp_extension/config_tab.py
|
Python
|
gpl-3.0
| 4,466
| 0.027765
|
from burp import ITab
from javax.swing import JPanel, GroupLayout, JLabel, JComboBox, JCheckBox
from java.awt import Dimension
from core.checks import plugins
class ConfigTab( ITab, JPanel ):
def __init__( self, callbacks ):
self._callbacks = callbacks
self._helpers = callbacks.getHelpers()
self.__initLayout__()
def __initLayout__( self ):
self._levelComboBox = JComboBox()
levelComboBoxSize = Dimension( 300, 30 )
self._levelComboBox.setPreferredSize( levelComboBoxSize )
self._levelComboBox.setMaximumSize( levelComboBoxSize )
for level in range( 0, 6 ):
self._levelComboBox.addItem( str( level ) )
self._techRenderedCheckBox = JCheckBox( 'Rendered', True )
self._techTimebasedCheckBox = JCheckBox( 'Time-based', True )
self._plugin_groups = {}
for plugin in plugins:
parent = plugin.__base__.__name__
if not self._plugin_groups.has_key( parent ):
self._plugin_groups[ parent ] = []
self._plugin_groups[ parent ].append( plugin )
self._pluginCheckBoxes = []
for pluginGroup in self._plugin_groups.values():
for plugin in pluginGroup:
self._pluginCheckBoxes.append( PluginCheckBox( plugin ) )
self._positionReplaceCheckBox = JCheckBox( 'Replace', True )
self._positionAppendCheckBox = JCheckBox( 'Append', False )
displayItems = (
{
'label': 'Level',
'components': ( self._levelComboBox, ),
'description': 'Level of code context escape to perform (1-5, Default:0).'
},
{
'label': 'Techniques',
                'components': ( self._techRenderedCheckBox, self._techTimebasedCheckBox, ),
                'description': 'Techniques R(endered) T(ime-based blind). Default: RT.'
            },
{
'label': 'Template Engines',
'components': self._pluginCheckBoxes,
'description': 'Force back-end template engine to this value(s).'
},
{
'label': 'Payload position',
'components': ( self._positionReplaceCheckBox, self._positionAppendCheckBox, ),
'description': 'Scan payload position. This feature only appears in BurpExtension.'
}
)
layout = GroupLayout( self )
self.setLayout( layout )
layout.setAutoCreateGaps( True )
layout.setAutoCreateContainerGaps( True )
labelWidth = 200
hgroup = layout.createParallelGroup( GroupLayout.Alignment.LEADING )
vgroup = layout.createSequentialGroup()
for displayItem in displayItems:
label = JLabel( displayItem.get( 'label' ) )
label.setToolTipText( displayItem.get( 'description' ) )
_hgroup = layout.createSequentialGroup().addComponent( label, labelWidth, labelWidth, labelWidth )
_vgroup = layout.createParallelGroup( GroupLayout.Alignment.BASELINE ).addComponent( label )
for component in displayItem.get( 'components' ):
_hgroup.addComponent( component )
_vgroup.addComponent( component )
hgroup.addGroup( _hgroup )
vgroup.addGroup( _vgroup )
layout.setHorizontalGroup( hgroup )
layout.setVerticalGroup( vgroup )
def getTabCaption( self ):
return 'Tplmap'
def getUiComponent( self ):
return self
def getLevel( self ):
return self._levelComboBox.getSelectedIndex()
def getTechniques( self ):
return '%s%s' % ( 'R' if self._techRenderedCheckBox.isSelected() else '', 'T' if self._techTimebasedCheckBox.isSelected() else '' )
def getEngines( self ):
return [ checkbox.getPlugin() for checkbox in self._pluginCheckBoxes if checkbox.isSelected() ]
def getPayloadPosition( self ):
return { 'replace': self._positionReplaceCheckBox.isSelected(), 'append': self._positionAppendCheckBox.isSelected() }
class PluginCheckBox( JCheckBox ):
def __init__( self, plugin ):
JCheckBox.__init__( self, plugin.__name__, True )
self._plugin = plugin
parent = plugin.__base__.__name__
tooltip = parent if( parent != 'Plugin' ) else 'eval'
self.setToolTipText( tooltip )
def getPlugin( self ):
return self._plugin
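The grouping loop in __initLayout__ buckets plugin classes by the name of their immediate base class (plugin.__base__.__name__), so engines sharing a parent land in one checkbox group. The same idiom in isolation, with throwaway classes invented for the example:

class Plugin(object): pass
class Jinja2(Plugin): pass
class Mako(Plugin): pass

groups = {}
for plugin in (Jinja2, Mako):
    parent = plugin.__base__.__name__      # 'Plugin' for both classes here
    groups.setdefault(parent, []).append(plugin)

assert groups['Plugin'] == [Jinja2, Mako]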
|
antani/cheapr
|
cheapr/app.py
|
Python
|
bsd-3-clause
| 1,780
| 0.002247
|
# -*- coding: utf-8 -*-
'''The app module, containing the app factory function.'''
from flask import Flask, render_template
from cheapr.settings import ProdConfig
from cheapr.assets import assets
from cheapr.extensions import (
bcrypt,
cache,
db,
login_manager,
migrate,
debug_toolbar,
)
from cheapr import public, user
from flask.ext.images import Images
def create_app(config_object=ProdConfig):
'''An application factory, as explained here:
http://flask.pocoo.org/docs/patterns/appfactories/
:param config_object: The configuration object to use.
'''
app = Flask(__name__)
app.config.from_object(config_object)
register_extensions(app)
    register_blueprints(app)
register_errorhandlers(app)
app.secret_key = 'Google'
app.images_cache='static/cache/images'
#https://medium.com/@5hreyans/the-one-weird-trick-that-cut-our-flask-page-load-time-by-70-87145335f679
app.jinja_env.cache = {}
images = Images(app)
#resize = Resize(app)
return app
def register_extensions(app):
assets.init_app(app)
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
debug_toolbar.init_app(app)
migrate.init_app(app, db)
return None
def register_blueprints(app):
app.register_blueprint(public.views.blueprint)
app.register_blueprint(user.views.blueprint)
return None
def register_errorhandlers(app):
def render_error(error):
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template("{0}.html".format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
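A typical way to use the factory is a small entry-point module that picks the config for the environment; a sketch assuming a DevConfig class exists alongside ProdConfig in cheapr.settings:

# run_dev.py (hypothetical local entry point)
from cheapr.app import create_app
from cheapr.settings import DevConfig  # assumed to exist next to ProdConfig

app = create_app(DevConfig)

if __name__ == '__main__':
    app.run(debug=True)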
|
limix/glimix-core
|
version.py
|
Python
|
mit
| 411
| 0
|
import re
from os.path import join
from setuptools import find_packages
def get():
pkgnames = find_packages()
if len(pkgnames) == 0:
return "unknown"
pkgname = pkgnames[0]
    content = open(join(pkgname, "__init__.py")).read()
c = re.compile(r"__version__ *= *('[^']+'|\"[^\"]+\")")
m = c.search(content)
if m is None:
return "unknown"
    return m.groups()[0][1:-1]
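The regex captures the quoted value of a module-level __version__ assignment, quotes included, and the final [1:-1] slice strips them. A quick, self-contained illustration with made-up file contents:

import re

c = re.compile(r"__version__ *= *('[^']+'|\"[^\"]+\")")
m = c.search('__version__ = "0.3.1"\n')
assert m.groups()[0] == '"0.3.1"'      # quotes still attached
assert m.groups()[0][1:-1] == '0.3.1'  # what get() returns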
|
vileopratama/vitech
|
src/addons/l10n_in_hr_payroll/report/report_hr_salary_employee_bymonth.py
|
Python
|
mit
| 4,552
| 0.004174
|
#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import datetime
import time
from openerp.osv import osv
from openerp.report import report_sxw
class report_hr_salary_employee_bymonth(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(report_hr_salary_employee_bymonth, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'get_employee': self.get_employee,
'get_periods': self.get_periods,
'get_months_tol': self.get_months_tol,
'get_total': self.get_total,
})
self.context = context
self.mnths = []
self.mnths_total = []
self.total = 0.0
def get_periods(self, form):
# Get start year-month-date and end year-month-date
first_year = int(form['start_date'][0:4])
last_year = int(form['end_date'][0:4])
first_month = int(form['start_date'][5:7])
last_month = int(form['end_date'][5:7])
no_months = (last_year-first_year) * 12 + last_month - first_month + 1
current_month = first_month
current_year = first_year
# Get name of the months from integer
mnth_name = []
for count in range(0, no_months):
m = datetime.date(current_year, current_month, 1).strftime('%b')
mnth_name.append(m)
self.mnths.append(str(current_month) + '-' + str(current_year))
if current_month == 12:
current_month = 0
current_year = last_year
current_month = current_month + 1
for c in range(0, (12-no_months)):
mnth_name.append('')
self.mnths.append('')
return [mnth_name]
def get_salary(self, form, emp_id, emp_salary, total_mnths):
category_id = form.get('category_id', [])
category_id = category_id and category_id[0] or False
self.cr.execute("select to_char(date_to,'mm-yyyy') as to_date ,sum(pl.total) \
from hr_payslip_line as pl \
left join hr_payslip as p on pl.slip_id = p.id \
left join hr_employee as emp on emp.id = p.employee_id \
left join resource_resource as r on r.id = emp.resource_id \
where p.state = 'done' and p.employee_id = %s and pl.category_id = %s \
group by r.name, p.date_to,emp.id",(emp_id, category_id,))
sal = self.cr.fetchall()
salary = dict(sal)
        total = 0.0
cnt = 0
for month in self.mnths:
            if month != '':
if len(month) != 7:
month = '0' + str(month)
if month in salary and salary[month]:
emp_salary.append(salary[month])
total += salary[month]
                    total_mnths[cnt] = total_mnths[cnt] + salary[month]
else:
emp_salary.append(0.00)
else:
emp_salary.append('')
total_mnths[cnt] = ''
cnt = cnt + 1
return emp_salary, total, total_mnths
def get_employee(self, form):
emp_salary = []
salary_list = []
total_mnths=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
emp_obj = self.pool.get('hr.employee')
emp_ids = form.get('employee_ids', [])
employees = emp_obj.browse(self.cr, self.uid, emp_ids, context=self.context)
for emp_id in employees:
emp_salary.append(emp_id.name)
total = 0.0
emp_salary, total, total_mnths = self.get_salary(form, emp_id.id, emp_salary, total_mnths)
emp_salary.append(total)
salary_list.append(emp_salary)
emp_salary = []
self.mnths_total.append(total_mnths)
return salary_list
def get_months_tol(self):
return self.mnths_total
def get_total(self):
for item in self.mnths_total:
for count in range(1, len(item)):
if item[count] == '':
continue
self.total += item[count]
return self.total
class wrapped_report_employee_salary_bymonth(osv.AbstractModel):
_name = 'report.l10n_in_hr_payroll.report_hrsalarybymonth'
_inherit = 'report.abstract_report'
_template = 'l10n_in_hr_payroll.report_hrsalarybymonth'
_wrapped_report_class = report_hr_salary_employee_bymonth
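One subtlety in get_salary: the SQL groups by to_char(date_to,'mm-yyyy'), which zero-pads the month ('03-2015'), while get_periods builds keys like '3-2015'. The len(month) != 7 check pads the short form so the dictionary lookup matches. The normalization in isolation, with invented figures:

salary = {'03-2015': 1200.0, '12-2015': 1500.0}  # keys shaped like to_char(date_to, 'mm-yyyy')

for month in ['3-2015', '12-2015']:
    if len(month) != 7:            # '3-2015' is six characters, so it lacks the zero pad
        month = '0' + str(month)
    assert month in salary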
|
rvs/gpdb
|
src/test/tinc/tincrepo/mpp/gpdb/tests/storage/pg_twophase/switch_ckpt_serial/trigger_sql/test_triggersqls.py
|
Python
|
apache-2.0
| 1,237
| 0.001617
|
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models import SQLTestCase
'''
Trigger sqls for create_tests
'''
class TestTriggerSQLClass(SQLTestCase):
'''
This class contains all the sqls that are part of the trigger phase
The sqls in here will get suspended by one of the faults that are triggered in the main run
@gpdiff False
'''
sql_dir = 'sql/'
@classmethod
def setUpClass(cls):
"""
Since some operation in this scenario is blocked, we want to run
only aimed SQLs without unexpected setup. Just make this no-op.
"""
pass
|
codelikeagirlcny/python-lessons-cny
|
code-exercises-etc/section_02_(strings)/z.ajm.str-format-phone-ex.20151024.py
|
Python
|
mit
| 169
| 0
|
phone = "315-555-2955"
|
prin
|
t "Area Code: {0}".format(phone[0:3])
print "Local: {0}".format(phone[4:])
print "Different format: ({0}) {1}".format(phone[0:3], phone[4:])
|
max0d41/ThugBrowser
|
src/DOM/Plugins.py
|
Python
|
gpl-2.0
| 1,544
| 0.002591
|
#!/usr/bin/env python
#
# Plugins.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
from .Plugin import Plugin
class Plugins(list):
def __init__(self):
list.__init__(self)
@property
def length(self):
return len(self)
def __getattr__(self, key):
return self.namedItem(key)
def __getitem__(self, key):
try:
            key = int(key)
return self.item(key)
        except (TypeError, ValueError):
return self.namedItem(key)
def item(self, index):
if index >= self.length:
return Plugin()
return list.__getitem__(self, index)
def namedItem(self, name):
index = 0
while index < self.length:
p = self.item(index)
            if p['name'].startswith(name):
return p
index += 1
print 'PLUGIN NOT FOUND:', name
return Plugin()
def refresh(self, reloadDocuments = False):
pass
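namedItem does a prefix match on each plugin's 'name' entry, so asking for 'Shockwave' finds 'Shockwave Flash'. A small sketch of the lookup; the dict standing in for a Plugin is an assumption, since the lookup only needs p['name'] to work:

plugins = Plugins()
plugins.append({'name': 'Shockwave Flash', 'version': '10.0.64.0'})  # dict as a stand-in Plugin

flash = plugins.namedItem('Shockwave')  # prefix match succeeds
assert flash['name'] == 'Shockwave Flash'
missing = plugins.namedItem('Java')     # prints 'PLUGIN NOT FOUND: Java', returns an empty Plugin()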
|
piton-package-manager/piton
|
piton/lib/inquirer/prompt.py
|
Python
|
mit
| 425
| 0
|
# -*- coding: utf-8 -*-

from .render.console import ConsoleRender
def prompt(questions, render=None, answers=None):
render = render or ConsoleRender()
answers = answers or {}
try:
for question in questions:
answers[question.name] = render.render(question, answers)
return answers
except KeyboardInterrupt:
print('')
print('Cancelled by user')
print('')
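The loop's only contract is that each question exposes a name attribute and that render.render(question, answers) returns the answer. A minimal sketch with stub objects; StubQuestion and EchoRender are invented here, not part of the package:

class StubQuestion(object):
    def __init__(self, name):
        self.name = name

class EchoRender(object):
    def render(self, question, answers):
        # A real renderer prompts the user; this stub just echoes the question name.
        return 'answer-for-%s' % question.name

result = prompt([StubQuestion('username')], render=EchoRender())
assert result == {'username': 'answer-for-username'}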
|