repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
Jumpscale/jumpscale6_core | apps/admin/jumpscripts/varia/upload_root_keys.py | Python | bsd-2-clause | 1,949 | 0.015393 |
from JumpScale import j
descr = """
Fetches the public keys from the vscalers_sysadmin repo and puts them in authorized_keys
use '-e system ' to only use the system key (e.g. in production env on a mgmt node)
"""
organization = "vscalers"
author = "tim@incubaid.com"
license = "bsd"
version = "1.0"
category = "ssh.keys.upload"
period = 1 # always in sec
startatboot = True
order = 1
enable = True
async = True
log = False
def action(node):
keys = []
cuapi=node.cuapi
tags=j.core.tags.getObject(node.args.extra)
basepath=j.dirs.replaceTxtDirVars(j.application.config.get("admin.basepath"))
d = j.system.fs.joinPaths(basepath, 'identities')
if not j.system.fs.exists(path=d):
raise RuntimeError("cannot find basepath:%s"%d)
if str(tags)<>"":
#only use system key
username=str(tags)
u = j.system.fs.joinPaths(basepath, 'identities',username)
filename = j.system.fs.joinPaths(u, 'id.hrd')
hrd=j.core.hrd.getHRD(filename)
pkey=hrd.get("id.key.dsa.pub")
keys.append(pkey)
print "Found", len(keys), "public system ssh keys" |
if str(tags)=="system":
for name in ["id_dsa","id_dsa.pub"]:
| u = j.system.fs.joinPaths(basepath, 'identities',username,name)
j.system.fs.copyFile(u,"/root/.ssh/%s"%name)
j.system.fs.chmod("/root/.ssh/%s"%name,384)
else:
# Fetch keys from repo
for filename in j.system.fs.listFilesInDir(d, recursive=True, filter='*id.hrd'):
hrd=j.core.hrd.getHRD(filename)
pkey=hrd.get("id.key.dsa.pub")
keys.append(pkey)
print "Found", len(keys), "public ssh keys"
#Remove current keys ##DEFAULT SHOULD NOT DO THIS
# cuapi.run("rm -f /root/.ssh/authorized_keys")
#Put new keys
for key in keys:
print key
print cuapi.ssh_authorize('root', key)
print "key added"
|
manuBocquet/ansible-report | callbackplugin/ansible-report.py | Python | gpl-3.0 | 5,751 | 0.009216 | # (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: debug
type: stdout
short_description: formated stdout/stderr display
description:
- Use this callback to sort though extensive debug output
version_added: "2.4"
extends_documentation_fragment:
- default_callback
requirements:
- set as stdout in configuration
'''
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
from ansible import constants as C
import json
import os
import time
import sqlite3
class CallbackModule(CallbackModule_default): # pylint: disable=too-few-public-methods,no-init
'''
Override for the default callback module.
Render std err/out outside of the rest of the result which it prints with
indentation.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'debug'
db = None
cdb = None
def __init__(self):
self.results = []
return CallbackModule_default.__init__(self)
################################################## | ################
# Add module filter to send data to an sqlite database
def init_sqlite(self, file):
#self._display.display("init database [%s]" % file)
self.db = sqlite3.connect(file)
self.cdb = self.db.cursor()
self.cdb.execute('''DROP TABLE IF EXISTS ansible''')
self.cdb.execute('''CREATE TABLE ansible( host TEXT, data TEXT, value TEXT, epoch REAL, task TEXT, unique (host,data,task))''')
def db_update_value(self, hostname, | data, value, epoch, task):
self.cdb.execute("INSERT or REPLACE INTO ansible (host,data,value,epoch,task) VALUES (\"%s\",\"%s\",\"%s\",\"%s\",\"%s\")" % (hostname,data,value,epoch,task))
def module_filter(self, result):
#self._display.display("Module filter, action: {}".format(result._task.action))
if result._task.action == "set_fact":
#self._display.display( json.dumps({"host": result._result}, indent=4) )
#self._display.display( "task {}".format(result._task.name ))
#if result._task.args:
# self._display.display(json.dumps(result._task.args))
if 'sqlite' in result._task.args:
if not self.db:
dbfile = result._task.args['sqlite']
self.init_sqlite(dbfile)
dargs = result._task.args['data']
for data in dargs:
value = str(dargs[data])
self.db_update_value(result._host,data,value,result._result['epoch'],result._task.name)
#########################
# End module filter
def _new_play(self, play):
return {
'play': {
'name': play.name,
'id': str(play._uuid)
},
'tasks': []
}
def _new_task(self, task):
return {
'task': {
'name': task.name,
'id': str(task._uuid)
},
'hosts': {}
}
def v2_playbook_on_play_start(self, play):
self.results.append(self._new_play(play))
return CallbackModule_default.v2_playbook_on_play_start(self, play)
def v2_playbook_on_task_start(self, task, is_conditional):
self.results[-1]['tasks'].append(self._new_task(task))
return CallbackModule_default.v2_playbook_on_task_start(self, task, is_conditional)
def v2_runner_on_ok(self, result):
host = result._host
self.results[-1]['tasks'][-1]['hosts'][host.name] = result._result
self.results[-1]['tasks'][-1]['hosts'][host.name]['epoch'] = time.time()
self.module_filter(result)
return CallbackModule_default.v2_runner_on_ok(self,result)
def v2_runner_on_failed(self, result, ignore_errors=False):
host = result._host
self.results[-1]['tasks'][-1]['hosts'][host.name] = result._result
self.results[-1]['tasks'][-1]['hosts'][host.name]['epoch'] = time.time()
return CallbackModule_default.v2_runner_on_failed(self,result,ignore_errors)
def v2_runner_on_unreachable(self, result):
host = result._host
self.results[-1]['tasks'][-1]['hosts'][host.name] = result._result
self.results[-1]['tasks'][-1]['hosts'][host.name]['epoch'] = time.time()
return CallbackModule_default.v2_runner_on_unreachable(self,result)
def v2_runner_on_skipped(self, result):
host = result._host
self.results[-1]['tasks'][-1]['hosts'][host.name] = result._result
self.results[-1]['tasks'][-1]['hosts'][host.name]['epoch'] = time.time()
return CallbackModule_default.v2_runner_on_skipped(self,result)
def v2_playbook_on_stats(self, stats):
"""Display info about playbook statistics"""
hosts = sorted(stats.processed.keys())
filename = os.getenv('ANSIBLE_REPORT_FILE',"report.json")
fileout = open(filename,"w")
summary = {}
for h in hosts:
s = stats.summarize(h)
summary[h] = s
output = {
'plays': self.results,
'stats': summary
}
fileout.write(json.dumps(output, indent=4, sort_keys=True))
#self._display.display(json.dumps(output, indent=4, sort_keys=True),stderr=True)
#self._display.display(json.dumps(self.results, indent=4, sort_keys=True),stderr=True)
if self.db:
self.db.commit()
self.db.close()
return CallbackModule_default.v2_playbook_on_stats(self, stats)
|
cloudtools/troposphere | troposphere/datapipeline.py | Python | bsd-2-clause | 2,721 | 0.002573 | # Copyright (c) 2012-2022, Mark Peek <mark@peek.org>
# All right | s reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
from . import AWSObject, AWSProperty, PropsDictType
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
"""
`ParameterObjectAttribute <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects-attributes.html>`__
"""
props: PropsDictType = {
"Key": (str, True),
"S | tringValue": (str, True),
}
class ParameterObject(AWSProperty):
"""
`ParameterObject <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects.html>`__
"""
props: PropsDictType = {
"Attributes": ([ParameterObjectAttribute], True),
"Id": (str, True),
}
class ParameterValue(AWSProperty):
"""
`ParameterValue <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parametervalues.html>`__
"""
props: PropsDictType = {
"Id": (str, True),
"StringValue": (str, True),
}
class ObjectField(AWSProperty):
"""
`ObjectField <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects-fields.html>`__
"""
props: PropsDictType = {
"Key": (str, True),
"RefValue": (str, False),
"StringValue": (str, False),
}
class PipelineObject(AWSProperty):
"""
`PipelineObject <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects.html>`__
"""
props: PropsDictType = {
"Fields": ([ObjectField], True),
"Id": (str, True),
"Name": (str, True),
}
class PipelineTag(AWSProperty):
"""
`PipelineTag <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelinetags.html>`__
"""
props: PropsDictType = {
"Key": (str, True),
"Value": (str, True),
}
class Pipeline(AWSObject):
"""
`Pipeline <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html>`__
"""
resource_type = "AWS::DataPipeline::Pipeline"
props: PropsDictType = {
"Activate": (boolean, False),
"Description": (str, False),
"Name": (str, True),
"ParameterObjects": ([ParameterObject], True),
"ParameterValues": ([ParameterValue], False),
"PipelineObjects": ([PipelineObject], False),
"PipelineTags": ([PipelineTag], False),
}
|
Mirio/steamstoreprice | steamstoreprice/steamstoreprice.py | Python | bsd-2-clause | 2,007 | 0.002495 | from steamstoreprice.exception import UrlNotSteam, PageNotFound, RequestGenericError
from bs4 import BeautifulSoup
import requests
class SteamStorePrice:
def normalizeurl(self, url):
"""
clean the url from referal and other stuff
:param url(string): amazon url
:return: string(url cleaned)
"""
if "://store.steampowered.com/app" in url:
return url
else:
raise UrlNotSteam("Please check | the url, it doesn't contain store.steampowered.com/app*")
def normalizeprice(self, price):
"""
remove the currenty from price
: | param price(string): price tag find on amazon store
:return: float(price cleaned)
"""
listreplace = ["€", "$", "£", "\t", "\r\n"]
for replacestring in listreplace:
price = price.replace(replacestring, "")
return float(price.replace(",", "."))
def getpage(self, url):
"""
Get the page and raise if status_code is not equal to 200
:param url(string): normalized(url)
:return: bs4(html)
"""
url = self.normalizeurl(url)
req = requests.get(url)
if req.status_code == 200:
return BeautifulSoup(req.text, "html.parser")
elif req.status_code == 404:
raise PageNotFound("Page not found, please check url")
else:
raise RequestGenericError("Return Code: %s, please check url" % req.status_code)
def getprice(self, url):
"""
Find the price on AmazonStore starting from URL
:param url(string): url
:return: float(price cleaned)
"""
body_content = self.getpage(self.normalizeurl(url))
try:
return self.normalizeprice(body_content.find("div", {"class": "game_purchase_price"}).contents[0])
except AttributeError:
return self.normalizeprice(body_content.find("div", {"class": "discount_final_price"}).contents[0])
|
Ghost-script/fedmsg_meta_fedora_infrastructure | fedmsg_meta_fedora_infrastructure/buildsys.py | Python | lgpl-2.1 | 16,007 | 0.000187 | # This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# | License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Fre | e Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
from fedmsg_meta_fedora_infrastructure import BaseProcessor
from fedmsg_meta_fedora_infrastructure.fasshim import avatar_url
import fedmsg.meta.base
import datetime
from pytz import UTC
try:
import koji
except ImportError:
koji = None
_build_template = """Package: {name}-{version}-{release}
Status: {status}
Built by: {owner_name}
ID: {id}
Started: {started}
Finished: {finished}
"""
_task_header_template = """Task {id} on {build_host}
Task Type: {method} ({arch})
Link: {url}
"""
import logging
log = logging.getLogger('fedmsg.meta.buildsys')
class KojiProcessor(BaseProcessor):
__name__ = "buildsys"
__description__ = "the Fedora build system"
__link__ = "http://koji.fedoraproject.org/koji"
__docs__ = "https://fedoraproject.org/wiki/Using_the_Koji_build_system"
__obj__ = "Koji Builds"
__icon__ = ("https://fedoraproject.org/w/uploads/2/20/"
"Artwork_DesignService_koji-icon-48.png")
@classmethod
def _fill_task_template(cls, sess, taskid):
file_base = 'https://kojipkgs.fedoraproject.org/work/'
info = sess.getTaskInfo(taskid)
if info['host_id'] is None:
info['build_host'] = '(unscheduled)'
else:
host = sess.getHost(info['host_id'])
info['build_host'] = host['name']
weburl = sess.baseurl.rsplit('/', 1)[0] + '/koji/'
info['url'] = weburl + 'taskinfo?taskID=%i' % info['id']
retval = _task_header_template.format(**info)
result = None
try:
result = sess.getTaskResult(taskid)
except Exception as e:
log.warning(unicode(e))
retval += "\n" + unicode(e) + "\n"
if result:
for kind in ['logs', 'rpms', 'srpms']:
if kind in result:
retval += kind + ":\n"
for item in result[kind]:
retval += " " + file_base + item + "\n"
for kind in ['srpm']:
if kind in result:
retval += kind + ":\n " + file_base + result[kind] + "\n"
children = sess.getTaskChildren(taskid)
for child in sorted(children, key=lambda d: d['completion_ts']):
retval += "\n" + cls._fill_task_template(sess, child['id'])
return retval
@classmethod
def _fill_build_template(cls, sess, build):
full_build = sess.getBuild(build['build_id'])
lookup = dict(zip(*zip(*koji.BUILD_STATES.items())[::-1]))
full_build['status'] = lookup[full_build['state']].lower()
fmt = '%a, %d %b %Y %H:%M:%S %Z'
try:
dt = datetime.datetime.fromtimestamp(
full_build['creation_ts'], UTC).strftime(fmt)
except TypeError:
dt = ''
full_build['started'] = dt
try:
dt = datetime.datetime.fromtimestamp(
full_build['completion_ts'], UTC).strftime(fmt)
except TypeError:
dt = ''
full_build['finished'] = dt
try:
_build_str = _build_template.format(**full_build)
except Exception as e:
log.warning(unicode(e))
_build_str = unicode(e) + "\n"
task_id = full_build['task_id']
if task_id is None:
_task_str = "Build imported into koji\n"
else:
try:
_task_str = "Closed tasks:\n-------------\n"
_task_str += cls._fill_task_template(sess, task_id)
except Exception as e:
log.warning(unicode(e))
_task_str = unicode(e) + "\n"
return _build_str + _task_str
def long_form(self, msg, **config):
instance = msg['msg'].get('instance', 'primary')
if instance == 'primary':
url = "https://koji.fedoraproject.org/kojihub"
elif instance == 'ppc':
url = "http://ppc.koji.fedoraproject.org/kojihub"
elif instance == 's390':
url = "http://s390.koji.fedoraproject.org/kojihub"
elif instance == 'arm':
url = "http://arm.koji.fedoraproject.org/kojihub"
if 'buildsys.build.state.change' in msg['topic'] and koji:
session = koji.ClientSession(url)
build = msg['msg']
long_form = self._fill_build_template(session, build)
return long_form
if 'buildsys.task.state.change' in msg['topic'] and koji:
session = koji.ClientSession(url)
taskid = msg['msg']['id']
try:
long_form = self._fill_task_template(session, taskid)
except Exception as e:
log.warning(unicode(e))
long_form = unicode(e)
return long_form
def subtitle(self, msg, **config):
inst = msg['msg'].get('instance', 'primary')
if inst == 'primary':
inst = ''
else:
inst = ' (%s)' % inst
if 'buildsys.tag' in msg['topic']:
tmpl = self._(
"{owner}'s {name}-{version}-{release} tagged "
"into {tag} by {user}{inst}"
)
return tmpl.format(inst=inst, **msg['msg'])
elif 'buildsys.untag' in msg['topic']:
tmpl = self._(
"{owner}'s {name}-{version}-{release} untagged "
"from {tag} by {user}{inst}"
)
return tmpl.format(inst=inst, **msg['msg'])
elif 'buildsys.repo.init' in msg['topic']:
tmpl = self._('Repo initialized: {tag}{inst}')
return tmpl.format(inst=inst, tag=msg['msg'].get('tag', 'unknown'))
elif 'buildsys.repo.done' in msg['topic']:
tmpl = self._('Repo done: {tag}{inst}')
return tmpl.format(inst=inst, tag=msg['msg'].get('tag', 'unknown'))
elif 'buildsys.package.list.change' in msg['topic']:
tmpl = self._(
"Package list change for {package}: '{tag}'{inst}")
return tmpl.format(inst=inst, **msg['msg'])
elif 'buildsys.rpm.sign' in msg['topic']:
tmpl = self._('Koji build '
'{name}-{version}-{release}.{arch}.rpm '
'signed with sigkey \'{sigkey}\'')
return tmpl.format(**msg['msg']['info'])
elif 'buildsys.build.state.change' in msg['topic']:
templates = [
self._("{owner}'s {name}-{version}-{release} "
"started building{inst}"),
self._("{owner}'s {name}-{version}-{release} "
"completed{inst}"),
self._("{owner}'s {name}-{version}-{release} "
"was deleted{inst}"),
self._("{owner}'s {name}-{version}-{release} "
"failed to build{inst}"),
self._("{owner}'s {name}-{version}-{release} "
"was cancelled{inst}"),
]
tmpl = templates[msg['msg']['new']]
# If there was no owner of the build, chop off the prefix.
if not msg['msg']['owner']:
tmpl = tmpl[len("{owner}'s "):]
return tmpl.format(inst=inst, **msg['msg'])
elif 'buildsys.task.state.change' in msg['topic']:
templates = {
'OPEN': self |
greenape/risky-aging-model | disclosuregame/Util/__init__.py | Python | mpl-2.0 | 3,317 | 0.001809 | __all__ = ["sqlite_dump", "sqlite_merge"]
from random import Random
import math
def random_expectations(depth=0, breadth=3, low=1, | high=10, random=Random()):
"""
Generate depth x breadth array of random numbers where each row sums to
high, with a minimum of low.
"""
result = []
if depth == 0:
initial = high + 1
for i in range(breadth - 1):
n = random.randint(low, initial - (low * (breadth - i)))
initial -= n
result.append(n)
result.append(initial - low)
random.s | huffle(result)
else:
result = [random_expectations(depth - 1, breadth, low, high, random) for x in range(breadth)]
return result
def rescale(new_low, new_high, low, diff, x):
scaled = (new_high-new_low)*(x - low)
scaled /= diff
return scaled + new_low
def weighted_random_choice(choices, weights, random=Random()):
population = [val for val, cnt in zip(choices, weights) for i in range(int(cnt))]
return random.choice(population)
def multinomial(probabilities, draws=1, random=Random()):
"""
Draw from a multinomial distribution
"""
def pick():
draw = random.random()
bracket = 0.
for i in range(len(probabilities)):
bracket += probabilities[i]
if draw < bracket:
return i
return i
result = [0] * len(probabilities)
for i in range(draws):
result[pick()] += 1
return result
def logistic_random(loc, scale, random=Random()):
"""
Return a random number from a specified logistic distribution.
"""
x = random.random()
return loc + scale * math.log(x / (1 - x))
def shuffled(target, random=Random()):
"""
Return a shuffled version of the argument
"""
a = target[:]
random.shuffle(a)
return a
def make_pbs_script(kwargs, hours=60, mins=0, ppn=16, script_name=None):
"""
Generate a PBS run script to be submitted.
"""
from disclosuregame.Util.sqlite_merge import list_matching
from os.path import split
args_dir, name = split(kwargs.kwargs[0])
kwargs_files = list_matching(args_dir, name)
count = len(kwargs_files)
import sys
args = sys.argv[1:]
args = " ".join(args)
args = args.replace("*", "${PBS_ARRAYID}")
args = args.replace(" %s " % kwargs.file_name, " ${PBS_ARRAYID}_%s " % kwargs.file_name)
if kwargs.file_name == "":
args += " -f ${PBS_ARRAYID}"
interpreter = sys.executable
run_script = ["#!/bin/bash -vx", "#PBS -l walltime=%d:%d:00" % (hours, mins), "#PBS -l nodes=1:ppn=%d" % ppn,
"module load python"]
# Doesn't work on multiple nodes, sadly
# Set up the call
run_call = "%s -m disclosuregame.run %s" % (interpreter, args)
run_script.append(run_call)
# Cleanup after all jobs have run
if script_name is not None:
run_script.append("if [$PBS_ARRAYID -eq %d]" % count)
run_script.append("then")
run_script.append("\trm %s" % script_name)
run_script.append("fi")
return '\n'.join(run_script), count
# ${python} Run.py -R 100 -s ${sig} -r ${resp} --pickled-arguments ../experiment_args/sensitivity_${PBS_ARRAYID}.args -f ${PBS_ARRAYID}_sensitivity -i 1000 -d ${dir} -g ${game}
|
andersonsilvade/python_C | Python32/web2py/scripts/bench.py | Python | mit | 295 | 0.016949 | import time
import | sys
import urllib2
import urllib2
n = int(sys.argv[1])
url = sys.argv[2]
headers = {"Accept-Language" : "en" }
req = urllib2.Request(url, None, headers)
t0 = time.time()
for k in xrange(n):
data = urllib2.urlopen(req).read()
print (t | ime.time()-t0)/n
if n==1: print data
|
Debian/openjfx | modules/web/src/main/native/Tools/Scripts/webkitpy/benchmark_runner/benchmark_results_unittest.py | Python | gpl-2.0 | 16,571 | 0.006216 | # Copyright (C) 2015 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEV | ER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import u | nittest
from benchmark_results import BenchmarkResults
class BenchmarkResultsTest(unittest.TestCase):
def test_init(self):
results = BenchmarkResults({'SomeTest': {'metrics': {'Time': {'current': [1, 2, 3]}}}})
self.assertEqual(results._results, {'SomeTest': {'metrics': {'Time': {None: {'current': [1, 2, 3]}}}, 'tests': {}}})
with self.assertRaisesRegexp(TypeError, r'"Time" metric of "SomeTest" contains non-numeric value: \[1, 2, "a"\]'):
BenchmarkResults({'SomeTest': {'metrics': {'Time': {'current': [1, 2, 'a']}}}})
def test_format(self):
result = BenchmarkResults({'SomeTest': {'metrics': {'Time': {'current': [1, 2, 3]}}}})
self.assertEqual(result.format(), 'SomeTest:Time: 2.0ms stdev=50.0%\n')
result = BenchmarkResults({'SomeTest': {'metrics': {'Time': {'current': [1, 2, 3]}, 'Score': {'current': [2, 3, 4]}}}})
self.assertEqual(result.format(), '''
SomeTest:Score: 3.0pt stdev=33.3%
:Time: 2.0ms stdev=50.0%
'''[1:])
result = BenchmarkResults({'SomeTest': {
'metrics': {'Time': ['Total', 'Arithmetic']},
'tests': {
'SubTest1': {'metrics': {'Time': {'current': [1, 2, 3]}}},
'SubTest2': {'metrics': {'Time': {'current': [4, 5, 6]}}}}}})
self.assertEqual(result.format(), '''
SomeTest:Time:Arithmetic: 3.0ms stdev=33.3%
:Time:Total: 7.0ms stdev=28.6%
SubTest1:Time: 2.0ms stdev=50.0%
SubTest2:Time: 5.0ms stdev=20.0%
'''[1:])
def test_format_values_with_large_error(self):
self.assertEqual(BenchmarkResults._format_values('Runs', [1, 2, 3]), '2.0/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [10, 20, 30]), '20/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [100, 200, 300]), '200/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [1000, 2000, 3000]), '2.0K/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [10000, 20000, 30000]), '20K/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [100000, 200000, 300000]), '200K/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [1000000, 2000000, 3000000]), '2.0M/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.1, 0.2, 0.3]), '200m/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.01, 0.02, 0.03]), '20m/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.001, 0.002, 0.003]), '2.0m/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.0001, 0.0002, 0.0003]), '200u/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.00001, 0.00002, 0.00003]), '20u/s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.000001, 0.000002, 0.000003]), '2.0u/s stdev=50.0%')
def test_format_values_with_small_error(self):
self.assertEqual(BenchmarkResults._format_values('Runs', [1.1, 1.2, 1.3]), '1.20/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [11, 12, 13]), '12.0/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [110, 120, 130]), '120/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [1100, 1200, 1300]), '1.20K/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [11000, 12000, 13000]), '12.0K/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [110000, 120000, 130000]), '120K/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [1100000, 1200000, 1300000]), '1.20M/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.11, 0.12, 0.13]), '120m/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.011, 0.012, 0.013]), '12.0m/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.0011, 0.0012, 0.0013]), '1.20m/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.00011, 0.00012, 0.00013]), '120u/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.000011, 0.000012, 0.000013]), '12.0u/s stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Runs', [0.0000011, 0.0000012, 0.0000013]), '1.20u/s stdev=8.3%')
def test_format_values_with_time(self):
self.assertEqual(BenchmarkResults._format_values('Time', [1, 2, 3]), '2.0ms stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Time', [10, 20, 30]), '20ms stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Time', [100, 200, 300]), '200ms stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Time', [1000, 2000, 3000]), '2.0s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Time', [10000, 20000, 30000]), '20s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Time', [100000, 200000, 300000]), '200s stdev=50.0%')
self.assertEqual(BenchmarkResults._format_values('Time', [0.11, 0.12, 0.13]), '120us stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Time', [0.011, 0.012, 0.013]), '12.0us stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Time', [0.0011, 0.0012, 0.0013]), '1.20us stdev=8.3%')
self.assertEqual(BenchmarkResults._format_values('Time', [0.00011, 0.00012, 0.00013]), '120ns stdev=8.3%')
def test_format_values_with_no_error(self):
self.assertEqual(BenchmarkResults._format_values('Time', [1, 1, 1]), '1.00ms stdev=0.0%')
def test_format_values_with_small_difference(self):
self.assertEqual(BenchmarkResults._format_values('Time', [5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4]),
'4.05ms stdev=5.5%')
def test_aggregate_results(self):
self.maxDiff = None
self.assertEqual(BenchmarkResults._aggregate_results(
{'SomeTest': {'metrics': {'Time': {'current': [1, 2, 3]}}}}),
{'SomeTest': {'metrics': {'Time': {None: {'current': [1, 2, 3]}}}, 'tests': {}}})
self.assertEqual(BenchmarkResults._aggregate_results(
{'SomeTest': {
'metrics': {'Time': ['Total']},
'tests': {
'SubTest1': {'metrics': {'Time': {'current': [1, 2, 3]}}},
'SubTest2': {'metrics': {'Time': {'current': [4, 5, 6]}}}}}}),
{'SomeTest': {
'metrics': {'Time': {'Total': {'current': [5, 7, 9]}}},
'tests': {
'SubTest1': {'metrics': {'Time': {None: {'current': [1, 2, 3]}}}, 'tests': {}},
|
MTG/gaia | src/bindings/pygaia/scripts/dataset_to_csv.py | Python | agpl-3.0 | 1,937 | 0.002065 | #!/usr/bin/env python
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Gaia
#
# Gaia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the G | NU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from __future__ import print_function
import sys
import g | aia2
def dataset_to_csv(filename, csv_filename):
ds = gaia2.DataSet()
ds.load(filename)
out = open(csv_filename, 'w')
valueNames = ds.layout().descriptorNames(gaia2.RealType)
labelNames = ds.layout().descriptorNames(gaia2.StringType)
out.write('Track name;')
for name in labelNames:
out.write('%s;' % name)
for name in valueNames:
out.write('%s;' % name)
out.write('\n')
for cname in ds.collectionNames():
for pname in ds.collection(cname).pointNames():
p = ds.collection(cname).point(pname)
out.write('%s;' % pname)
for name in labelNames:
out.write('%s;' % p.label(name))
for name in valueNames:
out.write('%s;' % str(p.value(name)))
out.write('\n')
if __name__ == '__main__':
if len(sys.argv) < 2:
print('USAGE: %s gaia_dataset.db output.csv' % sys.argv[0])
sys.exit(1)
ds_filename = sys.argv[1]
csv_filename = sys.argv[2]
dataset_to_csv(ds_filename, csv_filename)
|
axaxs/pyparted | src/parted/partition.py | Python | gpl-2.0 | 9,919 | 0.001512 | #
# Code modified from original to work with Python 3
# Alex Skinner
# alex@lx.lc
# 12/28/2012
#
# partition.py
# Python bindings for libparted (built on top of the _ped Python module).
#
# Copyright (C) 2009 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Chris Lumens <clumens@redhat.com>
# David Cantrell <dcantrell@redhat.com>
#
import math
import string
import warnings
import _ped
import parted
from .decorators import localeC
# XXX: add docstrings
class Partition():
    @localeC
    def __init__(self, disk=None, type=None, fs=None, geometry=None, PedPartition=None):
        """Initialize a Partition.

        Either construct a fresh partition from (disk, type, geometry[, fs]),
        or wrap an existing low-level _ped.Partition via PedPartition, in
        which case the remaining arguments (except an optional disk) are
        derived from the wrapped object.

        Raises parted.PartitionException if PedPartition is not given and
        any of disk, type or geometry is missing.
        """
        if PedPartition is None:
            if disk is None:
                raise parted.PartitionException("no disk specified")
            elif type is None:
                raise parted.PartitionException("no type specified")
            elif geometry is None:
                raise parted.PartitionException("no geometry specified")
            self._fileSystem = fs
            self._geometry = geometry
            self._disk = disk
            # Only pass a filesystem type down to _ped when one was supplied.
            if fs is None:
                self.__partition = _ped.Partition(disk.getPedDisk(), type, geometry.start, geometry.end)
            else:
                self.__partition = _ped.Partition(disk.getPedDisk(), type, geometry.start, geometry.end, parted.fileSystemType[fs.type])
        else:
            # Wrap the provided low-level partition and rebuild the
            # high-level geometry/disk/filesystem views from it.
            self.__partition = PedPartition
            self._geometry = parted.Geometry(PedGeometry=self.__partition.geom)
            if disk is None:
                self._disk = parted.Disk(PedDisk=self.__partition.disk)
            else:
                self._disk = disk
            if self.__partition.fs_type is None:
                self._fileSystem = None
            else:
                self._fileSystem = parted.FileSystem(type=self.__partition.fs_type.name, geometry=self._geometry)
    def __eq__(self, other):
        # Equality is simply the negation of __ne__ (which compares path,
        # type, geometry and filesystem).
        return not self.__ne__(other)
    def __ne__(self, other):
        # Equal hashes short-circuit to "not unequal".  NOTE(review): no
        # __hash__ is visible in this chunk, so this presumably relies on
        # identity-based hashing inherited from object -- confirm.
        if hash(self) == hash(other):
            return False
        # Different classes can never compare equal.
        if type(self) != type(other):
            return True
        # Otherwise compare the externally observable attributes.
        return self.path != other.path or self.type != other.type or self.geometry != other.geometry or self.fileSystem != other.fileSystem
    def __str__(self):
        """Return a multi-line human-readable summary of this partition."""
        # self.name raises PartitionException on disk labels that do not
        # support partition names; fall back to None in that case.
        try:
            name = self.name
        except parted.PartitionException:
            name = None
        s = ("parted.Partition instance --\n"
             " disk: %(disk)r fileSystem: %(fileSystem)r\n"
             " number: %(number)s path: %(path)s type: %(type)s\n"
             " name: %(name)s active: %(active)s busy: %(busy)s\n"
             " geometry: %(geometry)r PedPartition: %(ped)r" %
             {"disk": self.disk, "fileSystem": self.fileSystem, "geometry": self.geometry,
              "number": self.number, "path": self.path,
              "type": self.type, "name": name, "active": self.active,
              "busy": self.busy, "ped": self.__partition})
        return s
def __writeOnly(self, property):
raise parted.WriteOnlyProperty(property)
@property
@localeC
def active(self):
"""True if the partition is active, False otherwise."""
return bool(self.__partition.is_active())
@property
@localeC
def busy(self):
"""True if the partition is active, False otherwise."""
return bool(self.__partition.is_busy())
@property
def disk(self):
"""The Disk this partition belongs to."""
return self._disk
@property
@localeC
def path(self):
"""The filesystem path to this partition's device node."""
return self.__partition.get_path()
@property
@localeC
def name(self):
"""The name of this partition."""
try:
return self.__partition.get_name()
except parted.PartitionException as msg:
return None
@property
def number(self):
"""The partition number."""
return self.__partition.num
fileSystem = property(lambda s: s._fileSystem, lambda s, v: setattr(s, "_fileSystem", v))
geometr | y = property(lambda s: s._geometry, lambda s, v: setattr(s, "_geometry", v))
system = property(lambda s: s.__writeOnly("system"), lambda s, v: s.__partition.set_system(v))
type = property(lambda s: s.__partition.type, lambd | a s, v: setattr(s.__partition, "type", v))
@localeC
def getFlag(self, flag):
"""Get the value of a particular flag on the partition. Valid flags
are the _ped.PARTITION_* constants. See _ped.flag_get_name() and
_ped.flag_get_by_name() for more help working with partition flags.
"""
return self.__partition.get_flag(flag)
@localeC
def setFlag(self, flag):
"""Set the flag on a partition to the provided value. On error, a
PartitionException will be raised. See getFlag() for more help on
working with partition flags."""
return self.__partition.set_flag(flag, 1)
@localeC
def unsetFlag(self, flag):
"""Unset the flag on this Partition. On error, a PartitionException
will be raised. See getFlag() for more help on working with
partition flags."""
return self.__partition.set_flag(flag, 0)
@localeC
def getMaxGeometry(self, constraint):
"""Given a constraint, return the maximum Geometry that self can be
grown to. Raises Partitionexception on error."""
return parted.Geometry(PedGeometry=self.disk.getPedDisk().get_max_partition_geometry(self.__partition, constraint))
@localeC
def isFlagAvailable(self, flag):
"""Return True if flag is available on this Partition, False
otherwise."""
return self.__partition.is_flag_available(flag)
@localeC
def nextPartition(self):
"""Return the Partition following this one on the Disk."""
partition = self.disk.getPedDisk().next_partition(self.__partition)
if partition is None:
return None
else:
return parted.Partition(disk=self.disk, PedPartition=partition)
    @localeC
    def getSize(self, unit="MB"):
        """Return the size of the partition in the unit specified.

        Deprecated: use getLength() instead (a DeprecationWarning is
        emitted on every call).  The unit is given as a string
        corresponding to one of the following abbreviations: b (bytes),
        KB (kilobytes), MB (megabytes), GB (gigabytes), TB (terabytes).
        An invalid unit string will raise a SyntaxError exception.  The
        default unit is MB."""
        warnings.warn("use the getLength method", DeprecationWarning)
        # Delegate to the geometry, which knows the sector count and size.
        return self.geometry.getSize(unit)
@localeC
def getLength(self, unit='sectors'):
"""Return the length of the partition in sectors. Optionally, a SI or
IEC prefix followed by a 'B' may be given in order to convert the
length into bytes. The allowed values include B, kB, MB, GB, TB, KiB,
MiB, GiB, and TiB."""
return self.geometry.getLength(unit)
def getFlagsAsString(self):
"""Return a comma-separated string representing the flags
on this partition."""
flags = []
for flag in partitionFlag.keys():
if self.getFlag(flag):
flags.append(partitionFlag[flag])
return s |
degoldschmidt/pytrack-analysis | examples/run_post_tracking.py | Python | gpl-3.0 | 5,316 | 0.005455 | import os
import numpy as np
import pandas as pd
from pytrack_analysis import Multibench
from pytrack_analysis.dataio import VideoRawData
from pytrack_analysis.profile import get_profile, get_scriptname, show_profile
from pytrack_analysis.posttracking import frameskips, get_displacements, mistracks, get_head_tail, get_corrected_flips
from pytrack_analysis.viz import plot_along, plot_fly, plot_interval, plot_overlay, plot_ts
def main():
    """Post-tracking pass for the 'DIFF' experiment.

    Loads the raw video data for every recorded session, attaches the arena
    definition and trajectory columns, then unloads the data again.  Most of
    the heavy per-arena processing is currently disabled (see the commented
    block below this function).
    """
    experiment = 'DIFF'
    user = 'degoldschmidt'
    profile = get_profile(experiment, user)
    # NOTE(review): hard-coded absolute path -- only works on the author's
    # machine; consider making this configurable.
    basedir = profile.set_folder('/Users/degoldschmidt/Desktop/tracking_test_data')
    ### Define raw data structure (column order expected from the tracker)
    colnames = ['datetime', 'elapsed_time', 'frame_dt', 'body_x', 'body_y', 'angle', 'major', 'minor']
    #colunits = ['Datetime', 's', 's', 'px', 'px', 'rad', 'px', 'px']
    raw_data = VideoRawData(experiment, basedir)
    ### go through all sessions
    for i_session, video in enumerate(raw_data.videos):
        ###
        ### arena + food spots
        video.load_arena()
        ### trajectory data
        video.load_data()
        video.data.reindex(colnames)
        #video.data.center_to_arena(video.arenas)
        ### fly/experiment metadata
        #for fly_idx, fly_data in enumerate(raw_data.get_data()):
        ###
        # Free the per-video data before moving to the next session.
        video.unload_data()
    del profile
"""
mistrk_list = []
### for each arena
for i_arena, each_df in enumerate(raw_data.get_data()):
### compute head and tail positions
each_df['head_x'], each_df['head_y'], each_df['tail_x'], each_df['tail_y'] = get_head_tail(each_df, x='body_x', y='body_y', angle='angle', major='major')
### compute frame-to-frame displacements
arena = raw_data.arenas[i_arena]
each_df['displacement'], each_df['dx'], each_df['dy'], each_df['mov_angle'], each_df['align'], each_df['acc'] = get_displacements(each_df, x='body_x', y='body_y', angle='angle')
### detect mistracked frames
each_df, mistr = mistracks(each_df, i_arena, dr='displacement', major='major', thresholds=(4*8.543, 5*8.543))
mistrk_list.append(len(mistr))
file_id = 4 * (i_session) + i_arena
_file = os.path.join(folders['processed'],'pixeldiff','{}_{:03d}.csv'.format(experiment, file_id))
### flips START-----
df = pd.read_csv(_file, index_col='frame')
each_df['headpx'], each_df['tailpx'] = df['headpx'], df['tailpx']
each_df = get_corrected_flips(each_df)
### scale trajectories to mm
#print(raw_data.get_data(0).head(3))
scale = 8.543
raw_data.set_scale('fix_scale', scale, unit='mm')
raw_data.flip_y()
print(mistrk_list)
#print(raw_data.get_data(0).head(3))
#plot_traj(raw_data, scale, time=(raw_data.first_frame, raw_data.last_frame), only='tail')
for i_arena, each_df in enumerate(raw_data.get_data()):
file_id = 4 * i_session + i_arena
_file = os.path.join(folders['processed'], 'post_tracking','{}_{:03d}.csv'.format(experiment, file_id))
out_df = each_df[['datetime', 'elapsed_time', 'frame_dt', 'body_x', 'body_y', 'head_x', 'head_y', 'tail_x', 'tail_y', 'angle', 'major', 'minor', 'flipped']]
out_df.to_csv(_file, index_label='frame')
meta_dict = {}
arena = raw_data.arenas[i_arena]
#### meta_dict save
import yaml
import io
with open(os.path.join(folders['manual'],'conditions.yaml'), 'r') as stream:
try:
conds = yaml.load(stream)
except yaml.YAMLError as exc:
print(exc)
meta_dict['arena'] = {'x': float(arena.x), 'y': float(arena.y), 'layout': conds['arena_layout'], 'name': arena.name, 'outer': float(arena.outer), 'radius': float(arena.r), 'scale': arena.pxmm}
meta_dict['condition'] = raw_data.condition[i_arena]
meta_dict['datafile'] = _file
meta_dict['datetime'] = raw_data.timestamp
meta_dict['flags'] = {'mistracked_frames': mistrk_list[i_arena]}
spots = arena.spots
meta_dict['food_spots'] = [{'x': float(each.rx), 'y': float(each.ry), 'r': 1.5, 'substr': each.substrate} for each in spots]
meta_dict['fly'] = {'genotype': conds['genotype'], 'mating': conds['mating'], 'metabolic': raw_data.condition[i_arena], 'n_per_arena': conds['num_flies'], 'sex': conds['sex']}
meta_dict['setup'] = {'light': conds['light'], 'humidity': conds['humidity'], 'name': conds['setup'], 'room': 'behavior room', 'temperature': '25C'}
meta_dict['video'] = {'dir': folders['videos'], 'file': raw_data.video_file, 'first_frame': int(raw_data.first_frame), 'last_frame': int(raw_data.last_frame), 'nframes': len(each_df.index), ' | start_time': | raw_data.starttime}
_yaml = _file[:-4]+'.yaml'
with io.open(_yaml, 'w', encoding='utf8') as f:
yaml.dump(meta_dict, f, default_flow_style=False, allow_unicode=True)
"""
if __name__ == '__main__':
# runs as benchmark test
test = Multibench("", SILENT=False, SLIM=True)
test(main)
del test
|
dpaschall/test_TensorFlow | bin/cifar10test/cifar10_train.py | Python | gpl-3.0 | 4,167 | 0.00528 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A binary to train CIFAR-10 using a single GPU.
Accuracy:
cifar10_train.py achieves ~86% accuracy after 100K steps (256 epochs of
data) as judged by cifar10_eval.py.
Speed: With batch_size 128.
System | Step Time (sec/batch) | Accuracy
------------------------------------------------------------------
1 Tesla K20m | 0.35-0.60 | ~86% at 60K steps (5 hours)
1 Tesla K40m | 0.25-0.35 | ~86% at 100K steps (4 hours)
Usage:
Please see the tutorial and website for how to download the CIFAR-10
data set, compile the program and train the model.
http://tensorflow.org/tutorials/deep_cnn/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import time
import tensorflow as tf
import cifar10
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('train_dir', '/tmp/cifar10_train',
"""Directory where to write event logs """
"""and checkpoint.""")
tf.app.flags.DEFINE_integer('max_steps', 100000, #reduced significantly -daniel
"""Number of batches to run.""")
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
def train():
"""Train CIFAR-10 for a number of steps."""
with tf.Graph().as_default():
global_step = tf.contrib | .framework.get_or_create_global_step()
# Get images and labels for CIFAR-10.
images, labels = cifar10.distorted_inputs()
# Build a Graph that computes the logits predictions from the
# inference model.
logits = cifar10.inference(images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters. |
train_op = cifar10.train(loss, global_step)
class _LoggerHook(tf.train.SessionRunHook):
"""Logs loss and runtime."""
def begin(self):
self._step = -1
def before_run(self, run_context):
self._step += 1
self._start_time = time.time()
return tf.train.SessionRunArgs(loss) # Asks for loss value.
def after_run(self, run_context, run_values):
duration = time.time() - self._start_time
loss_value = run_values.results
if self._step % 10 == 0:
num_examples_per_step = FLAGS.batch_size
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), self._step, loss_value,
examples_per_sec, sec_per_batch))
with tf.train.MonitoredTrainingSession(
checkpoint_dir=FLAGS.train_dir,
hooks=[tf.train.StopAtStepHook(last_step=FLAGS.max_steps),
tf.train.NanTensorHook(loss),
_LoggerHook()],
config=tf.ConfigProto(
log_device_placement=FLAGS.log_device_placement)) as mon_sess:
while not mon_sess.should_stop():
mon_sess.run(train_op)
def main(argv=None):  # pylint: disable=unused-argument
  """Entry point: fetch CIFAR-10 data, reset the train dir, and train."""
  cifar10.maybe_download_and_extract()
  # Start every run from a clean checkpoint/event-log directory.
  if tf.gfile.Exists(FLAGS.train_dir):
    tf.gfile.DeleteRecursively(FLAGS.train_dir)
  tf.gfile.MakeDirs(FLAGS.train_dir)
  train()
if __name__ == '__main__':
tf.app.run() |
owtf/ptp | tests/tools/robots/robots_reports.py | Python | bsd-3-clause | 232 | 0 | report_info = """User-agent: *
Disallow: /se | arch
Disallow: /sdch
Disallow: /groups
Disallow: /images
Disallow: /admin
Disallow: /catalogs
Allow: /catalogs/about
Allow: /catalogs/p?"""
report_unknown = """User-agent: *
Allow: /""" | |
DEAP/deap | doc/code/benchmarks/kursawe.py | Python | lgpl-3.0 | 855 | 0.009357 | from mpl_toolkits.mplot3d import A | xes3D
from matplotlib import cm
import matplotlib.pyplot as plt
try:
import numpy as np
except:
exit()
from deap import benchmarks
X = np.arange(-5, 5, 0.1)
Y = np.arange(-5, 5, 0.1)
X, Y = np.meshgrid(X, Y)
Z1 = np.zeros(X.shape)
Z2 = np.zeros(X.shape)
for i in range(X.shape[0]):
for j in range(X.shape[1]):
Z1[i,j], Z2[i,j] = benchmarks.kursawe((X[i,j],Y[i,j]))
fig = plt.figure(figsize=(12,5))
ax = fig.add_subplot | (1, 2, 1, projection='3d')
ax.plot_surface(X, Y, Z1, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2)
plt.xlabel("x")
plt.ylabel("y")
ax = fig.add_subplot(1, 2, 2, projection='3d')
ax.plot_surface(X, Y, Z2, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2)
plt.xlabel("x")
plt.ylabel("y")
plt.subplots_adjust(left=0, right=1, bottom=0, top=1, wspace=0, hspace=0)
plt.show() |
klyap/pipe2py | pipe2py/modules/pipeurlinput.py | Python | gpl-2.0 | 822 | 0 | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
pipe2py.modules.pipeurlinput
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
http://pipes.yahoo.com/pipes/docs?doc=user_inputs#URL
"""
from pipe2py.lib import utils
def pipe_urlinput(context=None, _INPUT=None, conf=None, **kwargs):
    """An input that prompts the user for a url and yields it forever.

    Not loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : unused
    conf : {
        'name': {'value': 'parameter name'},
        'prompt': {'value': 'User prompt'},
        'default': {'value': 'default value'},
        'debug': {'value': 'debug value'}
    }

    Yields
    ------
    _OUTPUT : url
    """
    value = utils.get_input(context, conf)
    # Quote the URL once up front; the same sanitized value is yielded forever.
    value = utils.url_quote(value)
    while True:
        yield value
|
asm-products/pants-party | textjokes/migrations/0003_auto_20150323_2213.py | Python | agpl-3.0 | 1,299 | 0.00154 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('textjokes', '0002_auto_20150323_1524'),
]
operations = [
migrations.CreateModel(
name='TextPunchline',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(null=True, blank=True)),
('text', models.CharField(max_length=255)),
('active', models.BooleanField(default=True)),
('responses', models.IntegerField(default=0)),
('score', models.IntegerField(default=1)),
('joke', models.ForeignKey(to='textjokes.TextJoke')),
('user', models.ForeignKey(related_name='punchlines', to=settings.AUTH_USER_MODEL)),
| ],
options={ |
'ordering': ['-id'],
},
bases=(models.Model,),
),
migrations.AlterModelOptions(
name='textjoke',
options={'ordering': ['-id']},
),
]
|
CINPLA/expipe-dev | expipe-plugin-cinpla/tests/test_intan.py | Python | gpl-3.0 | 2,175 | 0.005057 | import pytest
import expipe
import subprocess
import click
from click.testing import CliRunner
import quantities as pq
import os.path as op
from expipe_plugin_cinpla.intan import IntanPlugin
from expipe_plugin_cinpla.electrical_stimulation import ElectricalStimulationPlugin
fr | om expipe_plugin_cinpla.main import CinplaPlugin
expipe.ensure_testing()
@click.group()
@click.pass_context
def cli(ctx):
    # Root click group for the tests; the plugin CLIs are attached to it
    # just below so run_command() can invoke their subcommands.
    pass
IntanPlugin().attach_to_cli(cli)
ElectricalStimulationPlugin().attach_to_cli(cli)
CinplaPlugin().attach_to_cli(cli)
def run_command(command_list | , inp=None):
runner = CliRunner()
result = runner.invoke(cli, command_list, input=inp)
if result.exit_code != 0:
print(result.output)
raise result.exception
def test_intan():#module_teardown_setup_project_setup):
    """End-to-end test: register, process and analyse a single Intan .rhs file."""
    currdir = op.abspath(op.dirname(__file__))
    intan_path = op.join(currdir, 'test_data', 'intan',
                         'test-rat_2017-06-23_11-15-46_1',
                         'test_170623_111545_stim.rhs')
    action_id = 'test-rat-230617-01'
    data_path = op.join(expipe.settings['data_path'],
                        pytest.USER_PAR.project_id,
                        action_id)
    # Remove any leftover data from a previous run so registration starts clean.
    if op.exists(data_path):
        import shutil
        shutil.rmtree(data_path)
    # 'y' answers the confirmation prompt during registration.
    run_command(['register-intan', intan_path, '--no-move'], inp='y')
    run_command(['process-intan', action_id])
    run_command(['analyse', action_id, '--spike-stat', '--psd', '--tfr','--spike-stat'])
def test_intan_ephys():#module_teardown_setup_project_setup):
    """End-to-end test: register, process and analyse a combined Intan+ephys session."""
    currdir = op.abspath(op.dirname(__file__))
    intan_ephys_path = op.join(currdir, 'test_data', 'intan',
                               'test-rat_2017-06-23_11-15-46_1')
    # NOTE(review): reuses the same action id as test_intan -- the tests are
    # therefore order-dependent; confirm this is intentional.
    action_id = 'test-rat-230617-01'
    data_path = op.join(expipe.settings['data_path'],
                        pytest.USER_PAR.project_id,
                        action_id)
    # Remove any leftover data from a previous run so registration starts clean.
    if op.exists(data_path):
        import shutil
        shutil.rmtree(data_path)
    run_command(['register-intan-ephys', intan_ephys_path, '--no-move'], inp='y')
    run_command(['process-intan-ephys', action_id])
    run_command(['analyse', action_id, '--all'])
ruchee/vimrc | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/t/typing/typing_consider_using_alias_without_future.py | Python | mit | 1,918 | 0.005214 | """Test pylint.extension.typing - consider-using-alias
'py-version' needs to be set to '3.7' or ' | 3.8' and 'runtime-typing=no'.
"""
# pylint: disable=missing-docstring,invalid-name,unused-argument,line-too-long,unsubscriptable-object
import collections
import collections.abc
import typing
from collections.abc import Awaitable
from dataclasses import dataclass
from typing import Dict, List, Set, Union, TypedDict
var1: typing.Dict[str, int] # [consider-using-alias]
var2: List[int] # [consider-using-alias]
var3: collections.abc.Iterable[int]
var4: typi | ng.OrderedDict[str, int] # [consider-using-alias]
var5: typing.Awaitable[None] # [consider-using-alias]
var6: typing.Iterable[int] # [consider-using-alias]
var7: typing.Hashable # [consider-using-alias]
var8: typing.ContextManager[str] # [consider-using-alias]
var9: typing.Pattern[str] # [consider-using-alias]
var10: typing.re.Match[str] # [consider-using-alias]
var11: list[int]
var12: collections.abc
var13: Awaitable[None]
var14: collections.defaultdict[str, str]
Alias1 = Set[int]
Alias2 = Dict[int, List[int]]
Alias3 = Union[int, typing.List[str]]
Alias4 = List # [consider-using-alias]
def func1(arg1: List[int], /, *args: List[int], arg2: set[int], **kwargs: Dict[str, int]) -> typing.Tuple[int]:
# -1:[consider-using-alias,consider-using-alias,consider-using-alias,consider-using-alias]
pass
def func2(arg1: list[int]) -> tuple[int, int]:
pass
class CustomIntList(typing.List[int]):
pass
cast_variable = [1, 2, 3]
cast_variable = typing.cast(List[int], cast_variable)
(lambda x: 2)(List[int])
class CustomNamedTuple(typing.NamedTuple):
my_var: List[int] # [consider-using-alias]
CustomTypedDict1 = TypedDict("CustomTypedDict1", my_var=List[int])
class CustomTypedDict2(TypedDict):
my_var: List[int] # [consider-using-alias]
@dataclass
class CustomDataClass:
my_var: List[int] # [consider-using-alias]
|
tranqui/PyTrajectories | saddle-nucleation.py | Python | gpl-3.0 | 1,177 | 0.008496 | #!/usr/bin/env python2.7
from configuration import *
from pylab import *
import copy
# Non-dimensional units where D=sigma=1.
rho = 25 # good model for hard sphere.
def morse_potential(r):
    """Morse-style pair potential (1 - exp(-rho*(r-1)))**2 in reduced units.

    Bug fix: the original used '^', which is bitwise XOR in Python and
    raises TypeError on floats; exponentiation is '**'.
    """
    return (1 - exp(-rho*(r-1)))**2
def morse_force(r):
    """Scalar radial force for the Morse-style potential above.

    NOTE(review): -d/dr of (1 - exp(-rho*(r-1)))**2 is
    -2*rho*exp(-rho*(r-1))*(1 - exp(-rho*(r-1))); this expression omits the
    factor rho -- confirm whether that scaling is intentional.
    """
    return -2*exp(-rho*(r-1))*(1 - exp(-rho*(r-1)))
# Vector force acting on 1 from 2.
def interatomic_force(r1, r2):
delta_r = r1 - r2
r = norm(delta_r)
retur | n morse_force(r)*(delta_r/r)
if __name__ == '__main__':
if len(sys.argv) < 2:
print "missing parameter: saddle-nucleation.py <in-file>"
else:
initial_config = Configuration(sys.argv[1])
N = initial_config.num_particles
forces = zeros((N, 3))
| for i in range(0, N):
print "Forces on " + str(i) + ":"
for j in range(i+1, N):
F = interatomic_force(initial_config.positions[i], initial_config.positions[j])
forces[i,:] += F
forces[j,:] -= F
copy_conf = copy.deepcopy(initial_config)
print initial_config.positions
copy_conf.positions[0,1] = 666
print copy_conf.positions
print initial_config.positions
|
tensorflow/graphics | tensorflow_graphics/nn/layer/__init__.py | Python | apache-2.0 | 984 | 0 | # Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS | IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for t | he specific language governing permissions and
# limitations under the License.
"""Layer module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_graphics.nn.layer import graph_convolution
from tensorflow_graphics.nn.layer import pointnet
from tensorflow_graphics.util import export_api as _export_api
# API contains submodules of tensorflow_graphics.geometry.
__all__ = _export_api.get_modules()
|
Stavitsky/python-neutronclient | neutronclient/tests/unit/test_utils.py | Python | apache-2.0 | 3,942 | 0 | # Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from neutronclient.common import exceptions
from neutronclient.common import utils
class TestUtils(testtools.TestCase):
    def test_string_to_bool_true(self):
        """str2bool('true') maps to True."""
        self.assertTrue(utils.str2bool('true'))
    def test_string_to_bool_false(self):
        """str2bool('false') maps to False."""
        self.assertFalse(utils.str2bool('false'))
    def test_string_to_bool_None(self):
        """str2bool(None) passes None through."""
        self.assertIsNone(utils.str2bool(None))
    def test_string_to_dictionary(self):
        """str2dict parses comma-separated key=value pairs into a dict."""
        input_str = 'key1=value1,key2=value2'
        expected = {'key1': 'value1', 'key2': 'value2'}
        self.assertEqual(expected, utils.str2dict(input_str))
    def test_none_string_to_dictionary(self):
        """str2dict returns an empty dict for both '' and None input."""
        input_str = ''
        expected = {}
        self.assertEqual(expected, utils.str2dict(input_str))
        input_str = None
        expected = {}
        self.assertEqual(expected, utils.str2dict(input_str))
    def test_get_dict_item_properties(self):
        """get_item_properties extracts the requested fields from a dict."""
        item = {'name': 'test_name', 'id': 'test_id'}
        fields = ('name', 'id')
        actual = utils.get_item_properties(item=item, fields=fields)
        self.assertEqual(('test_name', 'test_id'), actual)
    def test_get_object_item_properties_mixed_case_fields(self):
        """Fields in mixed_case_fields keep their case when mapped to attrs."""
        class Fake(object):
            def __init__(self):
                self.id = 'test_id'
                self.name = 'test_name'
                self.test_user = 'test'
        fields = ('name', 'id', 'test user')
        mixed_fields = ('test user', 'ID')
        item = Fake()
        actual = utils.get_item_properties(item, fields, mixed_fields)
        self.assertEqual(('test_name', 'test_id', 'test'), actual)
def test_get_object_item_desired_fields_differ_from_item(self):
class Fake(object):
def __init__(self):
self.id = 'test_id_1'
self.name = 'test_name'
self.test_user = 'test | '
fields = ('name', 'id', 'test user')
item = Fake()
actual = utils.get_item_properties(item, fields)
self.assertNotEqual(('test_name', 'test_id', 'test'), actual)
def test_get_object_item_desired_fields_is_empty(self):
class Fake(object):
def __init__(self):
| self.id = 'test_id_1'
self.name = 'test_name'
self.test_user = 'test'
fields = []
item = Fake()
actual = utils.get_item_properties(item, fields)
self.assertEqual((), actual)
    def test_get_object_item_with_formatters(self):
        """A formatter callable supplies the value for its field."""
        class Fake(object):
            def __init__(self):
                self.id = 'test_id'
                self.name = 'test_name'
                self.test_user = 'test'
        class FakeCallable(object):
            def __call__(self, *args, **kwargs):
                return 'pass'
        fields = ('name', 'id', 'test user', 'is_public')
        formatters = {'is_public': FakeCallable()}
        item = Fake()
        act = utils.get_item_properties(item, fields, formatters=formatters)
        self.assertEqual(('test_name', 'test_id', 'test', 'pass'), act)
class ImportClassTestCase(testtools.TestCase):
    """Tests for utils.get_client_class version resolution."""

    def test_get_client_class_invalid_version(self):
        """An unknown API version must raise UnsupportedVersion."""
        args = ('image', '2', {'image': '2'})
        self.assertRaises(exceptions.UnsupportedVersion,
                          utils.get_client_class, *args)
|
Jetzal/Picture | picture.py | Python | mit | 1,761 | 0.014196 | """
picture.py
Author: Jett
Credit: None
Assignment:
Use the ggame library to "paint" a graphical picture of something (e.g. a house, a face or landscape).
Use at least:
1. Three different Color objects.
2. Ten different Sprite objects.
3. One (or more) RectangleAsset objects.
4. One (or more) CircleAsset objects.
5. One (or more) EllipseAsset objects.
6. One (or more) PolygonAsset objects.
See:
https://github.com/HHS-IntroProgramming/Standards-and-Syllabus/wiki/Displaying-Graphics
for general information on how to use ggame.
See:
http://brythonserver.github.io/ggame/
for detailed information on ggame.
"""
from ggame import App, Color, LineStyle, Sprite, RectangleAsset, CircleAsset, EllipseAsset, PolygonAsset
# add your code here \/ \/ \/
red = Color(0xff0000, 1.0)
green = Color(0x00ff00, 1.0)
blue = Color(0x0000ff, 1.0)
black = Color(0x000000, 1.0)
brown = Color(0x633C1F, 1.0)
white = Color(0xFFFFFF, 1.0)
yellow=Color(0xFFFF00, 1.0)
thinline = LineStyle(1, black)
House = RectangleAsset(700, 700, thinline, brown)
Swindow=CircleAsset(70, thinline, white)
window= RectangleAsset(100,100, thinline, white)
ellipse=EllipseAsset(30, 20, thinline, white)
polygon=PolygonAsset([(0,0), (700,0), (350,-300), (0,0)], thinline, black)
door= RectangleAsset(100,200, | thinline, black)
chimmney= RectangleAsset(70, 170, thinline, black)
Sun=CircleAsset(130, thinline, yellow)
Earth=RectangleAsset(10010, 300, thinline, green)
Sprite(House,(500,400))
Sprite(polygon,(500,400))
Sprite(Swindow, (850,400))
Sprite(window, (550, 550))
Sprite(window, | (1050, 550))
Sprite(door, (800, 680))
Sprite(ellipse,(850,705))
Sprite(chimmney, (600, 200))
Sprite(Sun, (80, 80))
Sprite(Earth,(-20, 830))
# add your code here /\ /\ /\
myapp = App()
myapp.run()
|
gustavofonseca/inbox | penne_core/contrib/sites/migrations/0002_set_site_domain_and_name.py | Python | bsd-2-clause | 1,132 | 0 | """
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from djang | o.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
| 'domain': 'example.com',
'name': 'penne_core'
}
)
def update_site_backward(apps, schema_editor):
    """Revert site domain and name to default."""
    # Use the historical model from the migration state, never a direct import.
    Site = apps.get_model('sites', 'Site')
    defaults = {'domain': 'example.com', 'name': 'example.com'}
    Site.objects.update_or_create(id=settings.SITE_ID, defaults=defaults)
class Migration(migrations.Migration):
    """Data migration: set the Site row's domain/name (reversible)."""

    dependencies = [
        ('sites', '0001_initial'),
    ]
    operations = [
        # RunPython pairs the forward update with its reverse function so
        # the migration can be unapplied cleanly.
        migrations.RunPython(update_site_forward, update_site_backward),
    ]
|
yufeldman/arrow | python/pyarrow/tests/test_types.py | Python | apache-2.0 | 7,112 | 0 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pickle
import pytest
import pyarrow as pa
import pyarrow.types as types
# Representative sample covering every major Arrow DataType family; shared
# fixture for the predicate tests below.
MANY_TYPES = [
    pa.null(),
    pa.bool_(),
    pa.int32(),
    pa.time32('s'),
    pa.time64('us'),
    pa.date32(),
    pa.timestamp('us'),
    pa.timestamp('us', tz='UTC'),
    pa.timestamp('us', tz='Europe/Paris'),
    pa.float16(),
    pa.float32(),
    pa.float64(),
    pa.decimal128(19, 4),
    pa.string(),
    pa.binary(),
    pa.binary(10),
    pa.list_(pa.int32()),
    pa.struct([pa.field('a', pa.int32()),
               pa.field('b', pa.int8()),
               pa.field('c', pa.string())]),
    pa.union([pa.field('a', pa.binary(10)),
              pa.field('b', pa.string())], mode=pa.lib.UnionMode_DENSE),
    pa.union([pa.field('a', pa.binary(10)),
              pa.field('b', pa.string())], mode=pa.lib.UnionMode_SPARSE),
    # XXX Needs array pickling
    # pa.dictionary(pa.int32(), pa.array(['a', 'b', 'c'])),
]
def test_is_boolean():
    """Only pa.bool_() is classified as boolean."""
    assert not types.is_boolean(pa.int8())
    assert types.is_boolean(pa.bool_())
def test_is_integer():
    """Signed/unsigned predicates partition the integer widths correctly."""
    signed = [pa.int8(), pa.int16(), pa.int32(), pa.int64()]
    unsigned = [pa.uint8(), pa.uint16(), pa.uint32(), pa.uint64()]

    for ty in signed:
        assert types.is_integer(ty)
        assert types.is_signed_integer(ty)
        assert not types.is_unsigned_integer(ty)

    for ty in unsigned:
        assert types.is_integer(ty)
        assert types.is_unsigned_integer(ty)
        assert not types.is_signed_integer(ty)

    # A floating-point type is not an integer of either signedness.
    assert not types.is_integer(pa.float32())
    assert not types.is_signed_integer(pa.float32())
def test_is_floating():
    """All three float widths are floating; int32 is not."""
    for t in [pa.float16(), pa.float32(), pa.float64()]:
        assert types.is_floating(t)
    assert not types.is_floating(pa.int32())
def test_is_null():
    """Only the null type is null; a list type is not."""
    assert types.is_null(pa.null())
    assert not types.is_null(pa.list_(pa.int32()))
def test_is_decimal():
    """decimal128 is decimal; int32 is not."""
    assert types.is_decimal(pa.decimal128(19, 4))
    assert not types.is_decimal(pa.int32())
def test_is_list():
    """list_ is a list type; its value type alone is not."""
    assert types.is_list(pa.list_(pa.int32()))
    assert not types.is_list(pa.int32())
def test_is_dictionary():
    """Dictionary-encoded types are detected; plain ints are not."""
    dict_ty = pa.dictionary(pa.int32(), pa.array(['a', 'b', 'c']))
    assert types.is_dictionary(dict_ty)
    assert not types.is_dictionary(pa.int32())
def test_is_nested_or_struct():
    """Structs are both struct and nested; lists are nested only."""
    fields = [pa.field('a', pa.int32()),
              pa.field('b', pa.int8()),
              pa.field('c', pa.string())]
    struct_ty = pa.struct(fields)
    list_ty = pa.list_(pa.int32())
    assert types.is_struct(struct_ty)
    assert not types.is_struct(list_ty)
    assert types.is_nested(struct_ty)
    assert types.is_nested(list_ty)
    assert not types.is_nested(pa.int32())
def test_is_union():
    """Both sparse and dense unions are unions; a list type is not."""
    fields = [pa.field('a', pa.int32()),
              pa.field('b', pa.int8()),
              pa.field('c', pa.string())]
    for mode in (pa.lib.UnionMode_SPARSE, pa.lib.UnionMode_DENSE):
        assert types.is_union(pa.union(fields, mode=mode))
    assert not types.is_union(pa.list_(pa.int32()))
# TODO(wesm): is_map, once implemented
def test_is_binary_string():
    """binary vs. string vs. fixed-size-binary predicates are exclusive."""
    binary_ty = pa.binary()
    string_ty = pa.string()
    assert types.is_binary(binary_ty)
    assert not types.is_binary(string_ty)
    assert types.is_string(string_ty)
    assert types.is_unicode(string_ty)
    assert not types.is_string(binary_ty)
    # Only width-specified binary is "fixed size".
    assert types.is_fixed_size_binary(pa.binary(5))
    assert not types.is_fixed_size_binary(binary_ty)
def test_is_temporal_date_time_timestamp():
    """Dates, times and timestamps are temporal and mutually exclusive."""
    dates = [pa.date32(), pa.date64()]
    times = [pa.time32('s'), pa.time64('ns')]
    timestamps = [pa.timestamp('ms')]

    for ty in dates + times + timestamps:
        assert types.is_temporal(ty)

    # Each group satisfies exactly its own predicate.
    groups = [
        (dates, types.is_date, (types.is_time, types.is_timestamp)),
        (times, types.is_time, (types.is_date, types.is_timestamp)),
        (timestamps, types.is_timestamp, (types.is_date, types.is_time)),
    ]
    for group, positive, negatives in groups:
        for ty in group:
            assert positive(ty)
            for negative in negatives:
                assert not negative(ty)

    assert not types.is_temporal(pa.int32())
def test_timestamp_type():
    """Regression test for ARROW-1683: factory returns a TimestampType."""
    ty = pa.timestamp('ns')
    assert isinstance(ty, pa.TimestampType)
def test_union_type():
    """Union factory honors mode aliases and preserves child fields."""
    fields = [pa.field('x', pa.list_(pa.int32())),
              pa.field('y', pa.binary())]

    def assert_fields(ty, expected):
        assert ty.num_children == len(expected)
        assert [ty[i] for i in range(ty.num_children)] == expected

    # Both the string alias and the enum constant produce the same mode.
    mode_specs = [(('sparse', pa.lib.UnionMode_SPARSE), 'sparse'),
                  (('dense', pa.lib.UnionMode_DENSE), 'dense')]
    for aliases, mode_name in mode_specs:
        for mode in aliases:
            ty = pa.union(fields, mode=mode)
            assert ty.mode == mode_name
            assert_fields(ty, fields)

    # Anything else is rejected.
    for bad_mode in ('unknown', 2):
        with pytest.raises(ValueError, match='Invalid union mode'):
            pa.union(fields, mode=bad_mode)
def test_types_hashable():
    """Every type is hashable with a stable hash and works as a dict key.

    NOTE(review): the original line ``in_dict[type_] = i`` was mangled by an
    extraction artifact; restored here.
    """
    in_dict = {}
    for i, type_ in enumerate(MANY_TYPES):
        assert hash(type_) == hash(type_)
        in_dict[type_] = i
        assert in_dict[type_] == i
    # No two distinct types in MANY_TYPES may collide as keys.
    assert len(in_dict) == len(MANY_TYPES)
def test_types_picklable():
    """Types round-trip through pickle unchanged.

    NOTE(review): the ``def`` line was split by an extraction artifact
    (``test_types_pic | klable``); restored here.
    """
    for ty in MANY_TYPES:
        data = pickle.dumps(ty)
        assert pickle.loads(data) == ty
@pytest.mark.parametrize('t,check_func', [
    (pa.date32(), types.is_date32),
    (pa.date64(), types.is_date64),
    (pa.time32('s'), types.is_time32),
    (pa.time64('ns'), types.is_time64),
    (pa.int8(), types.is_int8),
    (pa.int16(), types.is_int16),
    (pa.int32(), types.is_int32),
    (pa.int64(), types.is_int64),
    (pa.uint8(), types.is_uint8),
    (pa.uint16(), types.is_uint16),
    (pa.uint32(), types.is_uint32),
    (pa.uint64(), types.is_uint64),
    (pa.float16(), types.is_float16),
    (pa.float32(), types.is_float32),
    (pa.float64(), types.is_float64)
])
def test_exact_primitive_types(t, check_func):
    """Each exact-type predicate accepts its own matching type instance."""
    assert check_func(t)
def test_fixed_size_binary_byte_width():
    """Fixed-size binary reports its declared width."""
    width = 5
    assert pa.binary(width).byte_width == width
def test_decimal_byte_width():
    """decimal128 is stored in 16 bytes regardless of precision/scale."""
    decimal_ty = pa.decimal128(19, 4)
    assert decimal_ty.byte_width == 16
|
nohona/cron-crm | usr/local/certbot/certbot-apache/certbot_apache/tests/complex_parsing_test.py | Python | gpl-3.0 | 4,535 | 0 | """Tests for certbot_apache.parser."""
import os
import shutil
import unittest
from certbot import errors
from certbot_apache.tests import util
class ComplexParserTest(util.ParserTest):
    """Apache Parser Test.

    NOTE(review): two lines in this class were mangled by extraction
    artifacts (the setUp comment and the ``setup_variables`` def line);
    restored here.
    """

    def setUp(self):  # pylint: disable=arguments-differ
        super(ComplexParserTest, self).setUp(
            "complex_parsing", "complex_parsing")
        self.setup_variables()
        # This needs to happen after due to setup_variables not being run
        # until after
        self.parser.init_modules()  # pylint: disable=protected-access

    def tearDown(self):
        # Remove all per-test scratch directories.
        shutil.rmtree(self.temp_dir)
        shutil.rmtree(self.config_dir)
        shutil.rmtree(self.work_dir)

    def setup_variables(self):
        """Set up variables for parser."""
        self.parser.variables.update(
            {
                "COMPLEX": "",
                "tls_port": "1234",
                "fnmatch_filename": "test_fnmatch.conf",
                "tls_port_str": "1234"
            }
        )

    def test_filter_args_num(self):
        """Note: This may also fail due to Include conf-enabled/ syntax."""
        matches = self.parser.find_dir("TestArgsDirective")
        self.assertEqual(len(self.parser.filter_args_num(matches, 1)), 3)
        self.assertEqual(len(self.parser.filter_args_num(matches, 2)), 2)
        self.assertEqual(len(self.parser.filter_args_num(matches, 3)), 1)

    def test_basic_variable_parsing(self):
        matches = self.parser.find_dir("TestVariablePort")
        self.assertEqual(len(matches), 1)
        self.assertEqual(self.parser.get_arg(matches[0]), "1234")

    def test_basic_variable_parsing_quotes(self):
        matches = self.parser.find_dir("TestVariablePortStr")
        self.assertEqual(len(matches), 1)
        self.assertEqual(self.parser.get_arg(matches[0]), "1234")

    def test_invalid_variable_parsing(self):
        # An undefined variable must surface as a PluginError.
        del self.parser.variables["tls_port"]
        matches = self.parser.find_dir("TestVariablePort")
        self.assertRaises(
            errors.PluginError, self.parser.get_arg, matches[0])

    def test_basic_ifdefine(self):
        self.assertEqual(len(self.parser.find_dir("VAR_DIRECTIVE")), 2)
        self.assertEqual(len(self.parser.find_dir("INVALID_VAR_DIRECTIVE")), 0)

    def test_basic_ifmodule(self):
        self.assertEqual(len(self.parser.find_dir("MOD_DIRECTIVE")), 2)
        self.assertEqual(
            len(self.parser.find_dir("INVALID_MOD_DIRECTIVE")), 0)

    def test_nested(self):
        self.assertEqual(len(self.parser.find_dir("NESTED_DIRECTIVE")), 3)
        self.assertEqual(
            len(self.parser.find_dir("INVALID_NESTED_DIRECTIVE")), 0)

    def test_load_modules(self):
        """If only first is found, there is bad variable parsing."""
        self.assertTrue("status_module" in self.parser.modules)
        self.assertTrue("mod_status.c" in self.parser.modules)
        # This is in an IfDefine
        self.assertTrue("ssl_module" in self.parser.modules)
        self.assertTrue("mod_ssl.c" in self.parser.modules)

    def verify_fnmatch(self, arg, hit=True):
        """Test if Include was correctly parsed."""
        from certbot_apache import parser
        self.parser.add_dir(parser.get_aug_path(self.parser.loc["default"]),
                            "Include", [arg])
        if hit:
            self.assertTrue(self.parser.find_dir("FNMATCH_DIRECTIVE"))
        else:
            self.assertFalse(self.parser.find_dir("FNMATCH_DIRECTIVE"))

    # NOTE: Only run one test per function otherwise you will have
    # inf recursion
    def test_include(self):
        self.verify_fnmatch("test_fnmatch.?onf")

    def test_include_complex(self):
        self.verify_fnmatch("../complex_parsing/[te][te]st_*.?onf")

    def test_include_fullpath(self):
        self.verify_fnmatch(os.path.join(self.config_path,
                                         "test_fnmatch.conf"))

    def test_include_fullpath_trailing_slash(self):
        self.verify_fnmatch(self.config_path + "//")

    def test_include_single_quotes(self):
        self.verify_fnmatch("'" + self.config_path + "'")

    def test_include_double_quotes(self):
        self.verify_fnmatch('"' + self.config_path + '"')

    def test_include_variable(self):
        self.verify_fnmatch("../complex_parsing/${fnmatch_filename}")

    def test_include_missing(self):
        # This should miss
        self.verify_fnmatch("test_*.onf", False)
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
morishin/alfred-xcfind-workflow | xcfind.py | Python | mit | 1,495 | 0.008027 | import os
import subprocess
import sys

# Bundled dependencies live next to this script; they must be on sys.path
# before the imports below.
sys.path.append(os.path.dirname(os.path.abspath(__file__))+ '/enum')
sys.path.append(os.path.dirname(os.path.abspath(__file__))+ '/workflow')

from enum import Enum
from workflow import Workflow
class FileType(Enum):
    """Xcode document kinds: member name is the file extension, member
    value is the matching icon filename in Xcode's Resources directory."""
    xcodeproj = 'xcode-project_Icon.icns'
    xcworkspace = 'workspace_Icon.icns'
    playground = 'playground_Icon.icns'
    def extension(self):
        # The enum member name doubles as the filename extension.
        return self.name
    def icon(self):
        # Icon filename bundled with Xcode.
        return self.value
def search(query, file_type):
    """Run Spotlight (mdfind) for files named ``query*.<ext>``.

    Returns a list of (filename, path, icon) tuples, one per match.

    NOTE(review): the original built a shell string with ``shell=True`` and
    interpolated the user-supplied query into it (shell-injection risk), and
    the ``.format`` call was split by an extraction artifact. Fixed by
    passing an argv list with shell=False.
    """
    mdfind_query = "kMDItemFSName == '{0}*.{1}'c".format(query, file_type.extension())
    output = subprocess.check_output(["mdfind", mdfind_query]).decode("utf-8").rstrip()
    results = []
    for path in output.split("\n"):
        filename = path.split("/")[-1]
        results.append((filename, path, file_type.icon()))
    return results
def main(wf):
    """Populate Alfred results with Xcode files matching the first CLI arg."""
    if len(wf.args) > 0:
        query = wf.args[0]
    else:
        # No query supplied: emit no results.
        return
    # Resolve Xcode's install location so we can reuse its document icons.
    xcode_path = subprocess.check_output("xcode-select -print-path", shell=True).decode("utf-8").rstrip()
    xcode_resouce_path= "/".join(xcode_path.split("/")[:-1]) + "/Resources/"
    # Workspaces first, then playgrounds, then projects - presumably the
    # preferred result ordering; confirm with workflow users.
    for fileType in [FileType.xcworkspace, FileType.playground, FileType.xcodeproj]:
        for (filename, path, icon) in search(query, fileType):
            wf.add_item(filename, path, arg=path, valid=True, icon=xcode_resouce_path + icon)
    wf.send_feedback()
if __name__ == '__main__':
    # Alfred entry point: run main() inside Workflow's error-handling wrapper.
    wf = Workflow()
    sys.exit(wf.run(main))
|
shagi/guifiadmin | vpn/migrations/0001_initial.py | Python | agpl-3.0 | 3,807 | 0.005779 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-11-01 08:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.9; do not hand-edit field definitions.
    # NOTE(review): restored two tokens mangled by extraction artifacts
    # (``CreateModel`` and one ``null=True``).

    initial = True

    dependencies = [
        ('guifi', '0001_initial'),
        ('accounting', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='TincClient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('vpn_address', models.CharField(max_length=200, verbose_name='vpn address')),
                ('private_key', models.TextField(blank=True, null=True, verbose_name='private key')),
                ('public_key', models.TextField(blank=True, null=True, verbose_name='public key')),
                ('upload', models.PositiveSmallIntegerField(default=400, help_text='speed in kbit/s', verbose_name='upload')),
                ('download', models.PositiveSmallIntegerField(default=4000, help_text='speed in kbit/s', verbose_name='download')),
            ],
            options={
                'verbose_name': 'Tinc client',
                'verbose_name_plural': 'Tinc clients',
            },
        ),
        migrations.CreateModel(
            name='TincGateway',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200, verbose_name='title')),
                ('nickname', models.SlugField(help_text='name of the tinc daemon and the network interface', max_length=100, unique=True, verbose_name='nickname')),
                ('address', models.CharField(blank=True, help_text='IP address or hostname of the server. If blank, guifi address will be used.', max_length=200, null=True, verbose_name='address')),
                ('upload', models.PositiveSmallIntegerField(help_text='speed in kbit/s', verbose_name='upload')),
                ('download', models.PositiveSmallIntegerField(help_text='speed in kbit/s', verbose_name='download')),
                ('inet_zone', models.CharField(default='inet', help_text='zone for the external interface.', max_length=20, verbose_name='external zone')),
                ('inet_interface', models.CharField(help_text='external interface. For firewall policies.', max_length=20, verbose_name='external interface')),
                ('inet_address', models.CharField(help_text='external ip address. VPN traffic will go out from here.', max_length=200, verbose_name='external address')),
                ('vpn_address', models.CharField(max_length=200, verbose_name='vpn address')),
                ('subnet', models.CharField(max_length=200, verbose_name='subnet')),
                ('private_key', models.TextField(blank=True, null=True, verbose_name='private key')),
                ('public_key', models.TextField(blank=True, null=True, verbose_name='public key')),
                ('device', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='guifi.Device', verbose_name='device')),
            ],
            options={
                'verbose_name': 'Tinc server',
                'verbose_name_plural': 'Tinc servers',
            },
        ),
        migrations.AddField(
            model_name='tincclient',
            name='gateway',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vpn.TincGateway', verbose_name='gateway'),
        ),
        migrations.AddField(
            model_name='tincclient',
            name='member',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounting.Member', verbose_name='member'),
        ),
    ]
|
jakirkham/volumina | tests/layerwidget_test.py | Python | lgpl-3.0 | 4,634 | 0.007553 | ###############################################################################
# volumina: volume slicing and editing library
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the Lesser GNU General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# See the files LICENSE.lgpl2 and LICENSE.lgpl3 for full text of the
# GNU Lesser General Public License version 2.1 and 3 respectively.
# This information is also available on the ilastik web site at:
# http://ilastik.org/license/
###############################################################################
import os
import time
import unittest as ut
from PyQt4.QtCore import QTimer
from PyQt4.QtGui import qApp, QApplication, QWidget, QHBoxLayout, QPixmap
from volumina.layer import Layer
from volumina.layerstack import LayerStackModel
from volumina.widgets.layerwidget import LayerWidget
class TestLayerWidget( ut.TestCase ):
    """
    Create two layers and add them to a LayerWidget.
    Then change one of the layer visibilities and verify that the layer widget appearance updates.
    At the time of this writing, the widget doesn't properly repaint the selected layer (all others repaint correctly).

    NOTE(review): two lines in impl() were mangled by extraction artifacts
    (the comment before the second visibility toggle and ``self.o2.visible = True``);
    restored here.
    """
    @classmethod
    def setUpClass(cls):
        if 'TRAVIS' in os.environ:
            # This test fails on Travis-CI for unknown reasons,
            # probably due to the variability of time.sleep().
            # Skip it on Travis-CI.
            import nose
            raise nose.SkipTest

        cls.app = QApplication([])
        cls.errors = False

    @classmethod
    def tearDownClass(cls):
        del cls.app

    def impl(self):
        try:
            # Change the visibility of the *selected* layer
            self.o2.visible = False

            # Make sure the GUI is caught up on paint events
            QApplication.processEvents()

            # We must sleep for the screenshot to be right.
            time.sleep(0.1)

            self.w.repaint()

            # Capture the window before we change anything
            beforeImg = QPixmap.grabWindow( self.w.winId() ).toImage()

            # Change the visibility of the *selected* layer
            self.o2.visible = True
            self.w.repaint()

            # Make sure the GUI is caught up on paint events
            QApplication.processEvents()

            # We must sleep for the screenshot to be right.
            time.sleep(0.1)

            # Capture the window now that we've changed a layer.
            afterImg = QPixmap.grabWindow( self.w.winId() ).toImage()

            # Optional: Save the files so we can inspect them ourselves...
            #beforeImg.save('before.png')
            #afterImg.save('after.png')

            # Before and after should NOT match.
            assert beforeImg != afterImg
        except:
            # Catch all exceptions and print them
            # We must finish so we can quit the app.
            import traceback
            traceback.print_exc()
            TestLayerWidget.errors = True

        qApp.quit()

    def test_repaint_after_visible_change(self):
        self.model = LayerStackModel()

        self.o1 = Layer([])
        self.o1.name = "Fancy Layer"
        self.o1.opacity = 0.5
        self.model.append(self.o1)

        self.o2 = Layer([])
        self.o2.name = "Some other Layer"
        self.o2.opacity = 0.25
        self.model.append(self.o2)

        self.view = LayerWidget(None, self.model)
        self.view.show()
        self.view.updateGeometry()

        self.w = QWidget()
        self.lh = QHBoxLayout(self.w)
        self.lh.addWidget(self.view)
        self.w.setGeometry(100, 100, 300, 300)
        self.w.show()
        self.w.raise_()

        # Run the test within the GUI event loop
        QTimer.singleShot(500, self.impl )
        self.app.exec_()

        # Were there errors?
        assert not TestLayerWidget.errors, "There were GUI errors/failures. See above."
if __name__=='__main__':
ut.main()
|
aguijarro/DataSciencePython | DataWrangling/CaseStudy/project/analyze_data.py | Python | mit | 1,201 | 0.004163 | import matplotlib.pyplot as plt
import pandas as pd
# Create a list of colors (from iWantHue)
colors = ["#E13F29", "#D69A80", "#D63B59", "#AE5552", "#CB5C3B", "#EB8076", "#96624E"]
def draw_data(st_types_count, keys, explode):
    """Draw a pie chart of the counts stored in ``st_types_count``.

    st_types_count -- mapping from category name to count
    keys -- the categories to plot (must be keys of st_types_count)
    explode -- per-slice offsets, passed straight through to pyplot.pie

    NOTE(review): two lines were mangled by extraction artifacts
    (``values.append(value)`` and ``raw_data["keys"] = keys``); restored.
    """
    # Restrict the counts to the requested keys only.
    data = st_types_count.fromkeys(keys)
    for d in data:
        data[d] = st_types_count[d]
    keys = []
    values = []
    raw_data = {}
    # keys/values are appended in lockstep, so the label/size pairing holds.
    for key, value in data.iteritems():
        keys.append(key)
        values.append(value)
    raw_data["keys"] = keys
    raw_data["values"] = values
    df = pd.DataFrame(raw_data, columns = ['keys', 'values'])
    print ("data", df)
    # Create a pie chart
    plt.pie(
        # slice sizes are the category counts
        df['values'],
        # slice labels are the category names
        labels=df['keys'],
        # with no shadows
        shadow=False,
        # with colors
        colors=colors,
        # with one slice exploded out
        explode=explode,
        # with the start angle at 90 degrees
        startangle=90,
        # with the percent listed as a fraction
        autopct='%1.1f%%',
    )
    # Keep the pie circular.
    plt.axis('equal')
    # View the plot
    plt.tight_layout()
    plt.show()
jashandeep-sohi/aiohttp | tests/test_py35/test_client_websocket_35.py | Python | apache-2.0 | 1,892 | 0 | import pytest
import aiohttp
from aiohttp import web
@pytest.mark.run_loop
async def test_client_ws_async_for(loop, create_server):
    """Server sends three messages; ``async for`` yields each in order.

    NOTE(review): the trailing ``next(it)`` line was mangled by an
    extraction artifact; restored here.
    """
    items = ['q1', 'q2', 'q3']

    async def handler(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        for i in items:
            ws.send_str(i)
        await ws.close()
        return ws

    app, url = await create_server(proto='ws')
    app.router.add_route('GET', '/', handler)
    resp = await aiohttp.ws_connect(url, loop=loop)
    it = iter(items)
    async for msg in resp:
        assert msg.data == next(it)
    # Every item must have been consumed before the connection closed.
    with pytest.raises(StopIteration):
        next(it)
    assert resp.closed
@pytest.mark.run_loop
async def test_client_ws_async_with(loop, create_app_and_client):
    """``async with client.ws_connect`` closes the websocket on exit.

    NOTE(review): the ``app.router.add_route`` line was mangled by an
    extraction artifact; restored here.
    """
    async def handler(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        msg = await ws.receive()
        ws.send_str(msg.data + '/answer')
        await ws.close()
        return ws

    app, client = await create_app_and_client(
        server_params=dict(proto='ws'))
    app.router.add_route('GET', '/', handler)

    async with client.ws_connect('/') as ws:
        ws.send_str('request')
        msg = await ws.receive()
        assert msg.data == 'request/answer'

    assert ws.closed
@pytest.mark.run_loop
async def test_client_ws_async_with_shortcut(loop, create_server):
    """Module-level aiohttp.ws_connect also works as an async context manager."""
    async def handler(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        msg = await ws.receive()
        ws.send_str(msg.data + '/answer')
        await ws.close()
        return ws
    app, url = await create_server(proto='ws')
    app.router.add_route('GET', '/', handler)
    async with aiohttp.ws_connect(url, loop=loop) as ws:
        ws.send_str('request')
        msg = await ws.receive()
        assert msg.data == 'request/answer'
    # Exiting the context manager must have closed the websocket.
    assert ws.closed
|
stdweird/aquilon | lib/python2.6/aquilon/worker/formats/cpu.py | Python | apache-2.0 | 1,186 | 0.001686 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cpu formatter."""
from aquilon.aqdb.model import Cpu
from aquilon.worker.formats.formatters import ObjectFormatter
class CpuFormatter(ObjectFormatter):
    """Renders Cpu objects for the raw output format."""
    def format_raw(self, cpu, indent=""):
        # First line: vendor, model and clock speed; optional comments follow.
        lines = [indent + "Cpu: %s %s %d MHz" %
                 (cpu.vendor.name, cpu.name, cpu.speed)]
        if cpu.comments:
            lines.append(indent + "  Comments: %s" % cpu.comments)
        return "\n".join(lines)
ObjectFormatter.handlers[Cpu] = CpuFormatter()
|
marcelnicolay/pycompressor | compressor/cli.py | Python | lgpl-3.0 | 2,967 | 0.006741 | # coding: utf-8
# <pycompressor - compress and merge static files (css,js) in html files>
# Copyright (C) <2012> Marcel Nicolay <marcel.nicolay@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
from optparse import OptionParser
class CLI(object):
color = {
"PINK": "",
"BLUE": "",
"CYAN": "",
"GREEN": "",
"YELLOW": "",
"RED": "",
"END": "",
}
@staticmethod
def show_colors():
CLI.color = {
"PINK": "\033[35m",
"BLUE": "\033[34m",
| "CYAN": "\033[36m",
"GREEN": "\033[32m",
"YELLOW": "\033[33m",
"RED": "\033[31m",
"END": "\033[0m",
}
def __init__(self):
self.__config_parser()
def __config_parser(self):
self.__parser = OptionParser(usage="usage: %prog [options] start")
self.__parser.add_option("-c", "--config",
dest="config_file",
default="compressor.yaml",
help="Use a specific config file. If not provided, will search for 'compressor.yaml' in the current directory.")
self.__parser.add_option("-s", "--sync",
dest="sync",
action="store_true",
default=False,
help="Sync files with S3")
self.__parser.add_option("-v", "--version",
action="store_true",
dest="compressor_version",
default=False,
help="Displays compressor version and exit.")
self.__parser.add_option("--color",
action="store_true",
dest="show_colors",
default=False,
help="Output with beautiful colors.")
self.__parser.add_option("--prefix",
dest="prefix",
default="min",
help="Use prefix in output js and css.")
def get_parser(self):
return self.__parser
def parse(self):
return self.__parser.parse_args()
def error_and_exit(self, msg):
self.msg("[ERROR] %s\n" % msg, "RED")
sys.exit(1)
def info_and_exit(self, msg):
self.msg("%s\n" % msg, "BLUE")
sys.exit(0)
def msg(self, msg, color="CYAN"):
print "%s%s%s" % (self.color[color], msg, self.color["END"]) |
anna-effeindzourou/trunk | examples/rotationalResistance.py | Python | gpl-2.0 | 1,465 | 0.055328 | #!/usr/bin/env python
# encoding: utf-8
from yade import utils, plot
o = Omega()
# Material parameters: friction angle, density, contact time and
# normal/tangential restitution; the timestep is a fraction of tc.
fr = 0.5;rho=2000
tc = 0.001; en = 0.7; et = 0.7; o.dt = 0.0002*tc
# Sphere radius (presumably meters - yade's default unit; confirm).
r = 0.002
# Two materials differing only in the rolling-resistance type (mRtype 1 vs 2),
# so the two sphere/plane pairs can be compared side by side.
mat1 = O.materials.append(ViscElMat(frictionAngle=fr,mR = 0.05, mRtype = 1, density=rho,tc=tc,en=en,et=et))
mat2 = O.materials.append(ViscElMat(frictionAngle=fr,mR = 0.05, mRtype = 2, density=rho,tc=tc,en=en,et=et))
# Plane tilted about the x axis by pi/28.
oriBody = Quaternion(Vector3(1,0,0),(pi/28))
id1 = O.bodies.append(sphere(center=[0,0,2*r],radius=r,material=mat1))
id2 = O.bodies.append(geom.facetBox(center=(0,-16.0*r,-2*r),orientation=oriBody,extents=(r,17.0*r,0), material=mat1,color=(0,0,1)))
id3 = O.bodies.append(sphere(center=[10*r,0,2*r],radius=r,material=mat2))
id4 = O.bodies.append(geom.facetBox(center=(10*r,-16.0*r,-2*r),orientation=oriBody,extents=(r,17.0*r,0), material=mat2,color=(0,0,1)))
# Simulation loop. NOTE(review): restored two extraction artifacts (a stray
# separator after InteractionLoop( and inside the 'addPlotData()' string).
o.engines = [
  ForceResetter(),
  InsertionSortCollider([Bo1_Sphere_Aabb(),Bo1_Facet_Aabb()]),
  InteractionLoop(
    [Ig2_Sphere_Sphere_ScGeom(),Ig2_Facet_Sphere_ScGeom()],
    [Ip2_ViscElMat_ViscElMat_ViscElPhys()],
    [Law2_ScGeom_ViscElPhys_Basic()],
  ),
  NewtonIntegrator(damping=0,gravity=[0,0,-9.81]),
  # Periodically record sphere positions for plotting.
  PyRunner(command='addPlotData()',iterPeriod=10000, dead = False, label='graph'),
]
def addPlotData():
    """Append the current y-positions of both spheres to the plot data.

    Called periodically by the PyRunner engine. (Removed the unused local
    ``f1`` from the original.)
    """
    s1 = O.bodies[id1].state.pos[1]
    s2 = O.bodies[id3].state.pos[1]
    plot.addData(sc=O.time, fc1=s1, fc2=s2)
from yade import qt
qt.View()
|
doubledherin/my_compiler | parser.py | Python | mit | 8,875 | 0.001239 | import token_names as tokens
import abstract_syntax_tree as AST
class Parser(object):
def __init__(self, lexer):
self.lexer = lexer
self.current_token = self.lexer.get_next_token()
def error(self, message):
raise Exception(message)
    def consume(self, token_type):
        """Advance to the next token if the current one matches token_type.

        Calls self.error() (which raises) on a mismatch, aborting the parse.
        """
        if self.current_token.type == token_type:
            self.current_token = self.lexer.get_next_token()
        else:
            error_message = """
            Trying to consume token type \'{}\' but current token type is \'{}\'
            """.format(token_type, self.current_token.type)
            self.error(error_message)
def program(self):
"""
program : block
"""
block_node = self.block()
program_node = AST.Program(block_node)
return program_node
def block(self):
"""
block : declarations compound_statement
"""
declaration_nodes = self.declarations()
compound_statement_node = self.compound_statement()
block_node = AST.Block(declaration_nodes, compound_statement_node)
return block_node
def declarations(self):
"""declarations : (var (variable_declaration SEMI)+)*
| (function ID (LPAREN formal_parameter_list RPAREN)? OPEN block CLOSE)*
| empty
"""
declarations = []
parameters = []
while True:
if self.current_token.type == tokens.VAR:
self.consume(tokens.VAR)
while self.current_token.type == tokens.ID:
declarations.extend(self.variable_declarations())
self.consume(tokens.SEMI)
while self.current_token.type == tokens.FUNCTION:
self.consume(tokens.FUNCTION)
function_name = self.current_token.value
self.consume(tokens.ID)
if self.current_token.type == tokens.LPAREN:
self.consume(tokens.LPAREN)
parameters = self.formal_parameter_list()
self.consume(tokens.RPAREN)
self.consume(tokens.OPEN)
block_node = self.block()
self.consume(tokens.CLOSE)
function_declaration = AST.FunctionDeclaration(function_name, parameters, block_node)
declarations.append(function_declaration)
else:
break
return declarations
def formal_parameter_list(self):
""" formal_parameter_list : formal_parameters
| formal_parameters SEMI formal_parameter_list
"""
if not self.current_token.type == tokens.ID:
| return []
parameter_nodes = self.formal_parameters()
while self.current_token.type == tokens.SEMI:
self.consume(tok | ens.SEMI)
parameter_nodes.extend(self.formal_parameters())
return parameter_nodes
def formal_parameters(self):
""" formal_parameters : ID (COMMA ID)* COLON type_spec """
parameter_nodes = []
parameter_tokens = [self.current_token]
self.consume(tokens.ID)
while self.current_token.type == tokens.COMMA:
self.consume(tokens.COMMA)
parameter_tokens.append(self.current_token)
self.consume(tokens.ID)
self.consume(tokens.COLON)
type_node = self.type_spec()
for token in parameter_tokens:
parameter_node = AST.Parameter(AST.Variable(token), type_node)
parameter_nodes.append(parameter_node)
return parameter_nodes
def variable_declarations(self):
"""
variable_declarations : ID (COMMA ID)* COLON type_spec
"""
variable_nodes = []
variable_nodes.append(AST.Variable(self.current_token))
self.consume(tokens.ID)
while self.current_token.type == tokens.COMMA:
self.consume(tokens.COMMA)
variable_nodes.append(AST.Variable(self.current_token))
self.consume(tokens.ID)
self.consume(tokens.COLON)
type_node = self.type_spec()
variable_declarations = [
AST.VariableDeclaration(variable_node, type_node) for variable_node in variable_nodes
]
return variable_declarations
def type_spec(self):
"""
type_spec : int
"""
token = self.current_token
if self.current_token.type == tokens.INT:
self.consume(tokens.INT)
return AST.Type(token)
else:
self.error('Unexpected token type %s in type_spec function' % self.current_token.type)
def factor(self):
"""
factor : PLUS factor
| MINUS factor
| int
| LPAREN expr RPAREN
| variable
"""
token = self.current_token
if token.type == tokens.INT:
self.consume(tokens.INT)
return AST.Number(token)
if token.type == tokens.PLUS:
self.consume(tokens.PLUS)
node = AST.UnaryOperator(token, self.factor())
return node
if token.type == tokens.MINUS:
self.consume(tokens.MINUS)
node = AST.UnaryOperator(token, self.factor())
return node
if token.type == tokens.LPAREN:
self.consume(tokens.LPAREN)
node = self.expr()
self.consume(tokens.RPAREN)
return node
else:
node = self.variable()
return node
def term(self):
"""
term : factor (MULTIPLY | DIVIDE) factor)*
"""
node = self.factor()
while self.current_token.type in (tokens.MULTIPLY, tokens.DIVIDE):
token = self.current_token
if token.type == tokens.MULTIPLY:
self.consume(tokens.MULTIPLY)
elif token.type == tokens.DIVIDE:
self.consume(tokens.DIVIDE)
node = AST.BinaryOperator(left=node, op=token, right=self.factor())
return node
def expr(self):
"""
expr : term ((PLUS | MINUS) term)*
"""
node = self.term()
while self.current_token.type in (tokens.PLUS, tokens.MINUS):
token = self.current_token
if token.type == tokens.PLUS:
self.consume(tokens.PLUS)
elif token.type == tokens.MINUS:
self.consume(tokens.MINUS)
node = AST.BinaryOperator(left=node, op=token, right=self.term())
return node
def compound_statement(self):
"""compound_statement : statement_list"""
nodes = self.statement_list()
root = AST.Compound()
for node in nodes:
root.children.append(node)
return root
def statement_list(self):
"""
statement_list : statement | statement SEMI statement_list
"""
node = self.statement()
results = [node]
while self.current_token.type == tokens.SEMI:
self.consume(tokens.SEMI)
results.append(self.statement())
if self.current_token.type == tokens.ID:
self.error('Unexpected token type %s in statement_list function'.format(self.current_token.type))
return results
def statement(self):
"""
statement : compound_statement
| assignment_statement
| print_statement
| empty
"""
if self.current_token.type == tokens.OPEN:
node = self.compound_statement()
elif self.current_token.type == tokens.ID:
node = self.assignment_statement()
elif self.current_token.type == tokens.PRINT:
node = self.print_statement()
else:
node = self.empty()
return node
def assignment_statement(self):
"""
assignment_statement : variable ASSIGN expr
"""
left = self.variable()
token = self.current_token
self.consume(tokens.ASSIGN)
right = self.expr()
node = AST.Assign(left, token, right)
return node
def print_ |
ArchieR7/HackerRank | Data Structures/Arrays/Left Rotation.py | Python | mit | 204 | 0.02451 | import sys
# Left Rotation: rotate the array left by D positions.
# Slicing is O(N), versus O(N*D) for popping the head D times; the modulo
# also handles D >= N. (Two extraction artifacts restored as well.)
(N, D) = [int(x) for x in input().split()]
nums = [int(x) for x in input().split()]
shift = D % N if N else 0
nums = nums[shift:] + nums[:shift]
print(' '.join(str(n) for n in nums))
Vlek/plugins | HexChat/HexStats.py | Python | mit | 1,614 | 0.017968 | import hexchat
#Based on Weechat's Weestats: https://weechat.org/scripts/source/weestats.py.html/
#By Filip H.F. 'FiXato' Slagter <fixato [at] gmail [dot] com>
__module_name__ = 'HexStats'
__module_version__ = '0.0.1'
# Shown by HexChat in its plugin list. NOTE(review): restored the extraction
# artifact that split this string.
__module_description__ = 'Displays HexChat-wide User Statistics'
__module_author__ = 'Vlek'
def stats(word, word_to_eol, userdata):
    """/stats handler: print the statistics summary to the active window."""
    print(getstats())
    return hexchat.EAT_ALL
def printstats(word, word_to_eol, userdata):
    """/printstats handler: say the statistics summary in the current context."""
    hexchat.command('say {}'.format(getstats()))
    return hexchat.EAT_ALL
def check_opped(ctx, nickprefixes):
    """Return True when our own nick is opped (or higher) in ``ctx``.

    ``nickprefixes`` lists mode prefixes from highest rank to lowest, so an
    index at or before '@' means op status or better.
    """
    op_idx = nickprefixes.index('@')
    nick = ctx.get_info('nick')
    me = [user for user in ctx.get_list('users') if hexchat.nickcmp(user.nick, nick) == 0][0]
    # Guard against a prefix character missing from the server's prefix list,
    # which previously raised ValueError from index().
    if me.prefix and me.prefix[0] in nickprefixes:
        return nickprefixes.index(me.prefix[0]) <= op_idx
    return False
def getstats():
    """Summarize open channels (and how many we're opped in), servers and queries."""
    channels = servers = queries = ops = 0
    for ctx in hexchat.get_list('channels'):
        if ctx.type == 1:
            servers += 1
        elif ctx.type == 2:
            channels += 1
            if check_opped(ctx.context, ctx.nickprefixes):
                ops += 1
        elif ctx.type == 3:
            queries += 1
    return 'Stats: {} channels ({} OPs), {} servers, {} queries'.format(
        channels, ops, servers, queries)
# Register the slash-command hooks with HexChat at load time.
hexchat.hook_command("stats", stats, help="/stats displays HexChat user statistics")
hexchat.hook_command("printstats", printstats, help="/printstats Says HexChat user statistics in current context")
|
daftano/interactive-tutorials | suds/client.py | Python | apache-2.0 | 25,972 | 0.002464 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{2nd generation} service proxy provides access to web services.
See I{README.txt}
"""
import suds
import suds.metrics as metrics
from cookielib import CookieJar
from suds import *
from suds.reader import DefinitionsReader
from suds.transport import TransportError, Request
from suds.transport.https import HttpAuthenticated
from suds.servicedefinition import ServiceDefinition
from suds import sudsobject
from sudsobject import Factory as InstFactory
from sudsobject import Object
from suds.resolver import PathResolver
from suds.builder import Builder
from suds.wsdl import Definitions
from suds.cache import ObjectCache
from suds.sax.document import Document
from suds.sax.parser import Parser
from suds.options import Options
from suds.properties import Unskin
from urlparse import urlparse
from copy import deepcopy
from suds.plugin import PluginContainer
from logging import getLogger
log = getLogger(__name__)
class Client(object):
    """
    A lightweight web services client.
    I{(2nd generation)} API.
    @ivar wsdl: The WSDL object.
    @type wsdl:L{Definitions}
    @ivar service: The service proxy used to invoke operations.
    @type service: L{Service}
    @ivar factory: The factory used to create objects.
    @type factory: L{Factory}
    @ivar sd: The service definition
    @type sd: L{ServiceDefinition}
    @ivar messages: The last sent/received messages.
    @type messages: str[2]
    """
    @classmethod
    def items(cls, sobject):
        """
        Extract the I{items} from a suds object much like the
        items() method works on I{dict}.
        @param sobject: A suds object
        @type sobject: L{Object}
        @return: A list of items contained in I{sobject}.
        @rtype: [(key, value),...]
        """
        return sudsobject.items(sobject)
    @classmethod
    def dict(cls, sobject):
        """
        Convert a sudsobject into a dictionary.
        @param sobject: A suds object
        @type sobject: L{Object}
        @return: A python dictionary containing the
            items contained in I{sobject}.
        @rtype: dict
        """
        return sudsobject.asdict(sobject)
    @classmethod
    def metadata(cls, sobject):
        """
        Extract the metadata from a suds object.
        @param sobject: A suds object
        @type sobject: L{Object}
        @return: The object's metadata
        @rtype: L{sudsobject.Metadata}
        """
        return sobject.__metadata__
    def __init__(self, url, **kwargs):
        """
        Read and parse the WSDL at *url* and build the service proxy.
        @param url: The URL for the WSDL.
        @type url: str
        @param kwargs: keyword arguments.
        @see: L{Options}
        """
        options = Options()
        options.transport = HttpAuthenticated()
        self.options = options
        #options.cache = ObjectCache(days=1)
        # Apply caller-supplied options before the WSDL is read so that
        # transport/cache settings take effect for the initial fetch.
        self.set_options(**kwargs)
        reader = DefinitionsReader(options, Definitions)
        self.wsdl = reader.open(url)
        # Give registered plugins a chance to inspect the parsed WSDL.
        plugins = PluginContainer(options.plugins)
        plugins.init.initialized(wsdl=self.wsdl)
        self.factory = Factory(self.wsdl)
        self.service = ServiceSelector(self, self.wsdl.services)
        self.sd = []
        for s in self.wsdl.services:
            sd = ServiceDefinition(self.wsdl, s)
            self.sd.append(sd)
        # Holds the last transmitted (tx) and received (rx) soap documents.
        self.messages = dict(tx=None, rx=None)
    def set_options(self, **kwargs):
        """
        Set options.
        @param kwargs: keyword arguments.
        @see: L{Options}
        """
        # Unskin exposes the underlying mutable properties object.
        p = Unskin(self.options)
        p.update(kwargs)
    def add_prefix(self, prefix, uri):
        """
        Add I{static} mapping of an XML namespace prefix to a namespace.
        This is useful for cases when a wsdl and referenced schemas make heavy
        use of namespaces and those namespaces are subject to changed.
        @param prefix: An XML namespace prefix.
        @type prefix: str
        @param uri: An XML namespace URI.
        @type uri: str
        @raise Exception: when prefix is already mapped.
        """
        root = self.wsdl.root
        mapped = root.resolvePrefix(prefix, None)
        if mapped is None:
            root.addPrefix(prefix, uri)
            return
        # Re-mapping the same prefix to the same URI is a harmless no-op;
        # only a conflicting mapping is an error.
        if mapped[1] != uri:
            raise Exception('"%s" already mapped as "%s"' % (prefix, mapped))
    def last_sent(self):
        """
        Get last sent I{soap} message.
        @return: The last sent I{soap} message.
        @rtype: L{Document}
        """
        return self.messages.get('tx')
    def last_received(self):
        """
        Get last received I{soap} message.
        @return: The last received I{soap} message.
        @rtype: L{Document}
        """
        return self.messages.get('rx')
    def clone(self):
        """
        Get a shallow clone of this object.
        The clone only shares the WSDL. All other attributes are
        unique to the cloned object including options.
        @return: A shallow clone.
        @rtype: L{Client}
        """
        # Uninitialized bypasses Client.__init__ so the WSDL is not
        # re-fetched/re-parsed for the clone.
        class Uninitialized(Client):
            def __init__(self):
                pass
        clone = Uninitialized()
        clone.options = Options()
        cp = Unskin(clone.options)
        mp = Unskin(self.options)
        # Deep-copy the option values so the clone's options are independent.
        cp.update(deepcopy(mp))
        clone.wsdl = self.wsdl
        clone.factory = self.factory
        clone.service = ServiceSelector(clone, self.wsdl.services)
        clone.sd = self.sd
        clone.messages = dict(tx=None, rx=None)
        return clone
    def __str__(self):
        # Python 2: delegate to the unicode representation.
        return unicode(self)
    def __unicode__(self):
        # Render the suds banner plus every service definition.
        s = ['\n']
        build = suds.__build__.split()
        s.append('Suds ( https://fedorahosted.org/suds/ )')
        s.append(' version: %s' % suds.__version__)
        s.append(' %s build: %s' % (build[0], build[1]))
        for sd in self.sd:
            s.append('\n\n%s' % unicode(sd))
        return ''.join(s)
class Factory:
"""
A factory for instantiating types defined in the wsdl
@ivar resolver: A schema type resolver.
@type resolver: L{PathResolver}
@ivar builder: A schema object builder.
@type builder: L{Builder}
"""
def __init__(self, wsdl):
"""
@param wsdl: A schema object.
@type wsdl: L{wsdl.Definitions}
"""
self.wsdl = wsdl
self.resolver = PathResolver(wsdl)
self.builder = Builder(self.resolver)
    def create(self, name):
        """
        create a WSDL type by name
        @param name: The name of a type defined in the WSDL.
        @type name: str
        @return: The requested object.
        @rtype: L{Object}
        @raise TypeNotFound: when I{name} does not resolve to a schema type.
        @raise BuildError: when building the resolved type fails.
        """
        timer = metrics.Timer()
        timer.start()
        type = self.resolver.find(name)
        if type is None:
            raise TypeNotFound(name)
        if type.enum():
            # Enumerations become an object whose attributes map each
            # enum value to its own name.
            result = InstFactory.object(name)
            for e, a in type.children():
                setattr(result, e.name, e.name)
        else:
            try:
                result = self.builder.build(type)
            except Exception, e:
                log.error("create '%s' failed", name, exc_info=True)
                raise BuildError(name, e)
        timer.stop()
        # Creation time is logged through the metrics channel for profiling.
        metrics.log.debug('%s created: %s', name, timer)
        return result
def separator(self, ps):
|
robofab-developers/fontParts | Lib/fontParts/base/point.py | Python | mit | 10,725 | 0 | from fontTools.misc import transform
from fontParts.base.base import (
BaseObject,
TransformationMixin,
PointPositionMixin,
SelectionMixin,
IdentifierMixin,
dynamicProperty,
reference
)
from fontParts.base import normalizers
from fontParts.base.deprecated import DeprecatedPoint, RemovedPoint
class BasePoint(
BaseObject,
TransformationMixin,
PointPositionMixin,
SelectionMixin,
IdentifierMixin,
DeprecatedPoint,
RemovedPoint
):
"""
A point object. This object is almost always
created with :meth:`BaseContour.appendPoint`,
the pen returned by :meth:`BaseGlyph.getPen`
or the point pen returned by :meth:`BaseGLyph.getPointPen`.
An orphan point can be created like this::
>>> point = RPoint()
"""
copyAttributes = (
"type",
"smooth",
"x",
"y",
"name"
)
def _reprContents(self):
contents = [
"%s" % self.type,
("({x}, {y})".format(x=self.x, y=self.y)),
]
if self.name is not None:
contents.append("name='%s'" % self.name)
if self.smooth:
contents.append("smooth=%r" % self.smooth)
return contents
# -------
# Parents
# -------
# Contour
_contour = None
contour = dynamicProperty("contour",
"The point's parent :class:`BaseContour`.")
def _get_contour(self):
if self._contour is None:
return None
return self._contour()
def _set_contour(self, contour):
if self._contour is not None:
raise AssertionError("contour for point already set")
if contour is not None:
contour = reference(contour)
self._contour = contour
# Glyph
glyph = dynamicProperty("glyph", "The point's parent :class:`BaseGlyph`.")
def _get_glyph(self):
if self._contour is None:
return None
return self.contour.glyph
# Layer
layer = dynamicProperty("layer", "The point's parent :class:`BaseLayer`.")
def _get_layer(self):
if self._contour is None:
return None
return self.glyph.layer
# Font
font = dynamicProperty("font", "The point's parent :class:`BaseFont`.")
def _get_font(self):
if self._contour is None:
return None
return self.glyph.font
# ----------
# Attributes
# ----------
# type
type = dynamicProperty(
"base_type",
"""
The point type defined with a :ref:`type-string`.
The possible types are:
+----------+---------------------------------+
| move | An on-curve move to. |
+----------+---------------------------------+
| line | An on-curve line to. |
+----------+---------------------------------+
| curve | An on-curve cubic curve to. |
+----------+---------------------------------+
| qcurve | An on-curve quadratic curve to. |
+----------+---------------------------------+
| offcurve | An off-curve. |
+----------+---------------------------------+
""")
def _get_base_type(self):
value = self._get_type()
value = normalizers.normalizePointType(value)
return value
def _set_base_type(self, value):
value = n | ormalizers.normalizePointType(value)
self._set_type(value)
def _get_type(self):
"""
This is the environment implementation
of :attr:`BasePoint.type`. This must
return a :ref:`type-string` defining
the point type.
Subclasses must override this method.
"""
| self.raiseNotImplementedError()
def _set_type(self, value):
"""
This is the environment implementation
of :attr:`BasePoint.type`. **value**
will be a :ref:`type-string` defining
the point type. It will have been normalized
with :func:`normalizers.normalizePointType`.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# smooth
smooth = dynamicProperty(
"base_smooth",
"""
A ``bool`` indicating if the point is smooth or not. ::
>>> point.smooth
False
>>> point.smooth = True
"""
)
def _get_base_smooth(self):
value = self._get_smooth()
value = normalizers.normalizeBoolean(value)
return value
def _set_base_smooth(self, value):
value = normalizers.normalizeBoolean(value)
self._set_smooth(value)
def _get_smooth(self):
"""
This is the environment implementation of
:attr:`BasePoint.smooth`. This must return
a ``bool`` indicating the smooth state.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
def _set_smooth(self, value):
"""
This is the environment implementation of
:attr:`BasePoint.smooth`. **value** will
be a ``bool`` indicating the smooth state.
It will have been normalized with
:func:`normalizers.normalizeBoolean`.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# x
x = dynamicProperty(
"base_x",
"""
The x coordinate of the point.
It must be an :ref:`type-int-float`. ::
>>> point.x
100
>>> point.x = 101
"""
)
def _get_base_x(self):
value = self._get_x()
value = normalizers.normalizeX(value)
return value
def _set_base_x(self, value):
value = normalizers.normalizeX(value)
self._set_x(value)
def _get_x(self):
"""
This is the environment implementation of
:attr:`BasePoint.x`. This must return an
:ref:`type-int-float`.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
def _set_x(self, value):
"""
This is the environment implementation of
:attr:`BasePoint.x`. **value** will be
an :ref:`type-int-float`.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# y
y = dynamicProperty(
"base_y",
"""
The y coordinate of the point.
It must be an :ref:`type-int-float`. ::
>>> point.y
100
>>> point.y = 101
"""
)
def _get_base_y(self):
value = self._get_y()
value = normalizers.normalizeY(value)
return value
def _set_base_y(self, value):
value = normalizers.normalizeY(value)
self._set_y(value)
def _get_y(self):
"""
This is the environment implementation of
:attr:`BasePoint.y`. This must return an
:ref:`type-int-float`.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
def _set_y(self, value):
"""
This is the environment implementation of
:attr:`BasePoint.y`. **value** will be
an :ref:`type-int-float`.
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# --------------
# Identification
# --------------
# index
index = dynamicProperty(
"base_index",
"""
The index of the point within the ordered
list of the parent glyph's point. This
attribute is read only. ::
>>> point.index
0
"""
)
def _get_base_index(self):
value = self._get_index()
value = normalizers.normalizeIndex(value)
return value
def _get_index(self):
"""
Get the point's index.
This must return an ``int``.
Subclasses may override this method.
"""
contour = self.contour
|
georgeyk/loafer | tests/test_routes.py | Python | mit | 6,121 | 0.000817 | from unittest import mock
import pytest
from asynctest import CoroutineMock
from loafer.message_translators import StringMessageTranslator
from loafer.routes import Route
def test_provider(dummy_provider):
route = Route(dummy_provider, handler=mock.Mock())
assert route.provider is dummy_provider
def test_provider_invalid():
with pytest.raises(TypeError):
Route('invalid-provider', handler=mock.Mock())
def test_name(dummy_provider):
route = Route(dummy_provider, handler=mock.Mock(), name='foo')
assert route.name == 'foo'
def test_message_translator(dummy_provider):
translator = StringMessageTranslator()
route = Route(dummy_provider, handler=mock.Mock(), message_translator=translator)
assert isinstance(route.message_translator, StringMessageTranslator)
def test_default_message_translator(dummy_provider):
route = Route(dummy_provider, handler=mock.Mock())
assert route.message_translator is None
def test_message_translator_invalid(dummy_provider):
with pytest.raises(TypeError):
Route(dummy_provider, handler=mock.Mock(), message_translator='invalid')
def test_apply_message_translator(dummy_provider):
    """apply_message_translator must delegate to the translator and return its result."""
    translator = StringMessageTranslator()
    fake_result = {'content': 'foobar', 'metadata': {}}
    translator.translate = mock.Mock(return_value=fake_result)
    route = Route(dummy_provider, mock.Mock(), message_translator=translator)
    translated = route.apply_message_translator('message')
    assert translated['content'] == 'foobar'
    assert translated['metadata'] == {}
    assert translator.translate.called
    translator.translate.assert_called_once_with('message')
def test_apply_message_translator_error(dummy_provider):
translator = StringMessageTranslator()
translator.translate = mock.Mock(return_value={'content': '', 'metadata': {}})
route = Route(dummy_provider, mock.Mock(), message_translator=translator)
with pytest.raises(ValueError):
route.apply_message_translator('message')
assert translator.translate.called
translator.translate.assert_called_once_with('message')
@pytest.mark.asyncio
async def test_error_handler_unset(dummy_provider):
route = Route(dummy_provider, mock.Mock())
exc = TypeError()
exc_info = (type(exc), exc, None)
result = await route.error_handler(exc_info, 'whatever')
assert result is False
def test_error_handler_invalid(dummy_provider):
with pytest.raises(TypeError):
Route(dummy_provider, handler=mock.Mock(), error_handler='invalid')
@pytest.mark.asyncio
async def test_error_handler(dummy_provider):
    """A plain-function error handler receives the exc_info tuple and message."""
    captured = {}
    def error_handler(exc_info, message):
        captured['exc_info'] = exc_info
        captured['message'] = message
        return True
    # A real function (not a Mock) is required here: error handlers are
    # checked with asyncio.iscoroutinefunction(), and Mock objects would
    # be misdetected as coroutines.
    route = Route(dummy_provider, mock.Mock(), error_handler=error_handler)
    error = TypeError()
    exc_info = (TypeError, error, 'traceback')
    result = await route.error_handler(exc_info, 'whatever')
    assert result is True
    assert captured['exc_info'] == exc_info
    assert captured['message'] == 'whatever'
@pytest.mark.asyncio
async def test_error_handler_coroutine(dummy_provider):
error_handler = CoroutineMock(return_value=True)
route = Route(dummy_provider, mock.Mock(), error_handler=error_handler)
exc = TypeError()
exc_info = (type(exc), exc, 'traceback')
result = await route.error_handler(exc_info, 'whatever')
assert result is True
assert error_handler.called
error_handler.assert_called_once_with(exc_info, 'whatever')
@pytest.mark.asyncio
async def test_handler_class_based(dummy_provider):
class handler:
async def handle(self, *args, **kwargs):
pass
handler = handler()
route = Route(dummy_provider, handler=handler)
assert route.handler == handler.handle
@pytest.mark.asyncio
async def test_handler_class_based_invalid(dummy_provider):
class handler:
pass
handler = handler()
with pytest.raises(ValueError):
Route(dummy_provider, handler=handler)
@pytest.mark.asyncio
async def test_handler_invalid(dummy_provider):
with pytest.raises(ValueError):
Route(dummy_provider, 'invalid-handler')
def test_route_stop(dummy_provider):
dummy_provider.stop = mock.Mock()
route = Route(dummy_provider, handler=mock.Mock())
route.stop()
assert dummy_provider.stop.called
def test_route_stop_with_handler_stop(dummy_provider):
    """Route.stop() must stop the provider and also call the handler's stop()."""
    class Handler:
        def handle(self, *args):
            pass
    dummy_provider.stop = mock.Mock()
    handler_instance = Handler()
    handler_instance.stop = mock.Mock()
    route = Route(dummy_provider, handler_instance)
    route.stop()
    assert dummy_provider.stop.called
    assert handler_instance.stop.called
# FIXME: Improve all test_deliver* tests
@pytest.mark.asyncio
async def test_deliver(dummy_provider):
attrs = {}
def test_handler(*args, **kwargs):
attrs['args'] = args
attrs['kwargs'] = kwargs
return True
route = Route(dummy_provider, handler=test_handler)
message = 'test'
result = await route.deliver(message)
assert result is True
assert message in attrs['args']
@pytest.mark.asyncio
async def test_deliver_with_coroutine(dummy_provider):
mock_handler = CoroutineMock(return_value=False)
route = Route(dummy_provider, mock_handler)
message = 'test'
result = await route.deliver(message)
assert result is False
assert mock_handler.called
assert message in mock_handler.call_args[0]
@pytest.mark.asyncio
async def test_deliver_with_message_translator(dummy_provider):
mock_handler = CoroutineMock(return_value=True)
route = Route(dummy_provider, mock_handler)
route.apply_message_translator = mock.Mock(return_value={'content': 'whatever', 'metadata': {}})
result = await route.deliver('test')
assert result is True
assert route.apply_message_translator.called
assert mock_handler.called
mock_handler.assert_called_once_with('whatever', {})
|
OpenGov/og-python-utils | tests/loggers_default_test.py | Python | mit | 2,700 | 0.011852 | # This import fixes sys.path issues
from .parentpath import *
import os
import re
import unittest
import logging
import StringIO
from ogutils.loggers import default
from ogutils.system import streams
LOCAL_LOG_DIR = os.path.join(os.path.dirname(__file__), 'logs')
class FlaskLoggerTest(unittest.TestCase):
    """Exercise ogutils' default logger: level filtering plus mirroring of
    records to console.log and to the redirected stdout/stderr streams."""

    def clear_logs(self):
        """Delete all files in the local log directory (directories kept)."""
        for fname in os.listdir(LOCAL_LOG_DIR):
            file_path = os.path.join(LOCAL_LOG_DIR, fname)
            if os.path.isfile(file_path):
                os.unlink(file_path)

    def read_console_log(self):
        """Return the full current contents of console.log."""
        with open(os.path.join(LOCAL_LOG_DIR, 'console.log'), 'r') as console:
            return console.read()

    def _count_log_lines(self, text):
        """Number of well-formed '[timestamp] Log Me!' lines in *text*."""
        return len(re.findall(self.log_matcher, text))

    def setUp(self):
        if not os.path.exists(LOCAL_LOG_DIR):
            os.makedirs(LOCAL_LOG_DIR)
        self.clear_logs()
        # Raw string: the regex escapes (\[, \d) are not valid string escapes.
        self.log_matcher = re.compile(
            r'\[\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d,\d\d\d\] Log Me!\n')
        self.logger = default.build_default_logger(
            'default_logger',
            log_level=logging.INFO,
            log_dir=LOCAL_LOG_DIR)

    def tearDown(self):
        # Drop the handlers so the next test's logger does not double-log.
        self.logger.handlers = []
        self.clear_logs()

    def test_logger_default_level(self):
        # DEBUG is below the configured INFO level and must be filtered out.
        self.logger.debug('Skip me')
        # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
        self.assertEqual(self.read_console_log(), '')

    def test_logger_stdout(self):
        stdout = StringIO.StringIO()
        with streams.StdRedirector(stdout=stdout):
            self.assertEqual(self._count_log_lines(self.read_console_log()), 0)
            self.logger.info('Log Me!')
            self.assertEqual(self._count_log_lines(self.read_console_log()), 1)
            self.assertEqual(self._count_log_lines(stdout.getvalue()), 1)
            self.logger.info('Log Me!')
            self.assertEqual(self._count_log_lines(self.read_console_log()), 2)
            self.assertEqual(self._count_log_lines(stdout.getvalue()), 2)

    def test_logger_stderr(self):
        stderr = StringIO.StringIO()
        with streams.StdRedirector(stderr=stderr):
            self.assertEqual(self._count_log_lines(self.read_console_log()), 0)
            self.logger.error('Log Me!')
            self.assertEqual(self._count_log_lines(self.read_console_log()), 1)
            self.assertEqual(self._count_log_lines(stderr.getvalue()), 1)
            self.logger.error('Log Me!')
            self.assertEqual(self._count_log_lines(self.read_console_log()), 2)
            self.assertEqual(self._count_log_lines(stderr.getvalue()), 2)
if __name__ == "__main__":
unittest.main()
|
heysion/clone-cliapp | example3.py | Python | gpl-2.0 | 1,359 | 0.000736 | # Copyright (C) 2012 Lars Wirzenius
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be | useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''Example for cliapp framework.
Demonstra | te the compute_setting_values method.
'''
import cliapp
import urlparse
class ExampleApp(cliapp.Application):

    '''Fill in the --protocol setting from --url.

    Demonstrates the compute_setting_values hook: when the user does not
    give --protocol explicitly, it is derived from the scheme of --url.
    (The previous docstring, "A little fgrep-like tool", was copied from
    another example and did not describe this application.)
    '''

    def add_settings(self):
        # Two string settings; --protocol is normally computed, not given.
        self.settings.string(['url'], 'a url')
        self.settings.string(['protocol'], 'the protocol')

    def compute_setting_values(self, settings):
        # Derive the protocol only when the user has not set one explicitly.
        if not self.settings['protocol']:
            scheme = urlparse.urlparse(self.settings['url'])[0]
            self.settings['protocol'] = scheme

    def process_args(self, args):
        # This example does no real work; only setting computation matters.
        return
# Instantiate and run the application; cliapp handles option parsing.
app = ExampleApp()
app.run()
|
ktan2020/legacy-automation | win/Lib/unittest/case.py | Python | mit | 43,508 | 0.001655 | """Test case implementation"""
import collections
import sys
import functools
import difflib
import pprint
import re
import warnings
from . import result
from .util import (
strclass, safe_repr, unorderable_list_difference,
_count_diff_all_purpose, _count_diff_hashable
)
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestResult.skip() or one of the skipping decorators
instead of raising this directly.
"""
pass
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
super(_ExpectedFailure, self).__init__()
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
pass
def _id(obj):
    # Identity decorator: returned by skipIf/skipUnless when no skip applies.
    return obj
def skip(reason):
    """
    Unconditionally skip a test.
    """
    def decorator(test_item):
        # TestCase subclasses are flagged in place; anything else (a test
        # function or method) is replaced by a wrapper raising SkipTest.
        is_case_class = isinstance(test_item, type) and issubclass(test_item, TestCase)
        if not is_case_class:
            @functools.wraps(test_item)
            def skip_wrapper(*args, **kwargs):
                raise SkipTest(reason)
            test_item = skip_wrapper
        test_item.__unittest_skip__ = True
        test_item.__unittest_skip_why__ = reason
        return test_item
    return decorator
def skipIf(condition, reason):
    """
    Skip a test if the condition is true.
    """
    # Truthy condition -> skipping decorator; otherwise the identity decorator.
    return skip(reason) if condition else _id
def skipUnless(condition, reason):
    """
    Skip a test unless the condition is true.
    """
    # Truthy condition -> run normally; otherwise decorate with skip().
    return _id if condition else skip(reason)
def expectedFailure(func):
    # Decorator inverting the result of *func*: an exception from the test is
    # converted to _ExpectedFailure (reported as an expected failure), while a
    # clean run raises _UnexpectedSuccess instead.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except Exception:
            raise _ExpectedFailure(sys.exc_info())
        raise _UnexpectedSuccess
    return wrapper
class _AssertRaisesContext(object):
    """A context manager used to implement TestCase.assertRaises* methods."""
    def __init__(self, expected, test_case, expected_regexp=None):
        # expected: the exception class (or tuple of classes) that must be raised.
        # expected_regexp: optional pattern the exception's str() must match.
        self.expected = expected
        self.failureException = test_case.failureException
        self.expected_regexp = expected_regexp
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, tb):
        if exc_type is None:
            # Nothing was raised inside the with-block: that is a test failure.
            try:
                exc_name = self.expected.__name__
            except AttributeError:
                # A tuple of exception classes has no __name__.
                exc_name = str(self.expected)
            raise self.failureException(
                "{0} not raised".format(exc_name))
        if not issubclass(exc_type, self.expected):
            # let unexpected exceptions pass through
            return False
        self.exception = exc_value # store for later retrieval
        if self.expected_regexp is None:
            # Returning True suppresses the expected exception.
            return True
        expected_regexp = self.expected_regexp
        if isinstance(expected_regexp, basestring):
            expected_regexp = re.compile(expected_regexp)
        if not expected_regexp.search(str(exc_value)):
            raise self.failureException('"%s" does not match "%s"' %
                (expected_regexp.pattern, str(exc_value)))
        return True
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
| __init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by | parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
# This attribute determines whether long messages (including repr of
# objects used in assert methods) will be printed on failure in *addition*
# to any explicit message passed.
longMessage = False
# This attribute sets the maximum length of a diff in failure messages
# by assert methods using difflib. It is looked up as an instance attribute
# so can be configured by individual tests if required.
maxDiff = 80*8
# If a string is longer than _diffThreshold, use normal comparison instead
# of difflib. See #11763.
_diffThreshold = 2**16
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._resultForDoCleanups = None
try:
testMethod = getattr(self, methodName)
except AttributeError:
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, self.assertDictEqual)
self.addTypeEqualityFunc(list, self.assertListEqual)
self.addTypeEqualityFunc(tuple, self.assertTupleEqual)
self.addTypeEqualityFunc(set, self.assertSetEqual)
self.addTypeEqualityFunc(frozenset, self.assertSetEqual)
self.addTypeEqualityFunc(unicode, self.assertMultiLineEqual)
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture befor |
ujdhesa/unisubs | libs/vidscraper/sites/wistia.py | Python | agpl-3.0 | 4,955 | 0.005651 | # Miro - an RSS based video player application
# Copyright 2009 - Participatory Culture Foundation
#
# This file is part of vidscraper.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import re
import urllib
from lxml import builder
from lxml import etree
from lxml.html import builder as E
from lxml.html import tostring
import oauth2
import simplejson
from vidscraper.decorators import provide_shortmem, parse_url, returns_unicode
from vidscraper import util
from vidscraper.errors import Error
from django.conf import settings
class WistiaError(Error):
pass
WISTIA_OEMBED_API_URL = 'http://fast.wistia.com/oembed?embedType=seo&url='
#'http://fast.wistia.com/oembed?url=http://home.wistia.com/medias/'
EMaker = builder.ElementMaker()
EMBED = EMaker.embed
EMBED_WIDTH = 425
EMBED_HEIGHT = 344
def get_shortmem(url):
    """Fetch the Wistia oEmbed metadata for *url*, with up to 3 attempts.

    Returns a dict of the form {'oembed': <decoded API response>}.
    Raises WistiaError when the API cannot be reached or returns
    unparseable data after all retries.
    """
    shortmem = {}
    # Also validates the URL: a non-matching URL raises AttributeError here.
    video_id = WISTIA_REGEX.match(url).groupdict()['video_id']
    # WISTIA_OEMBED_API_URL already ends with '...&url=', so the quoted URL is
    # appended directly (the previous "'%s?%s'" produced a malformed URL with
    # a stray '?').
    apiurl = WISTIA_OEMBED_API_URL + urllib.quote(url)
    finalexcept = None
    backoff = util.random_exponential_backoff(2)
    for attempt in range(3):
        try:
            # Fixed: the response variable was misspelled ('reponse'), so the
            # read below always raised NameError and every attempt failed.
            response = urllib.urlopen(apiurl)
            api_raw_data = response.read()
            api_data = simplejson.loads(api_raw_data)
        except Exception as e:
            finalexcept = e
            # Wait before retrying; previously this call sat after break/
            # continue and was unreachable, so the backoff never advanced.
            backoff.next()
        else:
            shortmem['oembed'] = api_data
            break
    if 'oembed' in shortmem:
        return shortmem
    errmsg = u'Wistia API error : '
    if finalexcept is not None:
        errmsg += str(finalexcept)
    else:
        errmsg += u' Unrecognized error. Sorry about that, chief.'
    # Surface the failure instead of silently returning None (errmsg used to
    # be built and then discarded while WistiaError was never raised).
    raise WistiaError(errmsg)
def parse_api(scraper_func, shortmem=None):
    """Decorator: guarantee the wrapped scraper receives a populated shortmem.

    When the caller supplies no (or an empty) shortmem, the oEmbed data is
    fetched once via get_shortmem() before delegating to *scraper_func*.
    """
    def new_scraper_func(url, shortmem={}, *args, **kwargs):
        populated = shortmem if shortmem else get_shortmem(url)
        return scraper_func(url, shortmem=populated, *args, **kwargs)
    return new_scraper_func
@parse_api
@returns_unicode
def scrape_title(url, shortmem={}):
    """Return the video title reported by the oEmbed API (u'' when absent)."""
    try:
        title = shortmem['oembed']['title']
    except KeyError:
        return u''
    return title or u''
@parse_api
@returns_unicode
def scrape_description(url, shortmem={}):
    """Return a cleaned description; oEmbed provides none, so the title is reused."""
    try:
        raw = shortmem['oembed']['title']
    except KeyError:
        raw = ''
    return util.clean_description_html(raw)
@parse_api
@returns_unicode
def get_embed(url, shortmem={}, width=EMBED_WIDTH, height=EMBED_HEIGHT):
    """Return the SEO embed HTML from the oEmbed response.

    width/height are accepted for interface compatibility with the other
    suites, but the oEmbed HTML is returned as-is.
    """
    # @parse_api added for consistency with the other scrapers here: without
    # it, an empty shortmem raised KeyError because the API was never queried.
    return shortmem['oembed']['html']
@parse_api
@returns_unicode
def get_thumbnail_url(url, shortmem={}):
    # Thumbnail image URL straight from the oEmbed payload.
    return shortmem['oembed']['thumbnail_url']
@parse_api
@returns_unicode
def get_user(url, shortmem={}):
    # The oEmbed 'provider_name' is used as the user/uploader name.
    return shortmem['oembed']['provider_name']
@parse_api
@returns_unicode
def get_user_url(url, shortmem={}):
    """Return the provider URL (oEmbed 'provider_url')."""
    oembed_data = shortmem['oembed']
    return oembed_data['provider_url']
@parse_api
@returns_unicode
def get_duration(url, shortmem={}):
    """Return the video duration reported by the oEmbed endpoint."""
    oembed_data = shortmem['oembed']
    return oembed_data['duration']
# Matches wistia.com / wi.st / wistia.net media and iframe-embed URLs;
# the trailing path component is captured as 'video_id'.
WISTIA_REGEX = re.compile(r'https?://(.+)?(wistia\.com|wi\.st|wistia\.net)/(medias|embed/iframe)/(?P<video_id>\w+)')
# Scraper suite consumed by the framework: the URL matcher plus field getters.
# NOTE(review): 'order' lists 'file_url' but no such func is registered here —
# confirm whether that entry is vestigial.
SUITE = {
    'regex': WISTIA_REGEX,
    'funcs': {
        'title': scrape_title,
        'description': scrape_description,
        'embed': get_embed,
        'thumbnail_url': get_thumbnail_url,
        'user': get_user,
        'user_url': get_user_url,
        'duration': get_duration
    },
    'order': ['title', 'description', 'file_url', 'embed']}
|
cl0ne/vital-records-registry | registry/vital_records/tests.py | Python | gpl-3.0 | 7,137 | 0.002522 | from contextlib import contextmanager
from django.core.exceptions import ValidationError
from django.test import TestCase
from django.utils import timezone
from .models import Person, Residence, RegistryUser, Registrar
from .models import BirthNote, BirthPlace, ApplicantInfo, BirthNoteLaw, BirthEvidence
class TestCaseExt:
    """Extra assertion helpers intended to be mixed into TestCase subclasses."""
    @contextmanager
    def assertDoesntRaise(self, cls):
        """Fail the test if the guarded block raises an exception of type *cls*."""
        try:
            yield
        except cls:
            self.fail('%s has been raised' % cls.__name__)
class BirthNoteIntegrationTests(TestCase):
    """End-to-end check that a complete BirthNote object graph can be created.

    Fix: repaired garbled characters in the source (stray ' | ' sequences
    split 'region' and the literal 1970, making the file unparsable).
    """
    def test_create_hierarchy(self):
        home = Residence.objects.create(
            postal_code=1024, country='UA', region='Kyivska oblast', district='Obukhivskiy',
            city='Obukhiv', street='Long', house=13, room=37
        )
        self.assertIsNotNone(home)
        father_birth_place = BirthPlace.objects.create(
            country='UA', region='Volynska oblast', district='Old republic', city='Urbanstadt'
        )
        self.assertIsNotNone(father_birth_place)
        father = Person.objects.create(
            first_name='Ivan', last_name='Ivanov', patronymic='Ivanovich',
            gender=Person.MALE, residence=home, birth_place=father_birth_place,
            date_of_birth=timezone.datetime(year=1970, month=3, day=5).date(),
            family_status=Person.MARRIED, military_service=False
        )
        self.assertIsNotNone(father)
        mother_birth_place = BirthPlace.objects.create(
            country='UA', region='Chernihivska oblast', district='Rare gems', city='Golden rain'
        )
        self.assertIsNotNone(mother_birth_place)
        mother = Person.objects.create(
            first_name='Anna', last_name='Ivanova', patronymic='Mikhailivna',
            gender=Person.FEMALE, residence=home, birth_place=mother_birth_place,
            date_of_birth=timezone.datetime(year=1975, month=7, day=21).date(),
            family_status=Person.MARRIED, military_service=True
        )
        self.assertIsNotNone(mother)
        applicant_residence = Residence.objects.create(
            postal_code=256, country='UA', region='Kyivska oblast', district='Solomyanskiy',
            city='Kyiv', street='Vandy Vasilevskoi', house=50, room=2
        )
        self.assertIsNotNone(applicant_residence)
        applicant = ApplicantInfo.objects.create(
            first_name='John', last_name='Doe', patronymic='Henry', residence=applicant_residence
        )
        self.assertIsNotNone(applicant)
        law = BirthNoteLaw.objects.create(law='Law #133 for some birth-related case')
        self.assertIsNotNone(law)
        child_birth_place = BirthPlace.objects.create(
            country='UA', region='Kyivska oblast', district='Obukhivskiy', city='Obukhiv'
        )
        self.assertIsNotNone(child_birth_place)
        birth_evidences = (
            BirthEvidence.objects.create(
                title='Important evidence', number=4352,
                issue_date=timezone.now().date(), issuer='Issuer info goes here'
            ),
            BirthEvidence.objects.create(
                title='Other evidence', number=845,
                issue_date=timezone.now().date() - timezone.timedelta(days=1), issuer='Some issuer'
            )
        )
        self.assertTrue(all(birth_evidences))
        registrar_residence = Residence.objects.create(
            postal_code=1024, country='UA', region='Kyivska oblast', district='Obukhivskiy',
            city='Obukhiv', street='Long', house=13, room=37
        )
        self.assertIsNotNone(registrar_residence)
        registrar = Registrar.objects.create(name='Registrar name', residence=registrar_residence)
        self.assertIsNotNone(registrar)
        note_author = RegistryUser.objects.create_user('test', 'local@email.here', 'test-user')
        self.assertIsNotNone(note_author)
        note = BirthNote.objects.create(
            compose_date=timezone.now().date(), created_by=note_author,
            registrar=registrar, official_info='Very important person', language='Ukrainian',
            note_number=65535, deadline_passed=False, applicant=applicant, law=law, stillborn=False,
            children_born_count=2, child_number=1,
            birth_date=timezone.now().date() - timezone.timedelta(days=1),
            birth_place=child_birth_place,
            child_gender=Person.FEMALE, child_name='jane', child_last_name='Ivanova',
            child_patronymic='Ivanovna',
            father_info_reason='Nothing'
        )
        note.parents.add(father, mother)
        note.birth_evidences.add(*birth_evidences)
        self.assertIsNotNone(note)
        self.assertEqual(note, BirthNote.objects.first())
class BirthNoteValidationTests(TestCase, TestCaseExt):
    # Model-level validation rules of BirthNote, exercised on a fixture row.
    fixtures = ['common', 'birth_note']
    def test_child_number_not_less_child_count(self):
        """child_number must be within 1..children_born_count."""
        n = BirthNote.objects.first()
        n.children_born_count = 2
        n.child_number = 3
        with self.assertRaises(ValidationError):
            n.full_clean()
        n.child_number = 0
        with self.assertRaises(ValidationError):
            n.full_clean()
        n.child_number = 2
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
    def test_child_birth_date_not_future(self):
        """birth_date may not lie after compose_date (nor in the future)."""
        n = BirthNote.objects.first()
        n.compose_date = timezone.now().date()
        n.birth_date = timezone.now().date()
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
        n.birth_date = timezone.now().date() + timezone.timedelta(days=1)
        with self.assertRaises(ValidationError):
            n.full_clean()
        n.birth_date = timezone.now().date() - timezone.timedelta(days=1)
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
        n.compose_date = timezone.now().date() - timezone.timedelta(days=1)
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
        # birth a day after composition must be rejected
        n.birth_date = timezone.now().date()
        with self.assertRaises(ValidationError):
            n.full_clean()
    def test_children_count_greater_than_zero(self):
        """children_born_count must be strictly positive."""
        n = BirthNote.objects.first()
        n.children_born_count = -1
        with self.assertRaises(ValidationError):
            n.full_clean()
        n.children_born_count = 0
        with self.assertRaises(ValidationError):
            n.full_clean()
        n.children_born_count = 1
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
    def test_compose_date_cant_be_future(self):
        """compose_date may be today or in the past, never in the future."""
        n = BirthNote.objects.first()
        n.compose_date = timezone.now().date()
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
        n.compose_date = timezone.now().date() + timezone.timedelta(days=1)
        with self.assertRaises(ValidationError):
            n.full_clean()
        n.compose_date = timezone.now().date() - timezone.timedelta(days=1)
        with self.assertDoesntRaise(ValidationError):
            n.full_clean()
|
Scandie/openprocurement.tender.esco | openprocurement/tender/esco/views/award_complaint_document.py | Python | apache-2.0 | 769 | 0.005202 | # -*- coding: utf-8 -*-
from openprocurement.tender.core.utils import optendersresource
from openprocurement.tender.openeu.views.award_complaint_document import TenderEUAwardComplaintDocumentResource


# Registers the ESCO EU award-complaint-document endpoints; all behavior is
# inherited from the OpenEU resource.  (Repaired garbled ' | ' artifacts in
# the import line and the decorator arguments.)
@optendersresource(name='esco.EU:Tender Award Complaint Documents',
                   collection_path='/tenders/{tender_id}/awards/{award_id}/complaints/{complaint_id}/documents',
                   path='/tenders/{tender_id}/awards/{award_id}/complaints/{complaint_id}/documents/{document_id}',
                   procurementMethodType='esco.EU',
                   description="Tender award complaint documents")
class TenderESCOEUAwardComplaintDocumentResource(TenderEUAwardComplaintDocumentResource):
    """ Tender ESCO EU Award Complaint Document Resource """
|
google-code-export/evennia | src/help/manager.py | Python | bsd-3-clause | 3,168 | 0.000316 | """
Custom manager for HelpEntry objects.
"""
from django.db import models
from src.utils import logger, utils
__all__ = ("HelpEntryManager",)
class HelpEntryManager(models.Manager):
    """
    This HelpEntryManager implements methods for searching
    and manipulating HelpEntries directly from the database.
    These methods will all return database objects
    (or QuerySets) directly.

    Evennia-specific:
    find_topicmatch
    find_apropos
    find_topicsuggestions
    find_topics_with_category
    all_to_category
    search_help (equivalent to ev.search_helpentry)
    """
    def find_topicmatch(self, topicstr, exact=False):
        """
        Searches for matching topics based on player's input.

        Tries, in order: dbref, exact key, prefix, then substring match.
        """
        # Repaired garbled source: was 'uti | ls.dbref(topicstr)'.
        dbref = utils.dbref(topicstr)
        if dbref:
            return self.filter(id=dbref)
        topics = self.filter(db_key__iexact=topicstr)
        if not topics and not exact:
            topics = self.filter(db_key__istartswith=topicstr)
            if not topics:
                topics = self.filter(db_key__icontains=topicstr)
        return topics
    def find_apropos(self, topicstr):
        """
        Do a very loose search, returning all help entries containing
        the search criterion in their titles.
        """
        return self.filter(db_key__icontains=topicstr)
    def find_topicsuggestions(self, topicstr):
        """
        Do a fuzzy match, preferably within the category of the
        current topic.
        """
        return self.filter(db_key__icontains=topicstr).exclude(db_key__iexact=topicstr)
    def find_topics_with_category(self, help_category):
        """
        Search topics having a particular category
        """
        return self.filter(db_help_category__iexact=help_category)
    def get_all_topics(self):
        """
        Return all topics.
        """
        return self.all()
    def get_all_categories(self, pobject):
        """
        Return all defined category names with at least one
        topic in them.

        NOTE(review): *pobject* is unused; kept for interface compatibility.
        """
        return list(set(topic.help_category for topic in self.all()))
    def all_to_category(self, default_category):
        """
        Shifts all help entries in database to default_category.
        This action cannot be reverted. It is used primarily by
        the engine when importing a default help database, making
        sure this ends up in one easily separated category.
        """
        topics = self.all()
        for topic in topics:
            topic.help_category = default_category
            topic.save()
        string = "Help database moved to category %s" % default_category
        logger.log_infomsg(string)
    def search_help(self, ostring, help_category=None):
        """
        Retrieve a search entry object.

        ostring - the help topic to look for
        category - limit the search to a particular help topic
        """
        ostring = ostring.strip().lower()
        if help_category:
            return self.filter(db_key__iexact=ostring,
                               db_help_category__iexact=help_category)
        else:
            return self.filter(db_key__iexact=ostring)
|
repleo/bounca | certificate_engine/apps.py | Python | apache-2.0 | 126 | 0 | """App name"""
from django.apps import AppConfig


class CertificateEngineConfig(AppConfig):
    """Django app configuration for the certificate_engine app."""

    # Dotted module path Django uses to register the app.
    # (Repaired garbled ' | ' artifacts in the import and class line.)
    name = "certificate_engine"
|
rocky/python-uncompyle6 | test/simple_source/stmts/00_pass.py | Python | gpl-3.0 | 38 | 0 | # | Tests:
# assi | gn ::= expr store
pass
|
superdesk/superdesk-aap | server/aap/macros/abs_indicators.py | Python | agpl-3.0 | 6,529 | 0.00337 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import requests
import json
import re
from flask import current_app as app
import time
import logging
from flask import render_template_string
from superdesk import get_resource_service
from superdesk.utils import config
# ABS (Australian Bureau of Statistics) SDMX dataset identifiers and the
# matching placeholder tokens that may appear in story text.
cpi_url = 'CPI/2.50.999901.20.Q'
cpi_token = '__CPI__'
unemployment_url = 'LF/0.14.3.1599.20.M'
unemployment_token = '__UNEMPLOYMENT__'
lending_url = 'HF/3.0.99.20.140_1.M'
lending_token = '__LENDING__'
retail_url = 'RT/0.1.20.20.M'
retail_trade_token = '__RETAIL_TRADE__'
bop_url = 'BOP/1.100.20.Q'
bop_token = '__BOP__'
# token -> dataset-id lookup.  NOTE(review): bop_token/bop_url are defined
# above but not registered here — confirm whether that is intentional.
token_map = {cpi_token: cpi_url, unemployment_token: unemployment_url, lending_token: lending_url,
             retail_trade_token: retail_url}
logger = logging.getLogger(__name__)
def expand_token(token, item, template_map):
    """Resolve one ABS placeholder token against the ABS SDMX web service.

    Rewrites the token in the item's body/headline/abstract into a
    jinja-safe name and fills *template_map* with the latest observation,
    its period, the previous value/period and a rose/fell adjective.
    (Repaired garbled ' | ' artifacts that split identifiers.)
    """
    url_prefix = app.config.get('ABS_WEB_SERVICE_URL')
    abs_web_service_token = app.config.get('ABS_WEB_SERVICE_TOKEN')
    url_suffix = '/all?dimensionAtObservation=allDimensions&detail=DataOnly'
    # convert the token in the item to one that is jinja compliant
    jinja_token = re.sub('\\.|/|\\+|#', '_', token)
    item['body_html'] = item.get('body_html').replace(token, jinja_token)
    item['headline'] = item.get('headline').replace(token, jinja_token)
    item['abstract'] = item.get('abstract').replace(token, jinja_token)
    # Get the token for the primary value
    temp_token = re.sub('\\.|/|\\+', '_', token)
    value_token = temp_token.split('#')[0] + '__' if '#' in token else temp_token
    # If we have handled the value we don't need to do it again
    if template_map.get(value_token):
        return
    data_identifier = token_map.get(token).split('#')[0] if '#' in token_map.get(token) else token_map.get(token)
    logger.info('ABS request : {}'.format(url_prefix + data_identifier + url_suffix))
    r = requests.get(url_prefix + data_identifier + url_suffix, headers={'x-api-key': abs_web_service_token},
                     verify=False)
    if r.status_code == 200:
        logger.info('Response Text [{}]'.format(r.text))
        try:
            response = json.loads(r.text)
        except:
            logger.error('Exception parsing json for {}'.format(data_identifier))
            return
        # get the number of dimensions in the dataset
        dimensions = len(response.get('structure').get('dimensions').get('observation'))
        # Assume that the time period is the last dimension get it's name
        last_period_name = response.get('structure').get('dimensions').get('observation')[-1].get('values')[-1].get(
            'name')
        # get the index into the dimensions of the last time period
        last_period_index = len(response.get('structure').get('dimensions').get('observation')[-1].get('values')) - 1
        # construct the dimension key of the last data item
        dimension_key = '0:' * (dimensions - 1) + str(last_period_index)
        raw_value = response['dataSets'][0]['observations'][dimension_key][0]
        if raw_value is None:
            value = 'N/A'
        elif isinstance(raw_value, float):
            value = str(round(raw_value, 2))
        else:
            value = str(response['dataSets'][0]['observations'][dimension_key][0])
        template_map[value_token] = value
        # the token for the period
        template_map[value_token[:-1] + 'PERIOD__'] = last_period_name
        # calculate the change from the preceding value
        last_period_index -= 1
        if last_period_index >= 0:
            # construct the dimension key of the previous data item
            dimension_key = '0:' * (dimensions - 1) + str(last_period_index)
            prev_value = response['dataSets'][0]['observations'][dimension_key][0]
            if raw_value and prev_value:
                if prev_value > raw_value:
                    adjective = 'fell'
                elif prev_value < raw_value:
                    adjective = 'rose'
                else:
                    adjective = 'held steady'
            else:
                adjective = 'N/A'
            template_map[value_token[:-1] + 'ADJECTIVE__'] = adjective
            if prev_value is None:
                value = 'N/A'
            elif isinstance(prev_value, float):
                value = str(round(prev_value, 2))
            else:
                value = str(response['dataSets'][0]['observations'][dimension_key][0])
            template_map[value_token[:-1] + 'PREV__'] = value
            prev_period_name = response.get('structure').get('dimensions').get('observation')[-1].get('values')[-2].get(
                'name')
            template_map[value_token[:-1] + 'PREVPERIOD__'] = prev_period_name
    else:
        logger.info('ABS API returned {} for {}'.format(r.status_code, data_identifier))
    # Rate-limit successive ABS API calls.
    time.sleep(.120)
def abs_expand(item, **kwargs):
    # Macro entry point: expand every known ABS token found in the item and
    # render the body/abstract/headline as jinja templates with the values.
    template_map = {}
    # find the primary tokens, delimitered double underscores
    tokens = re.findall('__(.*?)__', item['body_html'])
    for t in tokens:
        if '__' + t + '__' not in token_map:
            # allow ad-hoc tokens: treat the token text itself as dataset id
            token_map['__' + t + '__'] = t
    for e in token_map:
        if e in item['body_html'] or e in item['headline'] or e in item['abstract']:
            expand_token(e, item, template_map)
    try:
        item['body_html'] = render_template_string(item.get('body_html', ''), **template_map)
        item['abstract'] = render_template_string(item.get('abstract', ''), **template_map)
        item['headline'] = render_template_string(item.get('headline', ''), **template_map)
    except Exception as ex:
        # best-effort: leave the item partially expanded on template errors
        logger.warning(ex)
    # If the macro is being executed by a stage macro then update the item directly
    if 'desk' in kwargs and 'stage' in kwargs:
        update = {'body_html': item.get('body_html', ''),
                  'abstract': item.get('abstract', ''),
                  'headline': item.get('headline', '')}
        get_resource_service('archive').system_update(item[config.ID_FIELD], update, item)
        return get_resource_service('archive').find_one(req=None, _id=item[config.ID_FIELD])
    return item
# Superdesk macro registration metadata.
name = 'Expand ABS indicator tokens into the story'
label = 'ABS indicator expand'
callback = abs_expand
access_type = 'frontend'
action_type = 'direct'
|
emvarun/followup-and-location | Loop_SkyPatch.py | Python | gpl-3.0 | 681 | 0.030837 | import numpy as np
from subprocess import check_output
import os
import subprocess
from subprocess import Popen, PIPE
from Sky_Patch import Coverage
from params import *
#LIST OF LOCATIONS CONSIDERED
for i in range (0, len(obsName)):
tCoverage = []
for dirpath, dirname, files in os.walk(folddir):
for filename | in files:
path = os.path.join(dirpath, filename)
name = filename.strip().split('.')
if(name[-1] == 'gz' ):
print obsName[i], path
nlist = Coverage(str(path), obsName[i], Texp, NsqDeg, stepsize)
tCoverage.append(nlist)
f=open( str(outfile) + '-' + str(obsName[i]) + '.txt','w')
for item in tCoverage:
f.write(str(item) + | '\n')
f.close()
|
bridgetnoelleee/project2 | helpers/functions.py | Python | bsd-3-clause | 2,064 | 0.003391 | from flask import _app_ctx_stack
from flask import current_app as app
from flask import Flask, request, session, url_for, redirect, \
render_template, abort, g, flash, _app_ctx_stack
from sqlite3 import dbapi2 as sqlite3
from hashlib import md5
from datetime import datetime
def get_db():
    """Open (and cache) a SQLite connection on the current app context.

    The connection is stored on the application-context stack top, so
    repeated calls within one request reuse the same connection.
    """
    ctx = _app_ctx_stack.top
    if not hasattr(ctx, 'sqlite_db'):
        conn = sqlite3.connect(app.config['DATABASE'])
        conn.row_factory = sqlite3.Row
        ctx.sqlite_db = conn
    return ctx.sqlite_db
def close_database(exception):
    """Closes the database again at the end of the request."""
    # (Repaired garbled ' | ' artifacts: the docstring and, critically, the
    # attribute name string 'sqlite_db' were split.)
    top = _app_ctx_stack.top
    if hasattr(top, 'sqlite_db'):
        top.sqlite_db.close()
def init_db():
    """(Re)create the schema by executing schema.sql against the database."""
    connection = get_db()
    with app.open_resource('schema.sql', mode='r') as schema:
        script = schema.read()
    connection.cursor().executescript(script)
    connection.commit()
def initdb_command():
    """Creates the database tables."""
    # CLI helper: rebuild the schema, then report success on stdout.
    init_db()
    print('Initialized the database.')
def query_db(query, args=(), one=False):
    """Run *query* with *args*; return all rows, or only the first row
    (or None) when *one* is true."""
    rows = get_db().execute(query, args).fetchall()
    if one:
        return rows[0] if rows else None
    return rows
def get_user_id(username):
    """Look up the numeric id for *username*; None when unknown."""
    row = query_db('select user_id from user where username = ?',
                   [username], one=True)
    return row[0] if row else None
def format_datetime(timestamp):
    """Render a UNIX *timestamp* as 'YYYY-MM-DD @ HH:MM' (UTC)."""
    moment = datetime.utcfromtimestamp(timestamp)
    return moment.strftime('%Y-%m-%d @ %H:%M')
def gravatar_url(email, size=80):
    """Return the gravatar image for the given email address."""
    digest = md5(email.strip().lower().encode('utf-8')).hexdigest()
    return 'http://www.gravatar.com/avatar/%s?d=identicon&s=%d' % (digest, size)
def url_for(string, data=None):
    """%-format *string* with *data* when given; otherwise return it as-is.

    NOTE(review): this shadows flask's url_for imported at the top of the
    file — callers in this module get this plain formatter instead.
    """
    return string if data is None else string % data
|
ampron/nflstat | simulator_NFL.py | Python | gpl-3.0 | 8,159 | 0.011157 | #!/usr/bin/python
# -*- encoding: UTF-8 -*-
'''NFL Simulator
List of classes: -none-
List of functions:
main
'''
# built-in modules
import random
import math
from pprint import pprint
# third-party modules
import numpy as np
from scipy.stats import skew, kurtosis
from scipy.special import erf, erfinv
import matplotlib as mpl
mpl.use('TkAgg')
from matplotlib import pyplot as plt
# custom modules
import rank_NFL
from NFL_class import NFL, Game, Team
import collectors_NFL as coll
#===============================================================================
def main():
    # Monte-Carlo study of NFL season outcomes: simulate many seasons with
    # random team strengths and examine the distribution of win totals.
    tms = NFL.team_names
    tms = [Team(nm) for nm in tms]
    # name -> Team lookup used to rebind schedule entries to Team objects
    tm_lkup = {}
    for t in tms:
        tm_lkup[str(t)] = t
    # Load in the season schedule
    # TODO: fix downloading function to use new objects
    all_gms = coll.download_schedule_espn()
    for g in all_gms:
        g.hm = tm_lkup[str(g.hm)]
        g.vs = tm_lkup[str(g.vs)]
    #all_gms = [Game(gm) for gm in all_gms]
    all_gms.sort(key=lambda d: d.wk)
    print 'len(all_gms) = ' + str(len(all_gms))
    i_sn = 0
    win_dist = []
    sig = 16.0
    real_betas = []
    calc_betas = []
    # Simulate seasons until the cap (1000) is hit; the convergence test on
    # sigma is currently commented out of the stopping condition below.
    while True:
        # Set up the team strength distribution
        #tm_strength = rand_tm_strengths(tms)
        for t in tms:
            t.set_rand_strength()
        # Simulate ONE season
        sim_gms, sim_wins = simulate_season(all_gms)
        # END for
        # Determine playoff seedings
        # TODO: make function that will take season results and return a playoff
        # bracket object
        # Run ranking algorithm on simulated season results
        #rank_ok = True
        #for tm in sim_wins:
        #    if sim_wins[tm] == 0 or sim_wins[tm] == 16:
        #        rank_ok = False
        #        i_sn -= 1
        ## END for
        #if rank_ok:
        #    tm_vals = rank_NFL.bt_rank(sim_gms)
        #    #X = []
        #    #Y = []
        #    for tm in tm_vals:
        #        real_betas.append(np.log(tm_strength[tm]))
        #        calc_betas.append(np.log(tm_vals[tm]))
        #    #plt.loglog(X,Y,'.')
        #    #plt.show()
        #    #plt.close()
        ## END if
        win_dist.extend(sim_wins.values())
        sig_ = np.std(win_dist)
        i_sn += 1
        if i_sn >= 1000: # or np.abs(sig - sig_) < 0.0005:
            # report distribution moments and compare against the binomial
            print 'season ' + str(i_sn)
            print 'mean = {0:0.2f} wins'.format(np.mean(win_dist))
            print 'sigma = {0:0.5f} ± {1:0.2e}'.format(
                sig_, np.abs(sig - sig_)
            )
            print ' sigma/bino = {0:0.3f}'.format(sig_/2.0)
            print 'skewness = {0:0.5e}'.format(skew(win_dist))
            krt = kurtosis(win_dist)
            print 'kurtosis = {0:0.5e}'.format(krt)
            print ' kurt/bino = {0:+0.3f}'.format(krt-(-1/8.0)/(-1/8.0))
            plt.hist(win_dist, bins=17)
            plt.show()
            plt.close()
            break
        else:
            sig = sig_
        # END if
    # END while
    #apparent_hm_edge = float(apparent_hm_edge)/len(sim_gms)
    #print "Home edge for this season: {0:0.3f}".format(apparent_hm_edge)
    # Save simulated results in a csv/html file
    #f_txt = 'Date,Week,Visitor,Score,Home,Score' + r'\\n'
    #for gm in sim_gms:
    #    f_txt += '{0[date]},{0[wk]},{0[vs]},{0[vssc]},{0[hm]},{0[hmsc]}'.format(gm)
    #    f_txt += r'\\n'
    # END for
    #f = open('../../ampPy/example_chimera_table.csv.html')
    #template = f.read()
    #f.close()
    #f_txt = re.sub(r"(?<=var csv_data = ')[^']*?(?=')", f_txt, template)
    #f = open('NFL_simulated_season_results.csv.html', 'w')
    #f.write(f_txt)
    #f.close()
    # Dump real-vs-calculated log-strengths when the ranking step ran.
    if len(calc_betas) > 0:
        f = open('rankings of simulated seasons.csv', 'w')
        f.write('real-beta,calc-beta\n')
        for i in range(len(real_betas)):
            f.write('{0:0.5f},{1:0.5f}\n'.format(real_betas[i], calc_betas[i]))
        f.close()
        plt.plot(real_betas, calc_betas, '.')
        plt.show()
        plt.close()
    # Print the divisional standings
    tms.sort(key=lambda t: sim_wins[t], reverse=True)
    tms.sort(key=lambda t: t.group)
    teams_ranked = sorted(tms, key=lambda t: t.v)
    div = ''
    for tm in tms:
        if div != tm.group:
            div = tm.group
            print ''
            print div
            print 22*'-'
        # END if
        try:
            print u'{0:22s}: {1:2d}-{2:2d} ({4:2d}, π={3:0.3f})'.format(
                str(tm), sim_wins[tm], 16-sim_wins[tm],
                tm.v, 32-teams_ranked.index(tm)
            )
        except:
            # fall back to the short format if ranking info is unavailable
            print u'{0:22s}: {1:2d}-{2:2d} (π={3:0.3f})'.format(
                str(tm), sim_wins[tm], 16-sim_wins[tm], tm.v
            )
        # END try
    # END for
    print ''
    # Print playoff seedings
    playoff_pool = [t for t in sorted(tms, key=lambda t: sim_wins[t])]
    pprint(playoff_pool[:12])
    # Get division winners
    #for div in NFL.divisions:
    #    print '{} champ'
    #    for t in tms:
    #        champ = None
    #        if t.group == div:
    #            if champ is None:
    #                champ = t
    #            else if
# END main
#===============================================================================
def simulate_season(all_gms):
    # Play out every scheduled game once; returns (simulated games,
    # wins-per-team dict).  Scores are drawn from a normal distribution
    # around the point spread implied by each game's win probability.
    sim_gms = []
    sim_wins = {}
    for gm in all_gms:
        if type(gm) is dict:
            gm = Game(gm)
        if gm.hm not in sim_wins:
            sim_wins[gm.hm] = 0
        if gm.vs not in sim_wins:
            sim_wins[gm.vs] = 0
        # NOTE(review): x is drawn but never used — presumably vestigial.
        x = random.random()
        p_hm = gm.predict()
        # Translate win probability into expected point difference (pnts)
        pnts = 11.87*math.sqrt(2.0)*erfinv(2.0*p_hm - 1.0)
        pnts = int( np.random.normal(pnts, 11.87, 1)[0] )
        # Do not allow ties, if simulated point diff is 0 then flip a coin
        if pnts == 0: pnts = (-1)**random.randint(0,1)
        # Record results
        new_gm = {}
        if pnts > 0:
            new_gm['vssc'] = 0
            new_gm['hmsc'] = pnts
            sim_wins[gm.hm] += 1
        else:
            new_gm['vssc'] = -pnts
            new_gm['hmsc'] = 0
            sim_wins[gm.vs] += 1
        # END if
        new_gm['wk'] = gm.wk
        new_gm['date'] = gm.date
        new_gm['hm'] = gm.hm
        new_gm['vs'] = gm.vs
        new_gm['hmto'] = 0
        new_gm['vsto'] = 0
        new_gm = Game(new_gm)
        sim_gms.append(new_gm)
    # END for
    return sim_gms, sim_wins
# END simulate_season
#===============================================================================
def rand_tm_strengths(tms):
# Set up the team strength distribution
tm_strength = {}
i = 0.0
random.shuffle(tms)
# original
#all_betas = np.random.normal(loc=0.0, scale=0.495904447431, size=32)
# adjusted
all_betas = np.random.normal(loc=0.0, scale=0.75, size=32)
all_betas = sorted(list(all_betas))
for t in tms:
beta = all_betas.pop(0)
tm_strength[t] = math.exp(beta)
t.v = math.exp(beta)
i += 1
# END for
re | turn tm_strength
# END rand_tm_stengths
#===============================================================================
def predictGm(v_hm, v_vs, pnt_adj=0.0):
    """Predict a single game from Bradley-Terry team strengths.

    Returns (probability the home team wins, expected point differential
    home - visitor).  *pnt_adj* shifts the spread, e.g. for home-field edge.
    """
    sigma = 11.87                      # historical std-dev of score differentials
    sigma_rt2 = sigma * math.sqrt(2.0)
    # Bradley-Terry head-to-head probability on a neutral field.
    p_home = v_hm / (v_hm + v_vs)
    # Map the win probability onto an expected score differential
    # (score difference = home score - visitor score).
    spread = sigma_rt2 * erfinv(2.0 * p_home - 1.0)
    if pnt_adj != 0:
        # Apply the edge to the spread, then recover the adjusted probability.
        spread += pnt_adj
        p_home = 1.0 - (0.5 * (1.0 + erf(-spread / sigma_rt2)))
    return (p_home, spread)
#===============================================================================
# logit: inverse of the logistic function; invlogit: the logistic (sigmoid).
def logit(p): return np.log(p/(1.0-p))
def invlogit(x): return 1.0 / (1.0 + np.exp(-x))
#===============================================================================
if __name__ == '__main__':
main()
# END if |
alexliyu/CDMSYSTEM | pyroute2/netlink/rtnl/ifinfmsg.py | Python | mit | 36,186 | 0.000028 | import os
import time
import json
import struct
import logging
import platform
import subprocess
from fcntl import ioctl
from pyroute2.common import map_namespace
from pyroute2.common import ANCIENT
# from pyroute2.netlink import NLMSG_ERROR
from pyroute2.netlink import nla
from pyroute2.netlink import nlmsg
from pyroute2.netlink import nlmsg_atoms
from pyroute2.netlink.rtnl.iw_event import iw_event
# it's simpler to double constants here, than to change all the
# module layout; but it is a subject of the future refactoring
RTM_NEWLINK = 16
RTM_DELLINK = 17
#
_ANCIENT_BARRIER = 0.3
_BONDING_MASTERS = '/sys/class/net/bonding_masters'
_BONDING_SLAVES = '/sys/class/net/%s/bonding/slaves'
_BRIDGE_MASTER = '/sys/class/net/%s/brport/bridge/ifindex'
_BONDING_MASTER = '/sys/class/net/%s/master/ifindex'
IFNAMSIZ = 16
TUNDEV = '/dev/net/tun'
arch = platform.machine()
if arch == 'x86_64':
TUNSETIFF = 0x400454ca
TUNSETPERSIST = 0x400454cb
TUNSETOWNER = 0x400454cc
TUNSETGROUP = 0x400454ce
elif arch == 'ppc64':
TUNSETIFF = 0x800454ca
TUNSETPERSIST = 0x800454cb
TUNSETOWNER = 0x800454cc
TUNSETGROUP = 0x800454ce
else:
TUNSETIFF = None
##
#
# tuntap flags
#
IFT_TUN = 0x0001
IFT_TAP = 0x0002
IFT_NO_PI = 0x1000
IFT_ONE_QUEUE = 0x2000
IFT_VNET_HDR = 0x4000
IFT_TUN_EXCL = 0x8000
IFT_MULTI_QUEUE = 0x0100
IFT_ATTACH_QUEUE = 0x0200
IFT_DETACH_QUEUE = 0x0400
# read-only
IFT_PERSIST = 0x0800
IFT_NOFILTER = 0x1000
##
#
# normal flags
#
IFF_UP = 0x1 # interface is up
IFF_BROADCAST = 0x2 # broadcast address valid
IFF_DEBUG = 0x4 # turn on debugging
IFF_LOOPBACK = 0x8 # is a loopback net
IFF_POINTOPOINT = 0x10 # interface is has p-p link
IFF_NOTRAILERS = 0x20 # avoid use of trailers
IFF_RUNNING = 0x40 # interface RFC2863 OPER_UP
IFF_NOARP = 0x80 # no ARP protocol
IFF_PROMISC = 0x100 # receive all packets
IFF_ALLMULTI = 0x200 # receive all multicast packets
IFF_MASTER = 0x400 # master of a load balancer
IFF_SLAVE = 0x800 # slave of a load balancer
IFF_MULTICAST = 0x1000 # Supports multicast
IFF_PORTSEL = 0x2000 # can set media type
IFF_AUTOMEDIA = 0x4000 # auto media select active
IFF_DYNAMIC = 0x8000 # dialup device with changing addresses
IFF_LOWER_UP = 0x10000 # driver signals L1 up
IFF_DORMANT = 0x20000 # driver signals dormant
IFF_ECHO = 0x40000 # echo sent packets
(IFF_NAMES, IFF_VALUES) = map_namespace('IFF', globals())
IFF_MASK = IFF_UP |\
IFF_DEBUG |\
IFF_NOTRAILERS |\
IFF_NOARP |\
IFF_PROMISC |\
IFF_ALLMULTI
IFF_VOLATILE = IFF_LOOPBACK |\
IFF_POINTOPOINT |\
IFF_BROADCAST |\
IFF_ECHO |\
IFF_MASTER |\
IFF_SLAVE |\
IFF_RUNNING |\
IFF_LOWER_UP |\
IFF_DORMANT
# RFC 2863 operational states, indexed by the kernel's numeric code.
states = ('UNKNOWN',
          'NOTPRESENT',
          'DOWN',
          'LOWERLAYERDOWN',
          'TESTING',
          'DORMANT',
          'UP')
# name -> code and code -> name lookup tables.
state_by_name = dict(((i[1], i[0]) for i in enumerate(states)))
state_by_code = dict(enumerate(states))
# Counter field names shared by the IFLA_STATS (32-bit) and IFLA_STATS64
# NLA parsers below.  (Repaired garbled ' | ' artifacts in two entries.)
stats_names = ('rx_packets',
               'tx_packets',
               'rx_bytes',
               'tx_bytes',
               'rx_errors',
               'tx_errors',
               'rx_dropped',
               'tx_dropped',
               'multicast',
               'collisions',
               'rx_length_errors',
               'rx_over_errors',
               'rx_crc_errors',
               'rx_frame_errors',
               'rx_fifo_errors',
               'rx_missed_errors',
               'tx_aborted_errors',
               'tx_carrier_errors',
               'tx_fifo_errors',
               'tx_heartbeat_errors',
               'tx_window_errors',
               'rx_compressed',
               'tx_compressed')
class ifinfbase(object):
'''
Network interface message
struct ifinfomsg {
unsigned char ifi_family; /* AF_UNSPEC */
unsigned short ifi_type; /* Device type */
int ifi_index; /* Interface index */
unsigned int ifi_flags; /* Device flags */
unsigned int ifi_change; /* change mask */
};
'''
prefix = 'IFLA_'
fields = (('family', 'B'),
('__align', 'B'),
('ifi_type', 'H'),
('index', 'i'),
('flags', 'I'),
('change', 'I'))
nla_map = (('IFLA_UNSPEC', 'none'),
('IFLA_ADDRESS', 'l2addr'),
('IFLA_BROADCAST', 'l2addr'),
('IFLA_IFNAME', 'asciiz'),
('IFLA_MTU', 'uint32'),
('IFLA_LINK', 'uint32'),
('IFLA_QDISC', 'asciiz'),
('IFLA_STATS', 'ifstats'),
('IFLA_COST', 'hex'),
('IFLA_PRIORITY', 'hex'),
('IFLA_MASTER', 'uint32'),
('IFLA_WIRELESS', 'wireless'),
('IFLA_PROTINFO', 'hex'),
('IFLA_TXQLEN', 'uint32'),
('IFLA_MAP', 'ifmap'),
('IFLA_WEIGHT', 'hex'),
('IFLA_OPERSTATE', 'state'),
('IFLA_LINKMODE', 'uint8'),
('IFLA_LINKINFO', 'ifinfo'),
('IFLA_NET_NS_PID', 'uint32'),
('IFLA_IFALIAS', 'hex'),
('IFLA_NUM_VF', 'uint32'),
('IFLA_VFINFO_LIST', 'hex'),
('IFLA_STATS64', 'ifstats64'),
('IFLA_VF_PORTS', 'hex'),
('IFLA_PORT_SELF', 'hex'),
('IFLA_AF_SPEC', 'af_spec'),
('IFLA_GROUP', 'uint32'),
('IFLA_NET_NS_FD', 'netns_fd'),
('IFLA_EXT_MASK', 'hex'),
('IFLA_PROMISCUITY', 'uint32'),
('IFLA_NUM_TX_QUEUES', 'uint32'),
('IFLA_NUM_RX_QUEUES', 'uint32'),
('IFLA_CARRIER', 'uint8'),
('IFLA_PHYS_PORT_ID', 'hex'),
('IFLA_CARRIER_CHANGES', 'uint32'))
@staticmethod
def flags2names(flags, mask=0xffffffff):
ret = []
for flag in IFF_VALUES:
if (flag & mask & flags) == flag:
ret.append(IFF_VALUES[flag])
return ret
@staticmethod
def names2flags(flags):
ret = 0
mask = 0
for flag in flags:
if flag[0] == '!':
flag = flag[1:]
else:
ret |= IFF_NAMES[flag]
mask |= IFF_NAMES[flag]
return (ret, mask)
    def encode(self):
        # convert flags
        # Accept symbolic flag names (e.g. ('up', '!arp')) and translate them
        # into the numeric flags value plus the corresponding change mask.
        if isinstance(self['flags'], (set, tuple, list)):
            self['flags'], self['change'] = self.names2flags(self['flags'])
        return super(ifinfbase, self).encode()
    class netns_fd(nla):
        # IFLA_NET_NS_FD: a file descriptor referring to a network namespace.
        fields = [('value', 'I')]
        netns_run_dir = '/var/run/netns'
        netns_fd = None
        def encode(self):
            self.close()
            #
            # There are two ways to specify netns
            #
            # 1. provide fd to an open file
            # 2. provide a file name
            #
            # In the first case, the value is passed to the kernel
            # as is. In the second case, the object opens appropriate
            # file from `self.netns_run_dir` and closes it upon
            # `__del__(self)`
            if isinstance(self.value, int):
                self['value'] = self.value
            else:
                self.netns_fd = os.open('%s/%s' % (self.netns_run_dir,
                                                   self.value), os.O_RDONLY)
                self['value'] = self.netns_fd
            nla.encode(self)
            self.register_clean_cb(self.close)
        def close(self):
            # Release the namespace fd if this object opened one itself.
            if self.netns_fd is not None:
                os.close(self.netns_fd)
    class wireless(iw_event):
        # IFLA_WIRELESS payloads are parsed by the shared iw_event NLA.
        pass
    class state(nla):
        # Operational state: wire format is a one-byte code; exposed by name.
        fields = (('value', 'B'), )
        def encode(self):
            # Map symbolic name ('UP', 'DOWN', ...) to the kernel's code.
            self['value'] = state_by_name[self.value]
            nla.encode(self)
        def decode(self):
            nla.decode(self)
            # Replace the numeric code with its symbolic name for consumers.
            self.value = state_by_code[self['value']]
    class ifstats(nla):
        # IFLA_STATS: 32-bit interface counters, one 'I' field per counter name.
        fields = [(i, 'I') for i in stats_names]
    class ifstats64(nla):
        # IFLA_STATS64: same counters as ifstats but 64-bit ('Q') wide.
        fields = [(i, 'Q') for i in stats_names]
class ifmap(nla):
fields = (('mem_start', 'Q'),
('mem_end', 'Q'),
|
Mxit/python-mxit | mxit/settings.py | Python | bsd-3-clause | 78 | 0 | AUTH_ENDPOINT = 'https://a | uth.mxit.com'
API_ENDPOINT = 'htt | ps://api.mxit.com'
|
frague59/wagtailpolls | wagtailpolls/models.py | Python | bsd-3-clause | 2,687 | 0.000372 | from __future__ import absolute_import, unicode_literals
from six import text_type
from django.db import models
from django.db.models.query import QuerySet
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from modelcluster.fields import ParentalKey
from modelcluster.models import ClusterableModel
from wagtail.wagtailadmin.edit_handlers import FieldPanel, InlinePanel
from wagtail.wagtailsearch import index
from wagtail.wagtailsearch.backends import get_search_backend
class PollQuerySet(QuerySet):
    """QuerySet for Poll that adds full-text search via Wagtail's search backend."""
    def search(self, query_string, fields=None, backend='default'):
        """
        This runs a search query on all the pages in the QuerySet

        :param query_string: free-text query forwarded to the search backend
        :param fields: accepted but unused here -- NOTE(review): confirm whether
            restricting the search to specific fields was intended
        :param backend: name of the configured Wagtail search backend
        """
        search_backend = get_search_backend(backend)
        return search_backend.search(query_string, self)
@python_2_unicode_compatible
class Vote(models.Model):
    """A single vote cast on a Question, recorded with the voter's IP address."""
    # the question this vote was cast for; reverse accessor: question.votes
    question = ParentalKey('Question', related_name='votes')
    # voter address -- presumably used to detect duplicate votes; confirm with callers
    ip = models.GenericIPAddressField()
    # set once, automatically, when the vote row is created
    time = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return self.question.question
    class Meta:
        verbose_name = _('vote')
        verbose_name_plural = _('votes')
@python_2_unicode_compatible
class Question(ClusterableModel, models.Model):
    """One question belonging to a Poll; Vote rows attach via the 'votes' relation."""
    # the poll this question belongs to; reverse accessor: poll.questions
    poll = ParentalKey('Poll', related_name='questions')
    question = models.CharField(max_length=128, verbose_name=_('Question'))
    def __str__(self):
        return self.question
    class Meta:
        verbose_name = _('question')
        verbose_name_plural = _('questions')
@pyt | hon_2_unicode_compatible
class Poll(ClusterableModel, models.Model, index.Indexed):
title = models.CharField(max_length=128, verbose_name=_('Title'))
date_created = models.DateTimeField(default=timezone.now)
class Meta:
verbose_name = _('poll')
verbose_name_plural = _('polls')
panels = [
Fi | eldPanel('title'),
InlinePanel('questions', label=_('Questions'), min_num=1)
]
search_fields = (
index.SearchField('title', partial_match=True, boost=5),
index.SearchField('id', boost=10),
)
objects = PollQuerySet.as_manager()
def get_nice_url(self):
return slugify(text_type(self))
def get_template(self, request):
try:
return self.template
except AttributeError:
return '{0}/{1}.html'.format(self._meta.app_label, self._meta.model_name)
def form(self):
# Stops circular import
from .forms import VoteForm
return VoteForm(self)
def __str__(self):
return self.title
|
lucab/security_monkey | security_monkey/__init__.py | Python | apache-2.0 | 5,751 | 0.008346 | # Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Patrick Kelley <patrick@netflix.com>
"""
### FLASK ###
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_envvar("SECURITY_MONKEY_SETTINGS")
db = SQLAlchemy(app)
# For ELB and/or Eureka
@app.route('/healthcheck')
def healthcheck():
    """Liveness-probe endpoint for ELB/Eureka; always responds with 'ok'."""
    return 'ok'
### LOGGING ###
import logging
from logging import Formatter
from logging.handlers import RotatingFileHandler
from logging import StreamHandler
handler = RotatingFileHandler(app.config.get('LOG_FILE'), maxBytes=10000000, backupCount=100)
handler.setFormatter(
Formatter('%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]')
)
handler.setLevel(app.config.get('LOG_LEVEL'))
app.logger.setLevel(app.config.get('LOG_LEVEL'))
app.logger.addHandler(handler)
app.logger.addHandler(StreamHandler())
### Flask-Login ###
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
from security_monkey.datastore import User, Role
@login_manager.user_loader
def load_user(email):
    """
    For Flask-Login, returns the user object given the userid.

    Auto-provisions accounts: if no User row exists for *email*, one is
    created, committed, and re-queried so a persisted instance is returned.
    :return: security_monkey.datastore.User object
    """
    app.logger.info("Inside load_user!")
    user = User.query.filter(User.email == email).first()
    if not user:
        # First login for this address: create the row, then close the session
        # and re-fetch so the returned object comes from a fresh session.
        user = User(email=email)
        db.session.add(user)
        db.session.commit()
        db.session.close()
        user = User.query.filter(User.email == email).first()
    return user
### Flask-Security ###
from flask.ext.security import Security, SQLAlchemyUserDatastore
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
### Flask Mail ###
from flask_mail import Mail
mail = Mail(app=app)
from security_monkey.common.utils.utils import send_email as common_send_email
@security.send_mail_task
def send_email(msg):
    """
    Overrides the Flask-Security/Flask-Mail integration
    to send emails out via boto and ses.

    :param msg: a Flask-Mail Message; only subject, recipients and html are used.
    """
    common_send_email(subject=msg.subject, recipients=msg.recipients, html=msg.html)
### FLASK API ###
from flask.ext.restful import Api
api = Api(app)
from security_monkey.views.account import AccountGetPutDelete
from security_monkey.views.account import AccountPostList
api.add_resource(AccountGetPutDelete, '/api/1/accounts/<int:account_id>')
api.add_resource(AccountPostList, '/api/1/accounts')
from security_monkey.views.distinct import Distinct
api.add_resource(Distinct, '/api/1/distinct/<string:key_id>')
from security_monkey.views.ignore_list import IgnoreListGetPutDelete
from security_monkey.vie | ws.ignore_list import IgnorelistListPost
api.add_resource(IgnoreListGetPutDelete, '/api/1/ignorelistentries/<int:item_id>')
api.add_resource(IgnorelistListPost, '/api | /1/ignorelistentries')
from security_monkey.views.item import ItemList
from security_monkey.views.item import ItemGet
api.add_resource(ItemList, '/api/1/items')
api.add_resource(ItemGet, '/api/1/items/<int:item_id>')
from security_monkey.views.item_comment import ItemCommentPost
from security_monkey.views.item_comment import ItemCommentDelete
from security_monkey.views.item_comment import ItemCommentGet
api.add_resource(ItemCommentPost, '/api/1/items/<int:item_id>/comments')
api.add_resource(ItemCommentDelete, '/api/1/items/<int:item_id>/comments/<int:comment_id>')
api.add_resource(ItemCommentGet, '/api/1/items/<int:item_id>/comments/<int:comment_id>')
from security_monkey.views.item_issue import ItemAuditGet
from security_monkey.views.item_issue import ItemAuditList
api.add_resource(ItemAuditList, '/api/1/issues')
api.add_resource(ItemAuditGet, '/api/1/issues/<int:audit_id>')
from security_monkey.views.item_issue_justification import JustifyPostDelete
api.add_resource(JustifyPostDelete, '/api/1/issues/<int:audit_id>/justification')
from security_monkey.views.logout import Logout
api.add_resource(Logout, '/api/1/logout')
from security_monkey.views.revision import RevisionList
from security_monkey.views.revision import RevisionGet
api.add_resource(RevisionList, '/api/1/revisions')
api.add_resource(RevisionGet, '/api/1/revisions/<int:revision_id>')
from security_monkey.views.revision_comment import RevisionCommentPost
from security_monkey.views.revision_comment import RevisionCommentGet
from security_monkey.views.revision_comment import RevisionCommentDelete
api.add_resource(RevisionCommentPost, '/api/1/revisions/<int:revision_id>/comments')
api.add_resource(RevisionCommentGet, '/api/1/revisions/<int:revision_id>/comments/<int:comment_id>')
api.add_resource(RevisionCommentDelete, '/api/1/revisions/<int:revision_id>/comments/<int:comment_id>')
from security_monkey.views.user_settings import UserSettings
api.add_resource(UserSettings, '/api/1/settings')
from security_monkey.views.whitelist import WhitelistGetPutDelete
from security_monkey.views.whitelist import WhitelistListPost
api.add_resource(WhitelistGetPutDelete, '/api/1/whitelistcidrs/<int:item_id>')
api.add_resource(WhitelistListPost, '/api/1/whitelistcidrs')
|
onenameio/utilitybelt | setup.py | Python | mit | 747 | 0 | """
Useful Utils
==== | ==========
"""
from setuptools import setup, find_packages
setup(
name='utilitybelt',
version='0.2.6',
author='Halfmoon Labs',
author_email='hello@halfmoonlabs.com',
description='Generally useful tools. A python utility belt.',
keywords=('dict dictionary scrub to_dict todict json characters charset '
| 'hex entropy utility'),
url='https://github.com/onenameio/utilitybelt',
license='MIT',
packages=find_packages(),
install_requires=[
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
zip_safe=False,
)
|
fras2560/graph-helper | algorithms/color.py | Python | apache-2.0 | 14,060 | 0.003272 | """
-------------------------------------------------------
color
a module to determine the chromatic number of a graph
-------------------------------------------------------
Author: Dallas Fraser
ID: 110242560
Email: fras2560@mylaurier.ca
Version: 2014-09-17
-------------------------------------------------------
"""
import unittest
import networkx as nx
from itertools import permutations
import logging
import copy
def valid_coloring(coloring, G):
    '''
    a function that determines if the coloring is valid

    Parameters:
        coloring: a list of colors in which each color is a list of nodes
                  e.g. [[1,2],[3]]; None is never valid
        G: a networkx graph (networkx; only G.neighbors(vertex) is used)
    Returns:
        valid: True if no color class contains two adjacent vertices,
               False otherwise (and False when coloring is None)
    '''
    if coloring is None:
        return False
    for color in coloring:
        for vertex in color:
            # a color class is invalid as soon as it holds two adjacent nodes
            for neighbor in G.neighbors(vertex):
                if neighbor in color:
                    return False
    return True
def add_list(l1, l2, index):
    '''
    Merge the two-dimensional list *l2* into a deep copy of *l1*.

    Parameters:
        l1: the base list of lists (list of lists) -- left unmodified
        l2: the list of lists to fold in (list of lists)
        index: position in l1 at which l2's first entry is appended (int)
    Returns:
        merged: a new list of lists; entry (index + k) of the copy of l1
                is extended with l2[k]
    '''
    merged = copy.deepcopy(l1)
    for offset, chunk in enumerate(l2):
        merged[index + offset] += chunk
    return merged
def combine_color_clique(clique, color):
    '''
    Yield every way of merging the clique's singleton color classes into
    the partial coloring *color*.

    Parameters:
        clique: the clique, one singleton list per node (list of lists)
        color: the partial coloring (list of lists)
    Yields:
        coloring: a combined coloring (list of lists), one per permutation
                  of the clique and per alignment offset
    '''
    n_color = len(color)
    n_clique = len(clique)
    for perm in permutations(clique):
        perm_list = convert_combo(perm)
        if n_clique < n_color:
            # slide the shorter clique along the longer coloring
            for start in range(n_color - n_clique + 1):
                yield add_list(color, perm_list, start)
        elif n_clique > n_color:
            # slide the shorter coloring along the longer clique
            for start in range(n_clique - n_color + 1):
                yield add_list(perm_list, color, start)
        else:
            yield add_list(perm_list, color, 0)
def coloring(G, logger=None):
    '''
    a function that finds a minimum coloring of graph G by brute force

    Seeds the search with a largest clique (each clique node gets its own
    color), then tries every permutation of the remaining nodes and every
    split of them into color classes, growing the color count until a
    valid coloring is found. Search is capped at 10 colors.

    Parameters:
        G: the networkx graph (networkx)
        logger: the logger for the function (logging); a DEBUG-level
                default is created when omitted
    Returns:
        coloring: a valid coloring as a list of color classes (list of
                  lists of nodes), or None if none was found within the
                  10-color cap
    '''
    if logger is None:
        logging.basicConfig(level=logging.DEBUG,
                            format='%(asctime)s %(message)s')
        logger = logging.getLogger(__name__)
    valid = False
    largest = 0
    largest_clique = []
    # find largest clique
    for clique in nx.find_cliques(G):
        if len(clique) > largest:
            largest = len(clique)
            largest_clique = clique
    # set chromatic to the largest clique
    chromatic = largest - 1 # one less since add at start of loop
    if chromatic == 0:
        # can be no edge between any node
        # NOTE: the local name 'coloring' shadows this function from here on
        coloring = [G.nodes()]
        valid = True
    nodes = G.nodes()
    # remove nodes from largest clique
    i = 0
    clique = []
    # reformat the clique: one singleton color class per clique node
    for node in largest_clique:
        clique.append([node])
    while i < len(nodes):
        if nodes[i] in largest_clique:
            nodes.pop(i)
        else:
            i += 1
    # 'balls' = nodes still to be distributed over the color classes
    balls = len(nodes)
    if balls == 0:
        valid = True
        coloring = clique
    logger.debug(nodes)
    logger.debug(clique)
    while not valid:
        chromatic += 1
        logger.info('''
        ------------------------------\n
        Testing Chromatic Number of %s\n
        ------------------------------\n
        ''' %chromatic)
        boxes = [balls] * chromatic
        # try every ordering of the leftover nodes and every way of
        # splitting them into 'chromatic' color classes
        for combo in permutations(nodes):
            logger.debug(combo)
            for split in unlabeled_balls_in_unlabeled_boxes(balls, boxes):
                coloring = None
                node_combo = convert_combo(combo)
                coloring = assemble_coloring(node_combo, split)
                # merge the clique's colors into the candidate in every way
                for check in combine_color_clique(clique, coloring):
                    if valid_coloring(check, G):
                        logger.debug("Valid Check")
                        logger.debug(check)
                        coloring = check
                        valid = True
                        break;
                if valid:
                    break
            if valid:
                break
        if chromatic > 10:
            # stop case: give up past 10 colors and return None
            valid = True
            coloring = None
    return coloring
def chromatic_number(G):
    '''
    Return the chromatic number of graph *G*.

    Parameter:
        G: the networkx graph
    Returns:
        the number of color classes in the coloring found by coloring() (int)
    '''
    best = coloring(G)
    return len(best)
def valid_split(split):
    '''
    a function that checks if the split of nodes is valid
    for that number of colors

    Parameters:
        split: the node count assigned to each color (tuple of int)
    Returns:
        True if every color receives at least one node,
        False otherwise
    '''
    # a split is invalid as soon as any color class would be empty
    return all(count != 0 for count in split)
def convert_combo(combo):
    '''
    a function that converts a combination to a list

    Parameters:
        combo: a combination, e.g. a tuple from itertools.permutations
               (any iterable)
    Returns:
        conversion: the combination's elements as a new list (list)
    '''
    # list() copies any iterable element-by-element, replacing the manual loop
    return list(combo)
def assemble_coloring(nodes, split):
    '''
    Build a coloring by dealing nodes into color classes of the given sizes.

    Parameters:
        nodes: the list of nodes (list) -- consumed from the END (mutated!)
        split: how many nodes each color receives (sequence of int)
    Returns:
        coloring: list of color lists, e.g. [[n1, n2], [n3]]; when split is
                  empty, a single class holding all of *nodes*
    '''
    if not split:
        return [nodes]
    coloring = []
    for size in split:
        # pop() takes nodes from the tail, preserving the original fill order
        coloring.append([nodes.pop() for _ in range(size)])
    return coloring
def unlabeled_balls_in_unlabeled_boxes(balls, box_sizes):
    '''
    @author Dr. Phillip M. Feldman

    Validate the arguments, order the box sizes largest-first, and delegate
    the actual enumeration to unlabeled_balls_in_unlabeled_boxe.

    Raises TypeError for non-integer arguments and ValueError for negative
    balls, a non-list box_sizes, non-positive sizes, or insufficient capacity.
    '''
    if not isinstance(balls, int):
        raise TypeError("balls must be a non-negative integer.")
    if balls < 0:
        raise ValueError("balls must be a non-negative integer.")
    if not isinstance(box_sizes, list):
        raise ValueError("box_sizes must be a non-empty list.")
    capacity = 0
    for size in box_sizes:
        if not isinstance(size, int):
            raise TypeError("box_sizes must contain only positive integers.")
        if size < 1:
            raise ValueError("box_sizes must contain only positive integers.")
        capacity += size
    if capacity < balls:
        raise ValueError("The total capacity of the boxes is less than the "
                         "number of balls to be distributed.")
    # largest box first; the recursive helper relies on this ordering
    descending = sorted(box_sizes, reverse=True)
    return unlabeled_balls_in_unlabeled_boxe(balls, descending)
def unlabeled_balls_in_unlabeled_boxe(balls, box_sizes):
'''
@author Dr. Phillip M. Feldman
'''
if not balls:
yield len(box_sizes) * (0,)
elif len(box_sizes) == 1:
if box_sizes[0] >= balls:
yield (balls,)
else:
for balls_in_first_box in range( min(balls, box_sizes[0]), -1, -1 ):
balls_in_other_boxes = balls - balls_in_first_box
short = unlabeled_balls_in_unlabeled_boxe
for distribution_other in short(balls_in_other_boxes,
box_sizes[1:]) |
shree-shubham/Unitype | List Comprehensions.py | Python | gpl-3.0 | 164 | 0.012195 | import filein | put
X, Y, Z, N = map(int, fileinput.input())
nums=[[x, | y, z] for x in range(X+1) for y in range(Y+1) for z in range(Z+1) if x + y + z != N]
print nums
|
msunardi/PTVS | Python/Tests/TestData/RemoveImport/FromImport1.py | Python | apache-2.0 | 31 | 0.032258 | from sys i | mport oar, baz
|
oar |
miurahr/seahub | tests/seahub/notifications/test_models.py | Python | apache-2.0 | 4,419 | 0.003847 | from seahub.notifications.models import (
UserNotification, repo_share_msg_to_json, file_comment_msg_to_json,
repo_share_to_group_msg_to_json, file_uploaded_msg_to_json,
group_join_request_to_json, add_user_to_group_to_json, group_msg_to_json)
from seahub.share.utils import share_dir_to_user, share_dir_to_group
from seahub.test_utils import BaseTestCase
class UserNotificationTest(BaseTestCase):
def setUp(self):
self.clear_cache()
def test_format_file_comment_msg(self):
detail = file_comment_msg_to_json(self.repo.id, self.file,
self.user.username, 'test comment')
notice = UserNotification.objects.add_file_comment_msg('a@a.com', detail)
msg = notice.format_file_comment_msg()
assert msg is not None
assert 'new comment from user' in msg
def test_format_file_uploaded_msg(self):
upload_to = '/'
detail = file_uploaded_msg_to_json('upload_msg', self.repo.id, upload_to)
notice = UserNotification.objects.add_file_uploaded_msg('file@upload.com', detail)
msg = notice.format_file_uploaded_msg()
assert '/library/%(repo_id)s/%(repo_name)s/%(path)s' % {'repo_id': self.repo.id,
'repo_name': self.repo.name,
'path': upload_to.strip('/')} in msg
def test_format_group_join_request(self):
detail = group_join_request_to_json('group_join', self.group.id, 'join_request_msg')
notice = UserNotification.objects.add_group_join_request_notice('group_join',
detail=detail)
msg = notice.format_group_join_request()
assert '/#group/%(group_id)s/members/' % {'group_id': self.group.id} in msg
def test_format_add_user_to_group(self):
detail = add_user_to_group_to_json(self.user.username, self.group.id)
notice = UserNotification.objects.set_add_user_to_group_notice(self.user.username,
detail=detail)
msg = notice.format_add_user_to_group()
assert '/group/%(group_id)s/' % {'group_id': self.group.id} in msg
def test_format_repo_share_msg(self):
notice = UserNotification.objects.add_repo_share_msg(
self.user.username,
repo_share_msg_to_json('bar@bar.com', self.repo.id, '/', None))
msg = notice.format_repo_share_msg()
assert msg is not None
assert 'bar has shared a library named' in msg
assert '/library/%(repo_id)s/%(repo_name)s/%(path)s' % {
'repo_id': self.repo.id,
'repo_name': self.repo.name,
'path': ''} in msg
def test_format_repo_share_msg_with_folder(self):
folder_path = self.folder
share_dir_to_user(self.repo, folder_path, self.user.username,
self.user.username, 'bar@bar.com', 'rw', None)
notice = UserNotification.objects.add_repo_share_msg(
self.user.username,
repo_share_msg_to_json('bar@bar.com', self.repo.id, folder_path, None))
msg = notice.format_repo_share_msg()
assert msg is not None
assert 'bar has shared a folder named' in msg
def test_format_repo_share_to_group_msg(self):
notice = UserNotification.objects.add_repo_share_to | _group_msg(
self.user.username,
repo_share_to_group_msg_to_json('bar@bar.com', self.repo.id, self.group.id, '/', None))
msg = notice.format_repo_share_to_group_msg()
assert msg is not None
assert 'bar has shared a library named' in msg
assert '/group/%(group_id)s/' % {'group_id': self.group.id} in msg
def test_format_repo_ | share_to_group_msg_with_folder(self):
folder_path = self.folder
share_dir_to_group(self.repo, folder_path, self.user.username,
self.user.username, self.group.id, 'rw', None)
notice = UserNotification.objects.add_repo_share_to_group_msg(
self.user.username,
repo_share_to_group_msg_to_json('bar@bar.com', self.repo.id, self.group.id, folder_path, None))
msg = notice.format_repo_share_to_group_msg()
assert msg is not None
assert 'bar has shared a folder named' in msg
|
evanbrumley/psmove-restful | server.py | Python | mit | 3,841 | 0.003645 | """PS Move Restful Server
Usage:
server.py [--battery-saver]
Options:
--battery_saver If set, only allows a few seconds without a GET request before shutting off a controllers LEDs and rumble
"""
from docopt import docopt
import datetime
import time
from threading import Thread
from flask import Flask
from flask.ext.restful import reqparse, abort, Api, Resource
try:
from settings import CONTROLLER_SERIALS
except ImportError:
CONTROLLER_SERIALS = {}
from utils import get_controllers
app = Flask(__name__)
api = Api(app)
controllers = get_controllers()
def color_type(val):
    """argparse type validator: a color channel, i.e. an int in [0, 255]."""
    number = int(val)
    if not (0 <= number <= 255):
        raise ValueError("Colors must be between 0 and 255")
    return number
def rumble_type(val):
    """argparse type validator: a rumble strength, i.e. an int in [0, 255]."""
    strength = int(val)
    if not (0 <= strength <= 255):
        raise ValueError("Rumble must be between 0 and 255")
    return strength
parser = reqparse.RequestParser()
parser.add_argument('red', type=color_type)
parser.add_argument('green', type=color_type)
parser.add_argument('blue', type=color_type)
parser.add_argument('rumble', type=rumble_type)
def get_controller_by_id(controller_id):
    """Resolve a controller either via the configured serial map or, when no
    map is configured, by plain list index. Returns None when not found."""
    if CONTROLLER_SERIALS:
        serial = CONTROLLER_SERIALS.get(controller_id)
        if not serial:
            return None
        for candidate in controllers:
            if candidate.controller.get_serial() == serial:
                return candidate
        return None
    try:
        return controllers[controller_id]
    except IndexError:
        return None
class ControllerListResource(Resource):
    """GET /controllers/ -- state snapshot of every connected controller."""
    def get(self):
        return [controller.state_as_dict() for controller in controllers]
class ControllerResource(Resource):
def get(self, controller_id):
controller = get_controller_by_id(controller_id)
if controller is None:
abort(404, message="Controller {} doesn't exist".format(controller_id))
controller.last_accessed = datetime.datetime.now()
return controller.state_as_d | ict()
def put(self, controller_id):
controller = get_controller_by_id(controller_id)
if controller is None:
abort(404, message= | "Controller {} doesn't exist".format(controller_id))
args = parser.parse_args()
red = args['red']
green = args['green']
blue = args['blue']
controller.set_color(red, green, blue)
rumble = args['rumble']
if rumble is not None:
controller.set_rumble(rumble)
return 'Updated', 201
api.add_resource(ControllerListResource, '/controllers/')
api.add_resource(ControllerResource, '/controllers/<int:controller_id>/')
def battery_saver_factory(seconds):
    """Build a thread target that polls all controllers twice per second and
    turns off LEDs/rumble on any controller untouched for > *seconds* seconds."""
    def battery_saver():
        while(True):
            for controller in controllers:
                if controller.red or controller.green or controller.blue or controller.rumble:
                    time_since_last_accessed = datetime.datetime.now() - controller.last_accessed
                    # manual timedelta -> seconds conversion (days*86400 + seconds)
                    if (time_since_last_accessed.days * 86400 + time_since_last_accessed.seconds > seconds):
                        controller.set_color(0, 0, 0)
                        controller.set_rumble(0)
            time.sleep(0.5)
    return battery_saver
def main():
    """Parse CLI options, optionally start the battery-saver daemon thread,
    run the Flask app, and terminate all controllers on shutdown."""
    arguments = docopt(__doc__, version='PS Move Restful Server')
    battery_saver_mode = arguments['--battery-saver']
    for controller in controllers:
        controller.last_accessed = datetime.datetime.now()
    if battery_saver_mode:
        # daemon thread dies with the process; 3 second inactivity cutoff
        battery_saver_thread = Thread(target=battery_saver_factory(3))
        battery_saver_thread.daemon = True
        battery_saver_thread.start()
    app.run(debug=False, use_reloader=False)
    for controller in controllers:
        controller.terminate()
if __name__ == '__main__':
main()
|
garyForeman/LHBassClassifier | mongodb/thread_data.py | Python | agpl-3.0 | 4,209 | 0.000475 | #! /usr/bin/env python
"""
Author: Gary Foreman
Created: September 18, 2016
This script scrapes image urls, thread titles, user names, and thread
ids from thread links in the For Sale: Bass Guitars forum at
talkbass.com. Information from each thread is saved as a document in a
MongoDB database.
"""
from __future__ import print_function
import sys
import pymongo
from pyquery import PyQuery as pq
sys.path.append('..')
from utilities.utilities import pause_scrape, report_progress
NUM_PAGES = 1
MIN_PAUSE_SECONDS = 5.
MAX_PAUSE_SECONDS = 15.
REPORT_MESSAGE = 'Finished scraping page'
INDEX_TALKBASS = 'http://www.talkbass.com/'
CLASSIFIEDS = "forums/for-sale-bass-guitars.126/"
def get_page_url(i):
    """
    i : integer page number of classified section
    returns : full url path to desired page number (page 1 has no suffix)
    """
    url = INDEX_TALKBASS + CLASSIFIEDS
    if i > 1:
        url += 'page-{0}'.format(i)
    return url
def get_threads(d):
    """
    d : a PyQuery object containing web page html
    returns: list of thread lis with id beginning with "thread-" and class not
        containing the string 'sticky'
    """
    # CSS: <li> elements whose id starts with "thread-", excluding pinned
    # ("sticky") threads so only regular for-sale listings are returned
    return d('li[id^="thread-"]:not(.sticky)')
class ThreadDataExtractor(object):
    """
    Extracts thread data to be stored as MongoDB document

    Attributes
    ----------
    thread: lxml.html.HtmlElement
        contains a for sale thread link
    data: dictionary
        contains thread data; keys: _id, username, thread_title,
        image_url, post_date

    Methods
    -------
    extract_data
        populates fields of data attribute
    """
    def __init__(self, thread):
        self.thread = thread
        self._d = self._parse_thread()
        self.data = {}
    def _parse_thread(self):
        """Wrap the raw thread element in a PyQuery object for CSS queries."""
        return pq(self.thread)
    def extract_data(self):
        """Populate self.data with all fields scraped from the thread."""
        self.data['_id'] = self._extract_thread_id()
        self.data['username'] = self._extract_username()
        self.data['thread_title'] = self._extract_thread_title()
        self.data['image_url'] = self._extract_image_url()
        self.data['post_date'] = self._extract_post_date()
    def _extract_thread_id(self):
        # li id looks like "thread-<digits>"; strip the "thread-" prefix
        return self._d('li').attr['id'][len('thread-'):]
    def _extract_username(self):
        return self._d('li').attr['data-author']
    def _extract_thread_title(self):
        return self._d('.PreviewTooltip').text()
    def _extract_image_url(self):
        return self._d('.thumb.Av1s.Thumbnail').attr['data-thumbnailurl']
    def _extract_post_date(self):
        post_date = self._d('span.DateTime').text()
        # if thread has been posted within the last week, date is contained
        # elsewhere (an <abbr> element with a data-datestring attribute)
        if post_date == '':
            post_date = self._d('abbr.DateTime').attr['data-datestring']
        return post_date
def extract_thread_data(thread_list):
"""
thread_list: | list of lxml.html.HtmlElement containing each for sale thread
link
extracts thread data we want to keep: user name, thread title, thread id,
and thumbnail image url, and returns a list of documents to be inserted
into a MongoDB database
"""
document_list = []
for thread in thread_list:
extractor = ThreadDataExtractor(thread)
extractor.extract_data()
document_list. | append(extractor.data)
return document_list
def main():
    """Scrape NUM_PAGES of the classifieds forum and insert one document per
    thread into the local MongoDB 'for_sale_bass_guitars.threads' collection,
    pausing between pages to throttle requests."""
    # Establish connection to MongoDB open on port 27017
    client = pymongo.MongoClient()
    # Access threads database
    db = client.for_sale_bass_guitars
    for i in xrange(1, NUM_PAGES+1):
        tb_classified_page = get_page_url(i)
        # initialize PyQuery
        d = pq(tb_classified_page)
        thread_list = get_threads(d)
        document_list = extract_thread_data(thread_list)
        try:
            _ = db.threads.insert_many(document_list, ordered=False)
        except pymongo.errors.BulkWriteError:
            # Will throw error if _id has already been used. Just want
            # to skip these threads since data has already been written.
            pass
        pause_scrape(MIN_PAUSE_SECONDS, MAX_PAUSE_SECONDS)
        report_progress(i, REPORT_MESSAGE)
    client.close()
if __name__ == '__main__':
main()
|
dmlc/xgboost | tests/python/test_with_shap.py | Python | apache-2.0 | 817 | 0 | import numpy as np
import xgboost as xgb
import pytest
try:
import shap
except ImportError:
shap = None
pass
pytestmark = pytest.mark.skipif(shap is None, reason="Requires shap package")
# Check integration is not broken from xgboost side
# Changes in binary format may cause problems
| def test_with_shap():
from sklearn.datasets import fetch_california_housing
X, y = fetch_california_housing(return_X_y=True)
dtrain = xgb.DMatrix(X, label=y)
model = xgb.train({"learning_rate": 0.01}, dtrain, 10)
explainer = shap.TreeExplainer(model)
shap_values = explainer.shap_values(X)
margin = model.predict(dtrain, output_margin=True)
assert np.allclose(np.sum(shap_values, axis=len(shap_va | lues.shape) - 1),
margin - explainer.expected_value, 1e-3, 1e-3)
|
asedunov/intellij-community | python/helpers/pydev/setup.py | Python | apache-2.0 | 4,628 | 0.006914 | '''
Full setup, used to distribute the debugger backend to PyPi.
Note that this is mostly so that users can do:
pip install pydevd
in a machine for doing remote-debugging, as a local installation with the IDE should have
everything already distributed.
Reference on wheels:
https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/
http://lucumr.pocoo.org/2014/1/27/python-on-wheels/
Another (no wheels): https://jamie.curle.io/blog/my-first-experience-adding-package-pypi/
New version: change version and then:
rm dist/pydevd*
C:\tools\Miniconda32\Scripts\activate py27_32
python setup.py sdist bdist_wheel
deactivate
C:\tools\Miniconda32\Scripts\activate py34_32
python setup.py sdist bdist_wheel
deactivate
C:\tools\Miniconda32\Scripts\activate py35_32
python setup.py sdist bdist_wheel
deactivate
C:\tools\Miniconda\Scripts\activate py27_64
python setup.py sdist bdist_wheel
deactivate
C:\tools\Miniconda\Scripts\activate py34_64
python setup.py sdist bdist_wheel
deactivate
C:\tools\Miniconda\Scripts\activate py35_64
python setup.py sdist bdist_wheel
deactivate
twine upload dist/pydevd*
'''
from setuptools import setup
from setuptools.dist import Distribution
from distutils.extension import Extension
import os
class BinaryDistribution(Distribution):
    """Force setuptools to treat the package as non-pure so wheels are tagged
    per-platform (used when shipping the compiled cython speedups)."""
    def is_pure(self):
        return False
data_files = []
def accept_file(f):
    """Return True if file name *f* should be bundled into the
    pydevd_attach_to_process data files (matched case-insensitively)."""
    f = f.lower()
    # str.endswith accepts a tuple of suffixes; the original list also
    # contained '.txt' twice -- the duplicate is dropped here.
    extensions = ('.py', '.dll', '.so', '.dylib', '.txt', '.cpp', '.h',
                  '.bat', '.c', '.sh', '.md')
    if f.endswith(extensions):
        return True
    return f in ('readme', 'makefile')
data_files.append(('pydevd_attach_to_process', [os.path.join('pydevd_attach_to_process', f) for f in os.listdir('pydevd_attach_to_process') if accept_file(f)]))
for root, dirs, files in os.walk("pydevd_attach_to_process"):
for d in dirs:
data_files.append((os.path.join(root, d), [os.path.join(root, d, f) for f in os.listdir(os.path.join(root, d)) if accept_file(f)]))
import pydevd
version = pydevd.__version__
args = dict(
name='pydevd',
version=version,
description = 'PyDev.Debugger (used in PyDev and PyCharm)',
author='Fabio Zadrozny and others',
url='https://github.com/fabioz/PyDev.Debugger/',
license='EPL (Eclipse Public License)',
packages=[
'_pydev_bundle',
'_pydev_imps',
'_pydev_runfiles',
'_pydevd_bundle',
'pydev_ipython',
# 'pydev_sitecustomize', -- Not actually a package (not added)
# 'pydevd_attach_to_process', -- Not actually a package (included in MANIFEST.in)
'pydevd_concurrency_analyser',
'pydevd_plugins',
],
py_modules=[
# 'interpreterInfo', -- Not needed for debugger
# 'pycompletionserver', -- Not needed for debugger
'pydev_app_engine_debug_startup',
# 'pydev_coverage', -- Not needed for debugger
# 'pydev_pysrc', -- Not needed for debugger
'pydev_run_in_console',
'pydevconsole',
'pydevd_file_utils',
'pydevd',
'pydevd_tracing',
# 'runfiles', -- Not needed for debugger
# 'setup_cython', -- Should not be included as a module
# 'setup', -- Should not be included as a module
],
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: Developers',
# It seems that the license is not recognized by Pypi, so, not categorizing it for now.
# https://bitbucket.org/pypa/pypi/issues/369/the-eclipse-public-license-superseeded
# 'License :: OSI Approved :: Eclipse Public License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development :: Debuggers',
],
data_files=data_files,
keywords=['pydev', 'pydevd', 'pydev.debugger'],
include_package_data=True,
zip_safe=False,
)
import sys
try:
args_with_binaries = args.copy()
args_with_binaries.update(dict(
distclass=BinaryDistribution,
ext_modules=[
# In this setup, don't even try to compile with cython, just go with the .c file which should've
# been properly generated from | a tested version.
Extension('_pydevd_bundle.pydevd_cython', ["_pydevd_bundle/pydevd_cython.c",])
]
))
setup(**args_with_binaries)
except:
# Co | mpile failed: just setup without compiling cython deps.
setup(**args)
sys.stdout.write('Plain-python version of pydevd installed (cython speedups not available).\n')
|
shishaochen/TensorFlow-0.8-Win | tensorflow/python/ops/gen_data_flow_ops.py | Python | apache-2.0 | 45,781 | 0.002075 | """Python wrappers around Brain.
This file is MACHINE GENERATED! Do not edit.
"""
from google.protobuf import text_format
from tensorflow.core.framework import op_def_pb2
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.ops import op_def_library
def _delete_session_tensor(handle, name=None):
r"""Delete the tensor specified by its handle in the session.
Args:
handle: A `Tensor` of type `string`.
The handle for a tensor stored in the session state.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
return _op_def_lib.apply_op("DeleteSessionTensor", handle=handle, name=name)
def dynamic_partition(data, partitions, num_partitions, name=None):
  r"""Partitions `data` into `num_partitions` tensors using indices from `partitions`.

  For each index tuple `js` of size `partitions.ndim`, the slice `data[js, ...]`
  becomes part of `outputs[partitions[js]]`. The slices with `partitions[js] = i`
  are placed in `outputs[i]` in lexicographic order of `js`, and the first
  dimension of `outputs[i]` is the number of entries in `partitions` equal to `i`.
  In detail,

      outputs[i].shape = [sum(partitions == i)] + data.shape[partitions.ndim:]
      outputs[i] = pack([data[js, ...] for js if partitions[js] == i])

  `data.shape` must start with `partitions.shape`.

  For example:

      # Scalar partitions
      partitions = 1
      num_partitions = 2
      data = [10, 20]
      outputs[0] = []  # Empty with shape [0, 2]
      outputs[1] = [[10, 20]]

      # Vector partitions
      partitions = [0, 0, 1, 1, 0]
      num_partitions = 2
      data = [10, 20, 30, 40, 50]
      outputs[0] = [10, 20, 50]
      outputs[1] = [30, 40]

  <div style="width:70%; margin:auto; margin-bottom:10px; margin-top:20px;">
  <img style="width:100%" src="../../images/DynamicPartition.png" alt>
  </div>

  Args:
    data: A `Tensor`.
    partitions: A `Tensor` of type `int32`.
      Any shape. Indices in the range `[0, num_partitions)`.
    num_partitions: An `int` that is `>= 1`.
      The number of partitions to output.
    name: A name for the operation (optional).

  Returns:
    A list of `num_partitions` `Tensor` objects of the same type as data.
  """
  return _op_def_lib.apply_op("DynamicPartition", data=data,
                              partitions=partitions,
                              num_partitions=num_partitions, name=name)
def dynamic_stitch(indices, data, name=None):
  r"""Interleave the values from the `data` tensors into a single tensor.

  Builds a merged tensor such that

      merged[indices[m][i, ..., j], ...] = data[m][i, ..., j, ...]

  For example, if each `indices[m]` is scalar or vector, we have

      # Scalar indices
      merged[indices[m], ...] = data[m][...]

      # Vector indices
      merged[indices[m][i], ...] = data[m][i, ...]

  Each `data[i].shape` must start with the corresponding `indices[i].shape`,
  and the rest of `data[i].shape` must be constant w.r.t. `i`. That is, we
  must have `data[i].shape = indices[i].shape + constant`. In terms of this
  `constant`, the output shape is

      merged.shape = [max(indices)] + constant

  Values are merged in order, so if an index appears in both `indices[m][i]` and
  `indices[n][j]` for `(m,i) < (n,j)` the slice `data[n][j]` will appear in the
  merged result.

  For example:

      indices[0] = 6
      indices[1] = [4, 1]
      indices[2] = [[5, 2], [0, 3]]
      data[0] = [61, 62]
      data[1] = [[41, 42], [11, 12]]
      data[2] = [[[51, 52], [21, 22]], [[1, 2], [31, 32]]]
      merged = [[1, 2], [11, 12], [21, 22], [31, 32], [41, 42],
                [51, 52], [61, 62]]

  <div style="width:70%; margin:auto; margin-bottom:10px; margin-top:20px;">
  <img style="width:100%" src="../../images/DynamicStitch.png" alt>
  </div>

  Args:
    indices: A list of at least 2 `Tensor` objects of type `int32`.
    data: A list with the same number of `Tensor` objects as `indices` of `Tensor` objects of the same type.
    name: A name for the operation (optional).

  Returns:
    A `Tensor`. Has the same type as `data`.
  """
  op_kwargs = {"indices": indices, "data": data, "name": name}
  return _op_def_lib.apply_op("DynamicStitch", **op_kwargs)
def _fifo_queue(component_types, shapes=None, capacity=None, container=None,
                shared_name=None, name=None):
  r"""A queue that produces elements in first-in first-out order.

  Args:
    component_types: A list of `tf.DTypes` that has length `>= 1`.
      The type of each component in a value.
    shapes: An optional list of shapes (each a `tf.TensorShape` or list of `ints`). Defaults to `[]`.
      The shape of each component in a value. The length of this attr must
      be either 0 or the same as the length of component_types. If the length
      of this attr is 0, the shapes of queue elements are not constrained, and
      only one element may be dequeued at a time.
    capacity: An optional `int`. Defaults to `-1`.
      The upper bound on the number of elements in this queue.
      Negative numbers mean no limit.
    container: An optional `string`. Defaults to `""`.
      If non-empty, this queue is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this queue will be shared under the given name
      across multiple sessions.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type mutable `string`. The handle to the queue.
  """
  op_kwargs = dict(
      component_types=component_types,
      shapes=shapes,
      capacity=capacity,
      container=container,
      shared_name=shared_name,
      name=name,
  )
  return _op_def_lib.apply_op("FIFOQueue", **op_kwargs)
def _get_session_handle(value, name=None):
  r"""Store the input tensor in the state of the current session.

  Args:
    value: A `Tensor`. The tensor to be stored.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
    The handle for the tensor stored in the session state.
  """
  op_kwargs = {"value": value, "name": name}
  return _op_def_lib.apply_op("GetSessionHandle", **op_kwargs)
def _get_session_tensor(handle, dtype, name=None):
  r"""Get the value of the tensor specified by its handle.

  Args:
    handle: A `Tensor` of type `string`.
      The handle for a tensor stored in the session state.
    dtype: A `tf.DType`. The type of the output value.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `dtype`. The tensor for the given handle.
  """
  op_kwargs = {"handle": handle, "dtype": dtype, "name": name}
  return _op_def_lib.apply_op("GetSessionTensor", **op_kwargs)
def _hash_table(key_dtype, value_dtype, container=None, shared_name=None,
                name=None):
  r"""Creates a non-initialized hash table.

  This op creates a hash table, specifying the type of its keys and values.
  Before using the table you will have to initialize it. After initialization
  the table will be immutable.

  Args:
    key_dtype: A `tf.DType`. Type of the table keys.
    value_dtype: A `tf.DType`. Type of the table values.
    container: An optional `string`. Defaults to `""`.
      If non-empty, this table is placed in the given container.
      Otherwise, a default container is used.
    shared_name: An optional `string`. Defaults to `""`.
      If non-empty, this table is shared under the given name across
      multiple sessions.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type mutable `string`. Handle to a table.
  """
  op_kwargs = dict(
      key_dtype=key_dtype,
      value_dtype=value_dtype,
      container=container,
      shared_name=shared_name,
      name=name,
  )
  return _op_def_lib.apply_op("HashTable", **op_kwargs)
def _initialize_table(table_handle, keys, values, name=None):
r"""Table initializer that takes two tensors for keys and values respectively.
Args:
table_handle: A `Tensor` of type mutable `string`.
Handle to a table which will be initialized.
keys: A `Tensor`. Keys of |
diafygi/pdfformfiller | docs/conf.py | Python | gpl-3.0 | 9,307 | 0.006017 | # -*- coding: utf-8 -*-
#
# pdfformfiller documentation build configuration file, created by
# sphinx-quickstart on Sat Mar 19 21:38:58 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../pdfformfiller'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pdfformfiller'
copyright = u'2016, Daniel Roesler'
author = u'Daniel Roesler'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.4'
# The full version, including alpha/beta/rc tags.
release = u'0.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default. |
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help build | er.
htmlhelp_basename = 'pdfformfillerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    # Author capitalized consistently with the module-level ``author``
    # ("Daniel Roesler"); previously read "Daniel roesler" here.
    (master_doc, 'pdfformfiller.tex', u'pdfformfiller Documentation',
     u'Daniel Roesler', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
|
fro391/Investing | PRAW/Praw.py | Python | gpl-2.0 | 168 | 0.017857 | import praw
| r = praw.Reddit(user_agent='kumaX')
#r.login('kumaX','Sho3lick')
submissions = r.get_subreddit('worldnews').get_top()
print [str(x) for x in submissions]
| |
datagutten/comics | comics/comics/billy.py | Python | agpl-3.0 | 384 | 0 | from comics.aggregator.crawler import CrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
    """Metadata for the Norwegian comic "Billy" (no longer crawled)."""

    name = 'Billy'
    language = 'no'
    # Fixed: URL string was corrupted to 'http://www.bill | y.no/'.
    url = 'http://www.billy.no/'
    start_date = '1950-01-01'
    active = False
    rights = 'Mort Walker'
def crawl(self, pub_date):
pass # Com | ic no longer published
|
mstepniowski/django-newtagging | newtagging/managers.py | Python | mit | 2,765 | 0.005063 | """
Custom managers for Django models registered with the tagging
application.
"""
from django.contrib.contenttypes.models import ContentType
from django.db import models
class ModelTagManager(models.Manager):
    """
    A manager for retrieving tags for a particular model.
    """
    def __init__(self, tag_model):
        super(ModelTagManager, self).__init__()
        # The concrete tag model this manager serves (set at registration time).
        self.tag_model = tag_model

    def get_query_set(self):
        # Return all distinct tags attached to instances of ``self.model``,
        # filtered through the intermediary "items" relation by content type.
        content_type = ContentType.objects.get_for_model(self.model)
        return self.tag_model.objects.filter(
            items__content_type__pk=content_type.pk).distinct()

    def related(self, tags, *args, **kwargs):
        # Delegate to the tag model's manager, scoped to ``self.model``.
        return self.tag_model.objects.related_for_model(tags, self.model, *args, **kwargs)

    def usage(self, *args, **kwargs):
        # Tag usage statistics (counts etc.) for this manager's model.
        return self.tag_model.objects.usage_for_model(self.model, *args, **kwargs)
class ModelTaggedItemManager(models.Manager):
    """
    A manager for retrieving model instances based on their tags.
    """
    def __init__(self, tag_model):
        super(ModelTaggedItemManager, self).__init__()
        # Intermediary ("through") model linking tags to tagged objects.
        self.intermediary_table_model = tag_model.objects.intermediary_table_model

    def related_to(self, obj, queryset=None, num=None):
        """Return up to ``num`` instances related to ``obj`` by shared tags."""
        # Fixed: ``self.mode | l`` transcription corruption restored to ``self.model``.
        if queryset is None:
            return self.intermediary_table_model.objects.get_related(obj, self.model, num=num)
        else:
            return self.intermediary_table_model.objects.get_related(obj, queryset, num=num)

    def with_all(self, tags, queryset=None):
        """Return instances tagged with *all* of ``tags``."""
        if queryset is None:
            return self.intermediary_table_model.objects.get_by_model(self.model, tags)
        else:
            return self.intermediary_table_model.objects.get_by_model(queryset, tags)

    def with_any(self, tags, queryset=None):
        """Return instances tagged with *any* of ``tags``."""
        if queryset is None:
            return self.intermediary_table_model.objects.get_union_by_model(self.model, tags)
        else:
            return self.intermediary_table_model.objects.get_union_by_model(queryset, tags)
class TagDescriptor(object):
    """
    A descriptor which provides access to a ``ModelTagManager`` for
    model classes and simple retrieval, updating and deletion of tags
    for model instances.
    """
    def __init__(self, tag_model):
        self.tag_model = tag_model

    def __get__(self, instance, owner):
        # Class-level access yields a manager; instance access yields the tags.
        if not instance:
            tag_manager = ModelTagManager(self.tag_model)
            tag_manager.model = owner
            return tag_manager
        else:
            return self.tag_model.objects.get_for_object(instance)

    def __set__(self, instance, value):
        self.tag_model.objects.update_tags(instance, value)

    def __delete__(self, instance):
        # BUG FIX: this hook was previously named ``__del__`` (the object
        # finalizer, which takes no ``instance`` argument), so ``del obj.tags``
        # never cleared tags. The descriptor deletion hook is ``__delete__``.
        self.tag_model.objects.update_tags(instance, [])
|
hekra01/mercurial | tests/hgweberror.py | Python | gpl-2.0 | 547 | 0.003656 | # A dummy extension that installs an hgweb command that throws an Exception.
from mercurial.hgweb import webcommands
def raiseerror(web, req, tmpl):
    '''Dummy web command that raises an uncaught Exception.

    Used by tests to exercise hgweb's error handling. If the request carries
    a ``partialresponse`` form field, a partial body is emitted first so the
    error occurs mid-response.
    '''
    # Simulate an error after partial response.
    if 'partialresponse' in req.form:
        req.respond(200, 'text/plain')
        req.write('partial content\n')
    # Fixed: message string was corrupted to "' | I am an uncaught error!'".
    raise AttributeError('I am an uncaught error!')
def extsetup(ui):
    # Register the dummy command on the webcommands module and export it so
    # hgweb serves it at the /raiseerror URL.
    setattr(webcommands, 'raiseerror', raiseerror)
    webcommands.__all__.append('raiseerror')
| |
okfde/froide-campaign | froide_campaign/listeners.py | Python | mit | 1,213 | 0 | from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from .consumers import PRESENCE_ROOM
from .models import Campaign
def connect_info_object(sender, **kwargs):
    """Connect ``sender`` (a request-like object) to its campaign object.

    The reference is taken from ``kwargs['reference']`` or, failing that,
    ``sender.reference``, and must have the form
    ``"campaign:<campaign_pk>@<ident>"``. Malformed, empty, or non-campaign
    references are ignored silently; on success, connected clients are
    notified via ``broadcast_request_made``.
    """
    reference = kwargs.get("reference")
    if not reference:
        reference = sender.reference
    if not reference:
        return
    if not reference.startswith("campaign:"):
        return
    namespace, campaign_value = reference.split(":", 1)
    try:
        campaign, ident = campaign_value.split("@", 1)
    except (ValueError, IndexError):
        return
    if not ident:
        return
    # Fixed: a "    | try:" transcription corruption restored to "try:".
    try:
        campaign_pk = int(campaign)
    except ValueError:
        return
    try:
        campaign = Campaign.objects.get(pk=campaign_pk)
    except Campaign.DoesNotExist:
        return
    provider = campaign.get_provider()
    iobj = provider.connect_request(ident, sender)
    if iobj:
        broadcast_request_made(provider, iobj)
def broadcast_request_made(provider, iobj):
    """Push details of a newly connected request to the campaign's presence room.

    Fixed: the ``def`` line was corrupted to ``def broa | dcast_request_made``.
    """
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        PRESENCE_ROOM.format(provider.campaign.id),
        {"type": "request_made", "data": provider.get_detail_data(iobj)},
    )
|
ian-wilson/cron-admin | nextrun/__main__.py | Python | mit | 975 | 0.001026 | # -*- coding: utf-8 -*-
import argparse
from next_run import NextRun
if __name__ == u'__main | __':
# Parse command line arguments
parser = argparse.ArgumentParser(
description=u'Cron Admin tool.'
)
parser.add_argument(
u'-t',
dest=u'current_time',
default=u'16:10',
help=u'The time from which to check'
)
parser.add_argument(
u'-p',
dest=u'cron_path',
default=None,
help= | u'Full path to the cron file to check'
)
parser.add_argument(
u'-s',
dest=u'cron_string',
default=None,
help=u'A newline separated string of cron data'
)
args = parser.parse_args()
# Call the class controller to run the script
next_run_times = NextRun().find_next_run_times(
args.current_time, args.cron_path, args.cron_string
)
# Output results to console
for cron_data in next_run_times:
print u' '.join(cron_data) |
paxy97/qmk_firmware | layouts/community/ergodox/german-manuneo/compile_keymap.py | Python | gpl-2.0 | 21,163 | 0.001512 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Compiler for keymap.c files
This scrip will generate a keymap.c file from a simple
markdown file with a specific layout.
Usage:
python | compile_keymap.py INPUT_PATH [OUTPUT_PATH]
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import io
import re
import sys
import json
import unicodedata
import collections
import itertools as it
# Python 2/3 compatibility shim: on Python 2, rebind ``chr`` to ``unichr``
# so it produces unicode characters like Python 3's ``chr``.
PY2 = sys.version_info.major == 2
if PY2:
    chr = unichr
KEYBOARD_LAYOUTS = {
# These map positions in the parsed layout to
# positions in the LAYOUT_ergodox MATRIX
'ergodox_ez': [
[ 0, 1, 2, 3, 4, 5, 6], [38, 39, 40, 41, 42, 43, 44],
[ 7, 8, 9, 10, 11, 12, 13], [45, 46, 47, 48, 49, 50, 51],
[14, 15, 16, 17, 18, 19 ], [ 52, 53, 54, 55, 56, 57],
[20, 21, 22, 23, 24, 25, 26], [58, 59, 60, 61, 62, 63, 64],
[27, 28, 29, 30, 31 ], [ 65, 66, 67, 68, 69],
[ 32, 33], [70, 71 ],
[ 34], [72 ],
[ 35, 36, 37], [73, 74, 75 ],
]
}
ROW_INDENTS = {
'ergodox_ez': [0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 5, 0, 6, 0, 4, 0]
}
BLANK_LAYOUTS = [
# Compact Layout
"""
.------------------------------------.------------------------------------.
| | | | | | | | | | | | | | |
!-----+----+----+----+----+----------!-----+----+----+----+----+----+-----!
| | | | | | | | | | | | | | |
!-----+----+----+----x----x----! ! !----x----x----+----+----+-----!
| | | | | | |-----!-----! | | | | | |
!-----+----+----+----x----x----! ! !----x----x----+----+----+-----!
| | | | | | | | | | | | | | |
'-----+----+----+----+----+----------'----------+----+----+----+----+-----'
| | | | | | ! | | | | |
'------------------------' '------------------------'
.-----------. .-----------.
| | | ! | |
.-----+-----+-----! !-----+-----+-----.
! ! | | ! | ! !
! ! !-----! !-----! ! !
| | | | ! | | |
'-----------------' '-----------------'
""",
# Wide Layout
"""
.---------------------------------------------. .---------------------------------------------.
| | | | | | | | ! | | | | | | |
!-------+-----+-----+-----+-----+-------------! !-------+-----+-----+-----+-----+-----+-------!
| | | | | | | | ! | | | | | | |
!-------+-----+-----+-----x-----x-----! ! ! !-----x-----x-----+-----+-----+-------!
| | | | | | |-------! !-------! | | | | | |
!-------+-----+-----+-----x-----x-----! ! ! !-----x-----x-----+-----+-----+-------!
| | | | | | | | ! | | | | | | |
'-------+-----+-----+-----+-----+-------------' '-------------+-----+-----+-----+-----+-------'
| | | | | | ! | | | | |
'------------------------------' '------------------------------'
.---------------. .---------------.
| | | ! | |
.-------+-------+-------! !-------+-------+-------.
! ! | | ! | ! !
! ! !-------! !-------! ! !
| | | | ! | | |
'-----------------------' '-----------------------'
""",
]
# Baseline configuration; a keymap file's "layout_config" section is merged
# on top of these defaults (see parse_config).
DEFAULT_CONFIG = {
    "keymaps_includes": [
        "keymap_common.h",
    ],
    'filler': "-+.'!:x",  # characters used to draw the ASCII-art frame
    'separator': "|",  # column separator inside a layout row
    'default_key_prefix': ["KC_"],
}

# Recognized top-level markdown sections of a keymap definition file.
SECTIONS = [
    'layout_config',
    'layers',
]
# Markdown Parsing
# Matches a whole-line ``//`` comment (the comment text is captured).
ONELINE_COMMENT_RE = re.compile(r"""
    ^           # comment must be at the start of the line
    \s*         # arbitrary whitespace
    //          # start of the comment
    (.*)        # the comment
    $           # until the end of line
    """, re.MULTILINE | re.VERBOSE
)

# Matches a trailing ``//`` comment after an expression-ending character.
INLINE_COMMENT_RE = re.compile(r"""
    ([\,\"\[\]\{\}\d])  # anythig that might end a expression
    \s+                 # comment must be preceded by whitespace
    //                  # start of the comment
    \s                  # and succeded by whitespace
    (?:[^\"\]\}\{\[]*)  # the comment (except things which might be json)
    $                   # until the end of line
    """, re.MULTILINE | re.VERBOSE)

# Matches a comma left dangling before a closing bracket/brace.
TRAILING_COMMA_RE = re.compile(r"""
    ,           # the comma
    (?:\s*)     # arbitrary whitespace
    $           # only works if the trailing comma is followed by newline
    (\s*)       # arbitrary whitespace
    ([\]\}])    # end of an array or object
    """, re.MULTILINE | re.VERBOSE)


def loads(raw_data):
    """Parse JSON text that may contain ``//`` comments and trailing commas."""
    text = raw_data.decode('utf-8') if isinstance(raw_data, bytes) else raw_data
    # Strip the non-standard syntax in the same order as before, then hand the
    # cleaned text to the strict JSON parser.
    for pattern, replacement in (
        (ONELINE_COMMENT_RE, r""),
        (INLINE_COMMENT_RE, r"\1"),
        (TRAILING_COMMA_RE, r"\1\2"),
    ):
        text = pattern.sub(replacement, text)
    return json.loads(text)
def parse_config(path):
    """Parse a markdown keymap file at ``path`` into a config dict.

    ``#``/``##`` headings start sections; 4-space indented lines are the
    section's code. A "layout_config" section is merged into the config as
    JSON; "layer*" sub-sections are collected into ``config['layer_lines']``.
    """
    def reset_section():
        # Clear per-section parsing state (the last main section name sticks).
        section.update({
            'name': section.get('name', ""),
            'sub_name': "",
            'start_line': -1,
            'end_line': -1,
            'code_lines': [],
        })

    def start_section(line_index, line):
        # Flush the previous section, then derive the new section's name from
        # the heading text ("# Foo Bar" -> "foo_bar").
        end_section()
        if line.startswith("# "):
            name = line[2:]
        elif line.startswith("## "):
            name = line[3:]
        else:
            name = ""
        name = name.strip().replace(" ", "_").lower()
        if name in SECTIONS:
            section['name'] = name
        else:
            section['sub_name'] = name
        section['start_line'] = line_index

    def end_section():
        # Commit the finished section's collected code lines.
        if section['start_line'] >= 0:
            if section['name'] == 'layout_config':
                config.update(loads("\n".join(
                    section['code_lines']
                )))
            elif section['sub_name'].startswith('layer'):
                layer_name = section['sub_name']
                config['layer_lines'][layer_name] = section['code_lines']
        reset_section()

    def amend_section(line_index, line):
        section['end_line'] = line_index
        section['code_lines'].append(line)

    config = DEFAULT_CONFIG.copy()
    config.update({
        'layer_lines': collections.OrderedDict(),
        'macro_ids': {'UM'},
        'unicode_macros': {},
    })
    section = {}
    reset_section()
    with io.open(path, encoding="utf-8") as fh:
        for i, line in enumerate(fh):
            if line.startswith("#"):
                start_section(i, line)
            elif line.startswith("    "):
                # Indented line: part of the current section's code block.
                amend_section(i, line[4:])
            else:
                # TODO: maybe parse description
                pass
    end_section()
    # A usable keymap must at least declare its physical layout.
    assert 'layout' in config
    return config
# header file parsing
# BUG FIX: in re.VERBOSE mode an unescaped "#" starts a regex comment, so the
# previous pattern's "#if 0" / "#endif" lines were treated as comments and the
# regex never matched anything. Escape "#" and the space to match literally.
IF0_RE = re.compile(r"""
    ^
    \#if\ 0     # literal "#if 0" at the start of a line
    $.*?
    \#endif     # up to the first closing "#endif"
    """, re.MULTILINE | re.DOTALL | re.VERBOSE)

# BUG FIX: the previous pattern had a stray '"' after \*/, so only comments
# immediately followed by a double quote were matched.
COMMENT_RE = re.compile(r"""
    /\*         # opening "/*"
    .*?         # lazily consume the body (DOTALL: may span lines)
    \*/         # first closing "*/"
    """, re.MULTILINE | re.DOTALL | re.VERBOSE)


def read_header_file(path):
    """Return the text of a C header with ``#if 0`` blocks and comments removed."""
    with io.open(path, encoding="utf-8") as fh:
        data = fh.read()
    data, _ = COMMENT_RE.subn("", data)
    data, _ = IF0_RE.subn("", data)
    return data
def regex_part |
cpennington/edx-platform | common/djangoapps/student/tests/test_helpers.py | Python | agpl-3.0 | 6,067 | 0.004121 | """ Test Student helpers """
import logging
import ddt
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import patch
from testfixtures import LogCapture
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration_context
from student.helpers import get_next_url_for_login_page
LOGGER_NAME = "student.helpers"
@ddt.ddt
class TestLoginHelper(TestCase):
    """Test login helper methods."""
    static_url = settings.STATIC_URL

    def setUp(self):
        super(TestLoginHelper, self).setUp()
        self.request = RequestFactory()

    @staticmethod
    def _add_session(request):
        """Annotate the request object with a session"""
        middleware = SessionMiddleware()
        middleware.process_request(request)
        request.session.save()

    # NOTE: two corrupted tuples in this data table were repaired (a stray
    # leading "| " and a "logging. | WARNING" split).
    @ddt.data(
        (logging.WARNING, "WARNING", "https://www.amazon.com", "text/html", None,
         "Unsafe redirect parameter detected after login page: 'https://www.amazon.com'"),
        (logging.WARNING, "WARNING", "testserver/edx.org/images/logo", "text/html", None,
         "Redirect to theme content detected after login page: 'testserver/edx.org/images/logo'"),
        (logging.INFO, "INFO", "favicon.ico", "image/*", "test/agent",
         "Redirect to non html content 'image/*' detected from 'test/agent' after login page: 'favicon.ico'"),
        (logging.WARNING, "WARNING", "https://www.test.com/test.jpg", "image/*", None,
         "Unsafe redirect parameter detected after login page: 'https://www.test.com/test.jpg'"),
        (logging.INFO, "INFO", static_url + "dummy.png", "image/*", "test/agent",
         "Redirect to non html content 'image/*' detected from 'test/agent' after login page: '" + static_url +
         "dummy.png" + "'"),
        (logging.WARNING, "WARNING", "test.png", "text/html", None,
         "Redirect to url path with specified filed type 'image/png' not allowed: 'test.png'"),
        (logging.WARNING, "WARNING", static_url + "dummy.png", "text/html", None,
         "Redirect to url path with specified filed type 'image/png' not allowed: '" + static_url + "dummy.png" + "'"),
    )
    @ddt.unpack
    def test_next_failures(self, log_level, log_name, unsafe_url, http_accept, user_agent, expected_log):
        """ Test unsafe next parameter """
        with LogCapture(LOGGER_NAME, level=log_level) as logger:
            req = self.request.get(settings.LOGIN_URL + "?next={url}".format(url=unsafe_url))
            req.META["HTTP_ACCEPT"] = http_accept
            req.META["HTTP_USER_AGENT"] = user_agent
            get_next_url_for_login_page(req)
            logger.check(
                (LOGGER_NAME, log_name, expected_log)
            )

    @ddt.data(
        ('/dashboard', 'text/html', 'testserver'),
        ('https://edx.org/courses', 'text/*', 'edx.org'),
        ('https://test.edx.org/courses', '*/*', 'edx.org'),
        ('https://test2.edx.org/courses', 'image/webp, */*;q=0.8', 'edx.org'),
    )
    @ddt.unpack
    @override_settings(LOGIN_REDIRECT_WHITELIST=['test.edx.org', 'test2.edx.org'])
    def test_safe_next(self, next_url, http_accept, host):
        """ Test safe next parameter """
        req = self.request.get(settings.LOGIN_URL + "?next={url}".format(url=next_url), HTTP_HOST=host)
        req.META["HTTP_ACCEPT"] = http_accept
        next_page = get_next_url_for_login_page(req)
        self.assertEqual(next_page, next_url)

    tpa_hint_test_cases = [
        # Test requests outside the TPA pipeline - tpa_hint should be added.
        (None, '/dashboard', '/dashboard', False),
        ('', '/dashboard', '/dashboard', False),
        ('', '/dashboard?tpa_hint=oa2-google-oauth2', '/dashboard?tpa_hint=oa2-google-oauth2', False),
        ('saml-idp', '/dashboard', '/dashboard?tpa_hint=saml-idp', False),
        # THIRD_PARTY_AUTH_HINT can be overridden via the query string
        ('saml-idp', '/dashboard?tpa_hint=oa2-google-oauth2', '/dashboard?tpa_hint=oa2-google-oauth2', False),
        # Test requests inside the TPA pipeline - tpa_hint should not be added, preventing infinite loop.
        (None, '/dashboard', '/dashboard', True),
        ('', '/dashboard', '/dashboard', True),
        ('', '/dashboard?tpa_hint=oa2-google-oauth2', '/dashboard?tpa_hint=oa2-google-oauth2', True),
        ('saml-idp', '/dashboard', '/dashboard', True),
        # OK to leave tpa_hint overrides in place.
        ('saml-idp', '/dashboard?tpa_hint=oa2-google-oauth2', '/dashboard?tpa_hint=oa2-google-oauth2', True),
    ]

    # Expand every test case to run under both GET and POST.
    tpa_hint_test_cases_with_method = [
        (method, *test_case)
        for test_case in tpa_hint_test_cases
        for method in ['GET', 'POST']
    ]

    @patch('student.helpers.third_party_auth.pipeline.get')
    @ddt.data(*tpa_hint_test_cases_with_method)
    @ddt.unpack
    def test_third_party_auth_hint(
            self,
            method,
            tpa_hint,
            next_url,
            expected_url,
            running_pipeline,
            mock_running_pipeline,
    ):
        """Verify tpa_hint is appended (or not) to the login redirect URL."""
        mock_running_pipeline.return_value = running_pipeline

        def validate_login():
            """
            Assert that get_next_url_for_login_page returns as expected.
            """
            if method == 'GET':
                req = self.request.get(settings.LOGIN_URL + "?next={url}".format(url=next_url))
            elif method == 'POST':
                req = self.request.post(settings.LOGIN_URL, {'next': next_url})
            req.META["HTTP_ACCEPT"] = "text/html"
            self._add_session(req)
            next_page = get_next_url_for_login_page(req)
            self.assertEqual(next_page, expected_url)

        with override_settings(FEATURES=dict(settings.FEATURES, THIRD_PARTY_AUTH_HINT=tpa_hint)):
            validate_login()

        with with_site_configuration_context(configuration=dict(THIRD_PARTY_AUTH_HINT=tpa_hint)):
            validate_login()
|
tomi77/protobuf-gis | python/tests/test_polygon.py | Python | mit | 2,150 | 0.005581 | im | port unittest
from gis.protobuf.polygon_pb2 import Polygon2D, Polygon3D, MultiPolygon2D, MultiPolygon3D
from gis.protobuf.point_pb2 import Point2D, Point3D
class Polygon2DTestCase(unittest.TestCase):
    """GeoJSON serialization of a 2-D polygon message."""

    def test_toGeoJSON(self):
        ring = [Point2D(x=1.0, y=2.0), Point2D(x=3.0, y=4.0)]
        expected = {
            'type': 'Polygon',
            'coordinates': [[[1.0, 2.0], [3.0, 4.0]]],
        }
        self.assertEqual(Polygon2D(point=ring).toGeoJSON(), expected)
class Polygon3DTestCase(unittest.TestCase):
    """toGeoJSON on a 3D polygon yields a GeoJSON ``Polygon`` mapping."""

    def test_toGeoJSON(self):
        # One ring with two 3D vertices.
        vertices = [Point3D(x=1.0, y=2.0, z=3.0),
                    Point3D(x=4.0, y=5.0, z=6.0)]
        expected = {
            'type': 'Polygon',
            'coordinates': [[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]]
        }
        self.assertEqual(Polygon3D(point=vertices).toGeoJSON(), expected)
class MultiPolygon2DTestCase(unittest.TestCase):
    """toGeoJSON on a 2D multi-polygon yields a GeoJSON ``MultiPolygon``."""

    def test_toGeoJSON(self):
        first = Polygon2D(point=[Point2D(x=1.0, y=2.0), Point2D(x=3.0, y=4.0)])
        second = Polygon2D(point=[Point2D(x=5.0, y=6.0), Point2D(x=7.0, y=8.0)])
        multi = MultiPolygon2D(polygon=[first, second])
        expected = {
            'type': 'MultiPolygon',
            'coordinates': [[[[1.0, 2.0], [3.0, 4.0]]], [[[5.0, 6.0], [7.0, 8.0]]]]
        }
        self.assertEqual(multi.toGeoJSON(), expected)
class MultiPolygon3DTestCase(unittest.TestCase):
    """toGeoJSON on a 3D multi-polygon yields a GeoJSON ``MultiPolygon``."""

    def test_toGeoJSON(self):
        first = Polygon3D(point=[Point3D(x=1.0, y=2.0, z=3.0),
                                 Point3D(x=4.0, y=5.0, z=6.0)])
        second = Polygon3D(point=[Point3D(x=7.0, y=8.0, z=9.0),
                                  Point3D(x=10.0, y=11.0, z=12.0)])
        multi = MultiPolygon3D(polygon=[first, second])
        expected = {
            'type': 'MultiPolygon',
            'coordinates': [[[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]], [[[7.0, 8.0, 9.0], [10.0, 11.0, 12.0]]]]
        }
        self.assertEqual(multi.toGeoJSON(), expected)
|
garbear/EventGhost | eg/Classes/IrDecoder/Rcmm.py | Python | gpl-2.0 | 2,548 | 0.00314 | # This file is part of EventGhost.
# Copyright (C) 2009 Lars-Peter Voss <bitmonster@eventghost.org>
#
# EventGhost is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# EventGhost is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EventGhost; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from eg.Classes.IrDecoder import IrProtocolBase, DecodeError
# RC-MM "mode" field values mapped to the device-class name used when
# building the decoded event string.
MODES = {
    1: "Mouse",
    2: "Keyboard",
    3: "Gamepad",
}
class Rcmm(IrProtocolBase):
def GetBits(self):
if 66 > self.data[self.pos] > 266:
raise DecodeError("wrong pulse")
pause = self.data[self.pos + 1]
self.pos += 2
if pause < 366:
return 0 # binary 00
elif pause < 528 | :
return 1 # binary 01
elif pause < 694:
return 2 # binary 10
elif pause < 861:
return 3 # binary 11
else:
raise DecodeError("pause too long")
def ShiftInBits(self, numBits):
data = 0
for dummyCounter in xrange(numBits):
data <<= 2
data |= self.GetBits()
return data
def Decode(self, data): |
raise DecodeError("not implemented")
if not (200 < data[0] < 600):
DecodeError("wrong header pulse")
if not (100 < data[1] < 500):
DecodeError("wrong header pause")
self.pos = 2
self.data = data
mode = self.GetBits()
if mode != 0:
addr = self.GetBits()
data = self.ShiftInBits(4)
return "RC-MM.%s.%X.%04X" % (MODES[mode], addr, data)
mode = self.GetBits()
if mode != 0:
data = self.ShiftInBits(10)
return "RC-MM.Ex%s.%06X" % (MODES[mode], data)
mode = self.GetBits()
if mode != 3:
raise DecodeError("wrong OEM mode")
customerId = self.ShiftInBits(3)
data = self.ShiftInBits(6)
return "RC-MM.Oem%02X.%04X" % (customerId, data)
|
jkeifer/pyHytemporal | old_TO_MIGRATE/classifyclips.py | Python | mit | 4,401 | 0.004999 | from create_rule_image_multiprocessed_bypx import phenological_classificaion, read_reference_file
import os
from pyhytemporal.utils import find_files
clip1refs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012/clip1", "mean.ref")
clip2refs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012/clip2", "mean.ref")
clip3refs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012/clip3", "mean.ref")
clip4refs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012/clip4", "mean.ref")
clip5refs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012/clip5", "mean.ref")
clip6refs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012/clip6", "mean.ref")
meanrefs = find_files("/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Refs/2012", ".ref", recursive=False)
reffiles = [clip1refs, clip2refs, clip3refs, clip4refs, clip5refs, clip6refs, meanrefs]
rootout = "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/Classified"
outfolders = [os.path.join(rootout, "clip1refs"), os.path.join(rootout, "clip2refs"), os.path.join(rootout, "clip3refs"), os.path.join(rootout, "clip4refs"), os.path.join(rootout, "clip5refs"), os.path.join(rootout, "clip6refs"), os.path.join(rootout, "meanrefs")]
clip1imgs = ["/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasNDVI_2012_clip1.tif", | "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/repro | jected/clips/KansasEVI_2012_clip1.tif"]
clip2imgs = ["/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasNDVI_2012_clip2.tif", "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasEVI_2012_clip2.tif"]
clip3imgs = ["/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasNDVI_2012_clip3.tif", "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasEVI_2012_clip3.tif"]
clip4imgs = ["/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasNDVI_2012_clip4.tif", "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasEVI_2012_clip4.tif"]
clip5imgs = ["/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasNDVI_2012_clip5.tif", "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasEVI_2012_clip5.tif"]
clip6imgs = ["/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasNDVI_2012_clip6.tif", "/Users/phoetrymaster/Documents/School/Geography/Thesis/Data/MODIS_KANSAS_2007-2012/reprojected/clips/KansasEVI_2012_clip6.tif"]
imagelist = [clip1imgs, clip2imgs, clip3imgs, clip4imgs, clip5imgs, clip6imgs]
searchstrings = ["soy", "corn", "wwheat", "sorghum", "wwheatsoydbl"]
fitmethods = ["SLSQP"]#, "TNC"]
for method in fitmethods:
for images in imagelist:
for img in images:
name = os.path.splitext(os.path.basename(img))[0]
if "NDVI" in name:
type = "NDVI"
else:
type = "EVI"
for reffile, outfolder in zip(reffiles, outfolders):
refs = {}
for f in reffile:
if type in f:
for string in searchstrings:
if string + "_" + type in f:
error, refs[string] = read_reference_file(f)
if not error:
print refs
phenological_classificaion(img, outfolder, name + "_" + method, refs, "ENVI", 17, 16, -500, 0, method, toprint=False)
else:
print "ERROR +++++++++++++++++++++++++++++++++++++++++++++++++++ ERROR" |
MasterScrat/ChatShape | parsers/utils.py | Python | mit | 308 | 0 | import os
import datetime
def export_dataframe(df, filename='exported.pkl'):
    """Pickle *df* into the local ``data/`` directory.

    :param df: pandas DataFrame to save.
    :param filename: file name (not a path) to write under ``data/``.
    """
    # (Reconstructed: the assignment line was garbled in the source.)
    filepath = os.path.join('data', filename)
    print('Saving to pickle file %s...' % filepath)
    df.to_pickle(filepath)
def timestamp_to_ordinal(value):
    """Convert a (possibly string) UNIX timestamp to a proleptic ordinal day.

    Uses the local timezone, matching datetime.fromtimestamp.
    """
    # (Reconstructed: the return line was garbled in the source.)
    return datetime.datetime.fromtimestamp(float(value)).toordinal()
VPAC/pbs_python | src/PBSQuery.py | Python | gpl-3.0 | 17,223 | 0.008303 | #
# Authors: Roy Dragseth (roy.dragseth@cc.uit.no)
# Bas van der Vlies (basv@sara.nl)
#
# SVN INFO:
# $Id$
#
"""
Usage: from PBSQuery import PBSQuery
This class gets the info from the pbs_server via the pbs.py module
for the several batch objects. All get..() functions return a dictionary
with id as key and batch object as value
There are four batch objects:
- server
- queue
- job
- node
Each object can be handled as an dictionary and has several member
functions. The second parameter is an python list and can be used if you
are only interested in certain resources, see example
There are the following functions for PBSQuery:
job -
getjob(job_id, attributes=<default is all>)
getjobs(attributes=<default is all>)
node -
getnode(node_id, attributes=<default is all>)
getnodes(attributes=<default is all>)
queue -
getqueue(queue_id, attributes=<default is all>)
getqueues(attributes=<default is all>)
server -
get_serverinfo(attributes=<default is all>)
Here is an example how to use the module:
from PBSQuery import PBSQuery
p = PBSQuery()
nodes = p.getnodes()
for name,node in nodes.items():
print name
if node.is_free():
print node, node['state']
l = [ 'state', 'np' ]
nodes = p.getnodes(l)
for name,node in nodes.items():
print node, node['state']
The parameter 'attributes' is an python list of resources that
you are interested in, eg: only show state of nodes
l = list()
l.append('state')
nodes = p.getnodes(l)
"""
import pbs
import UserDict
import string
import sys
import re
import types
class PBSError(Exception):
def __init__(self, msg=''):
self.msg = msg
Exception.__init__(self, msg)
def __repr__(self):
return self.msg
__str__ = __repr__
class PBSQuery:
# a[key] = value, key and value are data type string
#
OLD_DATA_STRUCTURE = False
    def __init__(self, server=None):
        """Create a query object for a PBS/Torque server.

        :param server: server host name; defaults to pbs.pbs_default().
        """
        if not server:
            self.server = pbs.pbs_default()
        else:
            self.server = server
        self._connect()
        ## getjob() needs the canonical server id, because a job id is
        #  formed as sequence_number.server (which is not self.server)
        #
        self.job_server_id = list(self.get_serverinfo())[0]
        self._disconnect()
def _connect(self):
"""Connect to the PBS/Torque server"""
self.con = pbs.pbs_connect(self.server)
if self.con < 0:
str = "Could not make a connection with %s\n" %(self.server)
raise PBSError(str)
    def _disconnect(self):
        """Close the PBS/Torque connection"""
        pbs.pbs_disconnect(self.con)
        # Reset the attribute filter to the pbs sentinel for the next query.
        self.attribs = 'NULL'
def _list_2_attrib(self, list):
"""Convert a python list to an attrib list suitable for pbs"""
self.attribs = pbs.new_attrl( len(list) )
i = 0
for attrib in list:
# So we can user Resource
attrib = attrib.split('.')
self.attribs[i].name = attrib[0]
i = i + 1
def _pbsstr_2_list(self, str, delimiter):
"""Convert a string to a python list and use delimiter as spit char"""
l = sting.splitfields(str, delimiter)
if len(l) > 1:
return l
def _list_2_dict(self, l, class_func):
"""
Convert a pbsstat function list to a class dictionary, The
data structure depends on the function new_data_structure().
Default data structure is:
class[key] = value, Where key and value are of type string
Future release, can be set by new_data_structure():
- class[key] = value where value can be:
1. a list of values of type string
2. a dictionary with as list of values of type string. If
values contain a '=' character
eg:
print node['np']
>> [ '2' ]
print node['status']['arch']
>> [ 'x86_64' ]
"""
self.d = {}
for item in l:
new = class_func()
self.d[item.name] = new
|
new.name = item.name
for a in item.attribs:
| if self.OLD_DATA_STRUCTURE:
if a.resource:
key = '%s.%s' %(a.name, a.resource)
else:
key = '%s' %(a.name)
new[key] = a.value
else:
values = string.split(a.value, ',')
sub_dict = string.split(a.value, '=')
# We must creat sub dicts, only for specified
# key values
#
if a.name in ['status', 'Variable_List']:
for v in values:
tmp_l = v.split('=')
## Support for multiple EVENT mesages in format [key=value:]+
# format eg: message=EVENT:sample.time=1288864220.003,EVENT:kernel=upgrade,cputotals.user=0
# message=ERROR <text>
#
if tmp_l[0] in ['message']:
if tmp_l[1].startswith('EVENT:'):
tmp_d = dict()
new['event'] = class_func(tmp_d)
message_list = v.split(':')
for event_type in message_list[1:]:
tmp_l = event_type.split('=')
new['event'][ tmp_l[0] ] = tmp_l[1:]
else:
## ERROR message
#
new['error'] = tmp_l [1:]
elif tmp_l[0].startswith('EVENT:'):
message_list = v.split(':')
for event_type in message_list[1:]:
tmp_l = event_type.split('=')
new['event'][ tmp_l[0] ] = tmp_l[1:]
else:
## Check if we already added the key
#
if new.has_key(a.name):
new[a.name][ tmp_l[0] ] = tmp_l[1:]
else:
tmp_d = dict()
tmp_d[ tmp_l[0] ] = tmp_l[1:]
new[a.name] = class_func(tmp_d)
else:
## Check if it is a resource type variable, eg:
# - Resource_List.(nodes, walltime, ..)
#
if a.resource:
if new.has_key(a.name):
new[a.name][a.resource] = values
else:
tmp_d = dict()
tmp_d[a.resource] = values
new[a.name] = class_func(tmp_d)
else:
# Simple value
#
new[a.name] = values
self._free(l)
    def _free(self, memory):
        """
        Free the memory that a pbs_stat* call allocated for its result list.
        """
        pbs.pbs_statfree(memory)
    def _statserver(self, attrib_list=None):
        """Get the server config from the pbs server"""
        if attrib_list:
            self._list_2_attrib(attrib_list)
        else:
            # 'NULL' is the pbs sentinel for "all attributes".
            self.attribs = 'NULL'
        self._connect()
        serverinfo = pbs.pbs_statserver(self.con, self.attribs, 'NULL')
        self._disconnect()
        # Populates self.d with `server` objects keyed by name.
        self._list_2_dict(serverinfo, server)
def get_serverinfo(self, attrib_list=None):
self._statserver(attrib_list)
r |
pllim/astropy | astropy/coordinates/tests/test_intermediate_transformations.py | Python | bsd-3-clause | 38,779 | 0.001573 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Accuracy tests for GCRS coordinate transformations, primarily to/from AltAz.
"""
import os
import warnings
from importlib import metadata
import pytest
import numpy as np
import erfa
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose as assert_allclose
from astropy.time import Time
from astropy.coordinates import (
EarthLocation, get_sun, ICRS, GCRS, CIRS, ITRS, AltAz, HADec,
PrecessedGeocentric, CartesianRepresentation, SkyCoord,
CartesianDifferential, SphericalRepresentation, UnitSphericalRepresentation,
HCRS, HeliocentricMeanEcliptic, TEME, TETE)
from astropy.coordinates.solar_system import _apparent_position_in_true_coordinates, get_body
from astropy.utils import iers
from astropy.utils.exceptions import AstropyWarning, AstropyDeprecationWarning
from astropy.utils.compat.optional_deps import HAS_JPLEPHEM
from astropy.coordinates.angle_utilities import golden_spiral_grid
from astropy.coordinates.builtin_frames.intermediate_rotation_transforms import (
get_location_gcrs, tete_to_itrs_mat, gcrs_to_cirs_mat, cirs_to_itrs_mat)
from astropy.coordinates.builtin_frames.utils import get_jd12
from astropy.coordinates import solar_system_ephemeris
from astropy.units import allclose
CI = os.environ.get('CI', False) == "true"
def test_icrs_cirs():
    """
    Check a few cases of ICRS<->CIRS for consistency.

    Also includes the CIRS<->CIRS transforms at different times, as those go
    through ICRS
    """
    usph = golden_spiral_grid(200)
    dist = np.linspace(0., 1, len(usph)) * u.pc
    inod = ICRS(usph)
    iwd = ICRS(ra=usph.lon, dec=usph.lat, distance=dist)
    cframe1 = CIRS()
    cirsnod = inod.transform_to(cframe1)  # uses the default time
    # first do a round-tripping test
    inod2 = cirsnod.transform_to(ICRS())
    assert_allclose(inod.ra, inod2.ra)
    assert_allclose(inod.dec, inod2.dec)
    # now check that a different time yields different answers
    cframe2 = CIRS(obstime=Time('J2005'))
    cirsnod2 = inod.transform_to(cframe2)
    assert not allclose(cirsnod.ra, cirsnod2.ra, rtol=1e-8)
    assert not allclose(cirsnod.dec, cirsnod2.dec, rtol=1e-8)
    # parallax effects should be included, so with and w/o distance should be different
    cirswd = iwd.transform_to(cframe1)
    assert not allclose(cirswd.ra, cirsnod.ra, rtol=1e-8)
    assert not allclose(cirswd.dec, cirsnod.dec, rtol=1e-8)
    # and the distance should transform at least somehow
    assert not allclose(cirswd.distance, iwd.distance, rtol=1e-8)
    # now check that the cirs self-transform works as expected
    cirsnod3 = cirsnod.transform_to(cframe1)  # should be a no-op
    assert_allclose(cirsnod.ra, cirsnod3.ra)
    assert_allclose(cirsnod.dec, cirsnod3.dec)
    cirsnod4 = cirsnod.transform_to(cframe2)  # should be different
    assert not allclose(cirsnod4.ra, cirsnod.ra, rtol=1e-8)
    assert not allclose(cirsnod4.dec, cirsnod.dec, rtol=1e-8)
    # (line below was garbled by extraction in the source; reconstructed)
    cirsnod5 = cirsnod4.transform_to(cframe1)  # should be back to the same
    assert_allclose(cirsnod.ra, cirsnod5.ra)
    assert_allclose(cirsnod.dec, cirsnod5.dec)
# Shared fixtures for the ICRS<->GCRS tests below: a grid of unit-sphere
# directions (with and without distances) and two GCRS frames at different
# observation times.
usph = golden_spiral_grid(200)
dist = np.linspace(0.5, 1, len(usph)) * u.pc
icrs_coords = [ICRS(usph), ICRS(usph.lon, usph.lat, distance=dist)]
gcrs_frames = [GCRS(), GCRS(obstime=Time('J2005'))]
@pytest.mark.parametrize('icoo', icrs_coords)
def test_icrs_gcrs(icoo):
    """
    Check ICRS<->GCRS for consistency
    """
    gcrscoo = icoo.transform_to(gcrs_frames[0])  # uses the default time
    # first do a round-tripping test
    icoo2 = gcrscoo.transform_to(ICRS())
    assert_allclose(icoo.distance, icoo2.distance)
    assert_allclose(icoo.ra, icoo2.ra)
    assert_allclose(icoo.dec, icoo2.dec)
    # the representation class must survive the round trip
    assert isinstance(icoo2.data, icoo.data.__class__)
    # now check that a different time yields different answers
    gcrscoo2 = icoo.transform_to(gcrs_frames[1])
    assert not allclose(gcrscoo.ra, gcrscoo2.ra, rtol=1e-8, atol=1e-10*u.deg)
    assert not allclose(gcrscoo.dec, gcrscoo2.dec, rtol=1e-8, atol=1e-10*u.deg)
    # now check that the cirs self-transform works as expected
    gcrscoo3 = gcrscoo.transform_to(gcrs_frames[0])  # should be a no-op
    assert_allclose(gcrscoo.ra, gcrscoo3.ra)
    assert_allclose(gcrscoo.dec, gcrscoo3.dec)
    gcrscoo4 = gcrscoo.transform_to(gcrs_frames[1])  # should be different
    assert not allclose(gcrscoo4.ra, gcrscoo.ra, rtol=1e-8, atol=1e-10*u.deg)
    assert not allclose(gcrscoo4.dec, gcrscoo.dec, rtol=1e-8, atol=1e-10*u.deg)
    gcrscoo5 = gcrscoo4.transform_to(gcrs_frames[0])  # should be back to the same
    assert_allclose(gcrscoo.ra, gcrscoo5.ra, rtol=1e-8, atol=1e-10*u.deg)
    assert_allclose(gcrscoo.dec, gcrscoo5.dec, rtol=1e-8, atol=1e-10*u.deg)
    # also make sure that a GCRS with a different geoloc/geovel gets a different answer
    # roughly a moon-like frame
    gframe3 = GCRS(obsgeoloc=[385000., 0, 0]*u.km, obsgeovel=[1, 0, 0]*u.km/u.s)
    gcrscoo6 = icoo.transform_to(gframe3)  # should be different
    assert not allclose(gcrscoo.ra, gcrscoo6.ra, rtol=1e-8, atol=1e-10*u.deg)
    assert not allclose(gcrscoo.dec, gcrscoo6.dec, rtol=1e-8, atol=1e-10*u.deg)
    icooviag3 = gcrscoo6.transform_to(ICRS())  # and now back to the original
    assert_allclose(icoo.ra, icooviag3.ra)
    assert_allclose(icoo.dec, icooviag3.dec)
@pytest.mark.parametrize('gframe', gcrs_frames)
def test_icrs_gcrs_dist_diff(gframe):
    """
    Check that with and without distance give different ICRS<->GCRS answers
    """
    gcrsnod = icrs_coords[0].transform_to(gframe)
    gcrswd = icrs_coords[1].transform_to(gframe)
    # parallax effects should be included, so with and w/o distance should be different
    assert not allclose(gcrswd.ra, gcrsnod.ra, rtol=1e-8, atol=1e-10*u.deg)
    assert not allclose(gcrswd.dec, gcrsnod.dec, rtol=1e-8, atol=1e-10*u.deg)
    # and the distance should transform at least somehow
    assert not allclose(gcrswd.distance, icrs_coords[1].distance, rtol=1e-8,
                        atol=1e-10*u.pc)
def test_cirs_to_altaz():
    """
    Check the basic CIRS<->AltAz transforms.  More thorough checks implicitly
    happen in `test_iau_fullstack`
    """
    from astropy.coordinates import EarthLocation
    usph = golden_spiral_grid(200)
    dist = np.linspace(0.5, 1, len(usph)) * u.pc
    cirs = CIRS(usph, obstime='J2000')
    crepr = SphericalRepresentation(lon=usph.lon, lat=usph.lat, distance=dist)
    # same coordinates, but carried as a cartesian representation
    cirscart = CIRS(crepr, obstime=cirs.obstime, representation_type=CartesianRepresentation)
    loc = EarthLocation(lat=0*u.deg, lon=0*u.deg, height=0*u.m)
    altazframe = AltAz(location=loc, obstime=Time('J2005'))
    cirs2 = cirs.transform_to(altazframe).transform_to(cirs)
    cirs3 = cirscart.transform_to(altazframe).transform_to(cirs)
    # check round-tripping
    assert_allclose(cirs.ra, cirs2.ra)
    assert_allclose(cirs.dec, cirs2.dec)
    assert_allclose(cirs.ra, cirs3.ra)
    assert_allclose(cirs.dec, cirs3.dec)
def test_cirs_to_hadec():
    """
    Check the basic CIRS<->HADec transforms.
    """
    from astropy.coordinates import EarthLocation
    usph = golden_spiral_grid(200)
    dist = np.linspace(0.5, 1, len(usph)) * u.pc
    cirs = CIRS(usph, obstime='J2000')
    crepr = SphericalRepresentation(lon=usph.lon, lat=usph.lat, distance=dist)
    # same coordinates, but carried as a cartesian representation
    cirscart = CIRS(crepr, obstime=cirs.obstime, representation_type=CartesianRepresentation)
    loc = EarthLocation(lat=0*u.deg, lon=0*u.deg, height=0*u.m)
    hadecframe = HADec(location=loc, obstime=Time('J2005'))
    cirs2 = cirs.transform_to(hadecframe).transform_to(cirs)
    cirs3 = cirscart.transform_to(hadecframe).transform_to(cirs)
    # check round-tripping
    assert_allclose(cirs.ra, cirs2.ra)
    assert_allclose(cirs.dec, cirs2.dec)
    assert_allclose(cirs.ra, cirs3.ra)
    assert_allclose(cirs.dec, cirs3.dec)
def test_gcrs_itrs():
"""
Check basic GCRS<->ITRS transforms for round-tripping.
"""
usph = golden_spiral_grid(200)
gcrs = GCRS(usph, obstime='J2000')
gcrs6 = GCRS(usph, obstime='J2006')
gcrs2 = gcrs.transform_to(ITRS()).tran |
mindbender-studio/config | polly/plugins/maya/publish/validate_rig_hierarchy.py | Python | mit | 515 | 0 | import pyblish.api
class ValidateMindbenderRigHierarchy(pyblish.api.InstancePlugin):
    """A rig must reside under a single assembly called "ROOT"

    - Must reside within `ROOT` transform
    """
    # (The class statement and process() definition were garbled by
    # extraction in the source; reconstructed.)

    label = "Rig Hierarchy"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = ["mindbender.rig"]

    def process(self, instance):
        """Fail the publish unless the instance's sole top node is ROOT."""
        # Imported lazily so this module can load outside Maya.
        from maya import cmds
        # assemblies=True limits the listing to top-level DAG nodes.
        assert cmds.ls(instance, assemblies=True) == ["ROOT"], (
            "Rig must have a single parent called 'ROOT'.")
|
RonnyPfannschmidt/borg | src/borg/testsuite/crypto.py | Python | bsd-3-clause | 13,340 | 0.003748 | from binascii import hexlify, unhexlify
from ..crypto.low_level import AES256_CTR_HMAC_SHA256, AES256_OCB, CHACHA20_POLY1305, UNENCRYPTED, \
IntegrityError, blake2b_256, hmac_sha256, openssl10
from ..crypto.low_level import bytes_to_long, bytes_to_int, long_to_bytes
from ..crypto.low_level import hkdf_hmac_sha512
from . import BaseTestCase
# Note: these tests are part of the self test, do not use or import py.test functionality here.
# See borg.selftest for details. If you add/remove test methods, update SELFTEST_COUNT
class CryptoTestCase(BaseTestCase):
    def test_bytes_to_int(self):
        # 4 big-endian bytes -> 32-bit integer
        self.assert_equal(bytes_to_int(b'\0\0\0\1'), 1)
    def test_bytes_to_long(self):
        # 8 big-endian bytes <-> 64-bit integer, both directions
        self.assert_equal(bytes_to_long(b'\0\0\0\0\0\0\0\1'), 1)
        self.assert_equal(long_to_bytes(1), b'\0\0\0\0\0\0\0\1')
    def test_UNENCRYPTED(self):
        """The UNENCRYPTED suite must just concatenate header + plaintext."""
        iv = b''  # any IV is ok, it just must be set and not None
        data = b'data'
        header = b'header'
        cs = UNENCRYPTED(None, None, iv, header_len=6)
        envelope = cs.encrypt(data, header=header)
        self.assert_equal(envelope, header + data)
        got_data = cs.decrypt(envelope)
        self.assert_equal(got_data, data)
    def test_AES256_CTR_HMAC_SHA256(self):
        """Known-answer + round-trip + tamper test for the CTR/HMAC suite."""
        # this tests the layout as in attic / borg < 1.2 (1 type byte, no aad)
        mac_key = b'Y' * 32
        enc_key = b'X' * 32
        iv = 0
        data = b'foo' * 10
        header = b'\x42'
        # encrypt-then-mac
        cs = AES256_CTR_HMAC_SHA256(mac_key, enc_key, iv, header_len=1, aad_offset=1)
        hdr_mac_iv_cdata = cs.encrypt(data, header=header)
        # envelope layout: 1 header byte | 32 mac | 8 iv | ciphertext
        hdr = hdr_mac_iv_cdata[0:1]
        mac = hdr_mac_iv_cdata[1:33]
        iv = hdr_mac_iv_cdata[33:41]
        cdata = hdr_mac_iv_cdata[41:]
        self.assert_equal(hexlify(hdr), b'42')
        self.assert_equal(hexlify(mac), b'af90b488b0cc4a8f768fe2d6814fa65aec66b148135e54f7d4d29a27f22f57a8')
        self.assert_equal(hexlify(iv), b'0000000000000000')
        self.assert_equal(hexlify(cdata), b'c6efb702de12498f34a2c2bbc8149e759996d08bf6dc5c610aefc0c3a466')
        self.assert_equal(cs.next_iv(), 2)
        # auth-then-decrypt
        cs = AES256_CTR_HMAC_SHA256(mac_key, enc_key, header_len=len(header), aad_offset=1)
        pdata = cs.decrypt(hdr_mac_iv_cdata)
        self.assert_equal(data, pdata)
        self.assert_equal(cs.next_iv(), 2)
        # auth-failure due to corruption (corrupted data)
        cs = AES256_CTR_HMAC_SHA256(mac_key, enc_key, header_len=len(header), aad_offset=1)
        hdr_mac_iv_cdata_corrupted = hdr_mac_iv_cdata[:41] + b'\0' + hdr_mac_iv_cdata[42:]
        self.assert_raises(IntegrityError,
                           lambda: cs.decrypt(hdr_mac_iv_cdata_corrupted))
    def test_AES256_CTR_HMAC_SHA256_aad(self):
        """Same suite, but with a 3-byte header whose tail is authenticated aad."""
        mac_key = b'Y' * 32
        enc_key = b'X' * 32
        iv = 0
        data = b'foo' * 10
        header = b'\x12\x34\x56'
        # encrypt-then-mac
        cs = AES256_CTR_HMAC_SHA256(mac_key, enc_key, iv, header_len=3, aad_offset=1)
        hdr_mac_iv_cdata = cs.encrypt(data, header=header)
        # envelope layout: 3 header bytes | 32 mac | 8 iv | ciphertext
        hdr = hdr_mac_iv_cdata[0:3]
        mac = hdr_mac_iv_cdata[3:35]
        iv = hdr_mac_iv_cdata[35:43]
        cdata = hdr_mac_iv_cdata[43:]
        self.assert_equal(hexlify(hdr), b'123456')
        self.assert_equal(hexlify(mac), b'7659a915d9927072ef130258052351a17ef882692893c3850dd798c03d2dd138')
        self.assert_equal(hexlify(iv), b'0000000000000000')
        self.assert_equal(hexlify(cdata), b'c6efb702de12498f34a2c2bbc8149e759996d08bf6dc5c610aefc0c3a466')
        self.assert_equal(cs.next_iv(), 2)
        # auth-then-decrypt
        cs = AES256_CTR_HMAC_SHA256(mac_key, enc_key, header_len=len(header), aad_offset=1)
        pdata = cs.decrypt(hdr_mac_iv_cdata)
        self.assert_equal(data, pdata)
        self.assert_equal(cs.next_iv(), 2)
        # auth-failure due to corruption (corrupted aad)
        cs = AES256_CTR_HMAC_SHA256(mac_key, enc_key, header_len=len(header), aad_offset=1)
        hdr_mac_iv_cdata_corrupted = hdr_mac_iv_cdata[:1] + b'\0' + hdr_mac_iv_cdata[2:]
        self.assert_raises(IntegrityError,
                           lambda: cs.decrypt(hdr_mac_iv_cdata_corrupted))
def test_AE(self):
# used in legacy-like layout (1 type byte, no aad)
mac_key = None
enc_key = b'X' * 32
iv = 0
data = b'foo' * 10
header = b'\x23'
tests = [
# (ciphersuite class, exp_mac, exp_cdata)
]
if not openssl10:
tests += [
(AES256_OCB,
b'b6909c23c9aaebd9abbe1ff42097652d',
b'877ce46d2f62dee54699cebc3ba41d9ab613f7c486778c1b3636664b1493', ),
(CHACHA20_POLY1305,
b'fd08594796e0706cde1e8b461e3e0555',
b'a093e4b0387526f085d3c40cca84a35230a5c0dd766453b77ba38bcff775', )
]
for cs_cls, exp_mac, exp_cdata in tests:
# print(repr(cs_cls))
# encrypt/mac
cs = cs_cls(mac_key, enc_key, iv, | header_len=1, aad_offset=1)
hdr_mac_iv_cdata = cs.encrypt(data, header=header)
hdr = hdr_mac_iv_cdata[0:1]
mac = hdr_mac_iv_cdata[1:17]
iv = hdr_mac_iv_cdata[17:29]
cdat | a = hdr_mac_iv_cdata[29:]
self.assert_equal(hexlify(hdr), b'23')
self.assert_equal(hexlify(mac), exp_mac)
self.assert_equal(hexlify(iv), b'000000000000000000000000')
self.assert_equal(hexlify(cdata), exp_cdata)
self.assert_equal(cs.next_iv(), 1)
# auth/decrypt
cs = cs_cls(mac_key, enc_key, header_len=len(header), aad_offset=1)
pdata = cs.decrypt(hdr_mac_iv_cdata)
self.assert_equal(data, pdata)
self.assert_equal(cs.next_iv(), 1)
# auth-failure due to corruption (corrupted data)
cs = cs_cls(mac_key, enc_key, header_len=len(header), aad_offset=1)
hdr_mac_iv_cdata_corrupted = hdr_mac_iv_cdata[:29] + b'\0' + hdr_mac_iv_cdata[30:]
self.assert_raises(IntegrityError,
lambda: cs.decrypt(hdr_mac_iv_cdata_corrupted))
def test_AEAD(self):
# test with aad
mac_key = None
enc_key = b'X' * 32
iv = 0
data = b'foo' * 10
header = b'\x12\x34\x56'
tests = [
# (ciphersuite class, exp_mac, exp_cdata)
]
if not openssl10:
tests += [
(AES256_OCB,
b'f2748c412af1c7ead81863a18c2c1893',
b'877ce46d2f62dee54699cebc3ba41d9ab613f7c486778c1b3636664b1493', ),
(CHACHA20_POLY1305,
b'b7e7c9a79f2404e14f9aad156bf091dd',
b'a093e4b0387526f085d3c40cca84a35230a5c0dd766453b77ba38bcff775', )
]
for cs_cls, exp_mac, exp_cdata in tests:
# print(repr(cs_cls))
# encrypt/mac
cs = cs_cls(mac_key, enc_key, iv, header_len=3, aad_offset=1)
hdr_mac_iv_cdata = cs.encrypt(data, header=header)
hdr = hdr_mac_iv_cdata[0:3]
mac = hdr_mac_iv_cdata[3:19]
iv = hdr_mac_iv_cdata[19:31]
cdata = hdr_mac_iv_cdata[31:]
self.assert_equal(hexlify(hdr), b'123456')
self.assert_equal(hexlify(mac), exp_mac)
self.assert_equal(hexlify(iv), b'000000000000000000000000')
self.assert_equal(hexlify(cdata), exp_cdata)
self.assert_equal(cs.next_iv(), 1)
# auth/decrypt
cs = cs_cls(mac_key, enc_key, header_len=len(header), aad_offset=1)
pdata = cs.decrypt(hdr_mac_iv_cdata)
self.assert_equal(data, pdata)
self.assert_equal(cs.next_iv(), 1)
# auth-failure due to corruption (corrupted aad)
cs = cs_cls(mac_key, enc_key, header_len=len(header), aad_offset=1)
hdr_mac_iv_cdata_corrupted = hdr_mac_iv_cdata[:1] + b'\0' + hdr_mac_iv_cdata[2:]
self.assert_raises(IntegrityError,
lambda: cs.decrypt(hd |
lao605/product-definition-center | pdc/apps/common/renderers.py | Python | mit | 14,144 | 0.001273 | #
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from collections import OrderedDict
import logging
import re
import sys
import json
from django.conf import settings
from django.utils.encoding import smart_text
from contrib import drf_introspection
from django.db.models.fields import NOT_PROVIDED
from django.core.urlresolvers import NoReverseMatch
from django.core.exceptions import FieldDoesNotExist
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.utils import formatting
from rest_framework.reverse import reverse
from rest_framework import serializers, relations, fields
from pdc.apps.utils.utils import urldecode
"""
## Writing documentation in docstrings
Docstrings of each method will be available in browsable API as documentation.
These features are available to simplify writing the comments:
* the content is formatted as Markdown
* %(HOST_NAME)s and %(API_ROOT)s macros will be replaced by host name and URL
fragment for API, respectively
* %(FILTERS)s will be replaced a by a list of available query string filters
* %(SERIALIZER)s will be replaced by a code block with details about
serializer
* %(WRITABLE_SERIALIZER)s will do the same, but without read-only fields
* $URL:route-name:arg1:arg2...$ will be replaced by absolute URL
* $LINK:route-name:arg1:...$ will be replaced by a clickable link with
relative URL pointing to the specified place; arguments for LINK will be
wrapped in braces automatically
When the URL specification can not be resolve, "BAD URL" will be displayed on
the page and details about the error will be logged to the error log.
"""
PDC_APIROOT_DOC = """
The REST APIs make it possible to programmatic access the data in Product Definition Center(a.k.a. PDC).
Create new Product, import rpms and query components with contact informations, and more.
The REST API iden | tifies users using Token which will be generated for all authenticated users.
**Please remember to use your token as HTTP header for every requests that need authentication.**
If you want to record the reaso | n for change, you can add Header (-H "PDC-Change-Comment: reasonforchange") in request.
Responses are available in JSON format.
**NOTE:** in order to use secure HTTPS connections, you'd better to add server's certificate as trusted.
"""
URL_SPEC_RE = re.compile(r'\$(?P<type>URL|LINK):(?P<details>[^$]+)\$')
class ReadOnlyBrowsableAPIRenderer(BrowsableAPIRenderer):
    """Browsable-API renderer with all edit forms stripped out.

    Renders per-method documentation (from view docstrings) instead of the
    interactive HTML forms DRF normally shows.
    """
    template = "browsable_api/api.html"
    # View method names whose docstrings are rendered as documentation.
    methods_mapping = (
        'list',
        'retrieve',
        'create',
        'bulk_create',
        'update',
        'destroy',
        'bulk_destroy',
        'partial_update',
        'bulk_update',
        # Token Auth methods
        'obtain',
        'refresh',
    )
    def get_raw_data_form(self, data, view, method, request):
        # Read-only page: never offer a raw-data form.
        return None
    def get_rendered_html_form(self, data, view, method, request):
        # Read-only page: never offer an HTML edit form.
        return None
    def get_context(self, data, accepted_media_type, renderer_context):
        """Build the template context, removing every edit-form entry."""
        self.request = renderer_context['request']
        super_class = super(ReadOnlyBrowsableAPIRenderer, self)
        super_retval = super_class.get_context(data, accepted_media_type,
                                               renderer_context)
        if super_retval is not None:
            del super_retval['put_form']
            del super_retval['post_form']
            del super_retval['delete_form']
            del super_retval['options_form']
            del super_retval['raw_data_put_form']
            del super_retval['raw_data_post_form']
            del super_retval['raw_data_patch_form']
            del super_retval['raw_data_put_or_patch_form']
            super_retval['display_edit_forms'] = False
            super_retval['version'] = "1.0"
            view = renderer_context['view']
            super_retval['overview'] = self.get_overview(view)
        return super_retval
    def get_overview(self, view):
        """Return the rendered overview text for `view` (API root is special)."""
        if view.__class__.__name__ == 'APIRoot':
            return self.format_docstring(None, None, PDC_APIROOT_DOC)
        overview = view.__doc__ or ''
        return self.format_docstring(view, '<overview>', overview)
    def get_description(self, view, *args):
        """Return an OrderedDict {method name: rendered docstring}."""
        if view.__class__.__name__ == 'APIRoot':
            return ''
        description = OrderedDict()
        for method in self.methods_mapping:
            func = getattr(view, method, None)
            docstring = func and func.__doc__ or ''
            if docstring:
                description[method] = self.format_docstring(view, method, docstring)
        return description
    def format_docstring(self, view, method, docstring):
        """Expand macros and $URL$/$LINK$ specs, then render as Markdown."""
        macros = settings.BROWSABLE_DOCUMENT_MACROS
        if view:
            macros['FILTERS'] = get_filters(view)
            # Serializer macros are expensive; only compute them on demand.
            if '%(SERIALIZER)s' in docstring:
                macros['SERIALIZER'] = get_serializer(view, include_read_only=True)
            if '%(WRITABLE_SERIALIZER)s' in docstring:
                macros['WRITABLE_SERIALIZER'] = get_serializer(view, include_read_only=False)
            if hasattr(view, 'docstring_macros'):
                macros.update(view.docstring_macros)
        string = formatting.dedent(docstring)
        formatted = string % macros
        formatted = self.substitute_urls(view, method, formatted)
        string = smart_text(formatted)
        return formatting.markup_description(string)
    def substitute_urls(self, view, method, text):
        """Replace $URL:...$ / $LINK:...$ specs in `text` with resolved URLs."""
        def replace_url(match):
            # NOTE: `type` here shadows the builtin; kept as-is.
            type = match.groupdict()['type']
            parts = match.groupdict()['details'].split(':')
            url_name = parts[0]
            args = parts[1:]
            if type == 'LINK':
                # LINK arguments become literal {placeholders} in the URL.
                args = ['{%s}' % arg for arg in args]
            try:
                if type == 'LINK':
                    url = reverse(url_name, args=args)
                    return '[`%s`](%s)' % (urldecode(url), url)
                return reverse(url_name, args=args, request=self.request)
            except NoReverseMatch:
                # Show a marker on the page and log the broken spec.
                logger = logging.getLogger(__name__)
                logger.error('Bad URL specifier <%s> in %s.%s'
                             % (match.group(0), view.__class__.__name__, method),
                             exc_info=sys.exc_info())
                return 'BAD URL'
        return URL_SPEC_RE.sub(replace_url, text)
# Cache of rendered filter documentation, keyed by view class (see get_filters).
FILTERS_CACHE = {}
# Maps django-filter class names to the human-readable type shown in docs.
FILTER_DEFS = {
    'CharFilter': 'string',
    'NullableCharFilter': 'string | null',
    'BooleanFilter': 'bool',
    'CaseInsensitiveBooleanFilter': 'bool',
    'ActiveReleasesFilter': 'bool',
    'MultiIntFilter': 'int',
}
# Human-readable descriptions for filter lookup types.
LOOKUP_TYPES = {
    'icontains': 'case insensitive, substring match',
    'contains': 'substring match',
    'iexact': 'case insensitive',
}
def get_filters(view):
"""
For a given view set returns which query filters are available for it a
Markdown formatted list. The list does not include query filters specified
on serializer or query arguments used for paging.
"""
if view in FILTERS_CACHE:
return FILTERS_CACHE[view]
allowed_keys = drf_introspection.get_allowed_query_params(view)
filter_class = getattr(view, 'filter_class', None)
filterset = filter_class() if filter_class is not None else None
filterset_fields = filterset.filters if filterset is not None else []
filter_fields = set(getattr(view, 'filter_fields', []))
extra_query_params = set(getattr(view, 'extra_query_params', []))
filters = []
for key in sorted(allowed_keys):
if key in filterset_fields:
# filter defined in FilterSet
filter = filterset_fields.get(key)
filter_type = FILTER_DEFS.get(filter.__class__.__name__, 'string')
lookup_type = LOOKUP_TYPES.get(filter.lookup_type)
if lookup_type:
lookup_type = ', %s' % lookup_type
filters.append(' * `%s` (%s%s)' % (key, filter_type, lookup_type or ''))
elif key in filter_fields or key in extra_query_params:
# filter defined in viewset directly; type depends on model, not easily availab |
nanxung/vip_video | setup.py | Python | gpl-3.0 | 572 | 0.022727 | import sys
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need fine tuning.
build_exe_options = {
    "packages": ["os"],
    "excludes": ["tkinter"],
}

# GUI applications require a different base on Windows (the default is for a
# console application).
base = None
if sys.platform == "win32":  # fixed: token was corrupted to "sys.pl | atform"
    base = "Win32GUI"

setup(name="guifoo",
      version="0.2",
      description="luantangui!",
      options={"build_exe": build_exe_options},
      executables=[Executable("main.py", base=base)])
|
redsolution/django-generic-ratings | ratings/forms/widgets.py | Python | mit | 7,704 | 0.002207 | from decimal import Decimal
from django import forms
from django.template.loader import render_to_string
from django.template.defaultfilters import slugify
class BaseWidget(forms.TextInput):
    """
    Base widget. Do not use this directly.

    Subclasses set *template* and may set *instance* (the rated object);
    both default to None here.
    """
    template = None
    instance = None
    def get_parent_id(self, name, attrs):
        # NOTE(review): build_attrs(attrs, type=..., name=...) is the
        # pre-Django-1.11 signature — confirm against the project's
        # Django version.
        final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
        return final_attrs['id']
    def get_widget_id(self, prefix, name, key=''):
        """Return a DOM id unique to this widget; when *instance* is set,
        the id embeds app label, model name and pk, and *key* (slugified)
        is appended when given."""
        if self.instance:
            opts = self.instance._meta
            widget_id = '%s-%s-%s_%s-%s' % (prefix, name, opts.app_label, opts.module_name, self.instance.pk)
        else:
            widget_id = '%s-%s' % (prefix, name)
        if key:
            widget_id = '%s_%s' % (widget_id, slugify(key))
        return widget_id
    def get_values(self, min_value, max_value, step=1):
        """Yield Decimal values from *min_value* to *max_value* inclusive,
        advancing by *step* (converted through str to avoid float noise)."""
        decimal_step = Decimal(str(step))
        value = Decimal(str(min_value))
        while value <= max_value:
            yield value
            value += decimal_step
class SliderWidget(BaseWidget):
    """
    Slider widget.
    In order to use this widget you must load the jQuery.ui slider
    javascript.
    This widget triggers the following javascript events:
    - *slider_change* with the vote value as argument
      (fired when the user changes his vote)
    - *slider_delete* without arguments
      (fired when the user deletes his vote)
    It's easy to bind these events using jQuery, e.g.::
        $(document).bind('slider_change', function(event, value) {
            alert('New vote: ' + value);
        });
    """
    def __init__(self, min_value, max_value, step, instance=None,
                 can_delete_vote=True, key='', read_only=False, default='',
                 template='ratings/slider_widget.html', attrs=None):
        """
        The argument *default* is used when the initial value is None.
        """
        super(SliderWidget, self).__init__(attrs)
        self.min_value = min_value
        self.max_value = max_value
        self.step = step
        self.instance = instance
        self.can_delete_vote = can_delete_vote
        self.read_only = read_only
        self.default = default
        self.template = template
        self.key = key
    def get_context(self, name, value, attrs=None):
        """Build the template context for rendering the slider."""
        # here we convert *min_value*, *max_value*, *step* and *value*
        # to string to avoid odd behaviours of Django localization
        # in the template (and, for backward compatibility we do not
        # want to use the *unlocalize* filter)
        attrs['type'] = 'hidden'
        return {
            'min_value': str(self.min_value),
            'max_value': str(self.max_value),
            'step': str(self.step),
            'can_delete_vote': self.can_delete_vote,
            'read_only': self.read_only,
            'default': self.default,
            'parent': super(SliderWidget, self).render(name, value, attrs),
            'parent_id': self.get_parent_id(name, attrs),
            'value': str(value),
            'has_value': bool(value),
            'slider_id': self.get_widget_id('slider', name, self.key),
            'label_id': 'slider-label-%s' % name,
            'remove_id': 'slider-remove-%s' % name,
        }
    def render(self, name, value, attrs=None):
        # Render the hidden input plus the slider markup from the template.
        context = self.get_context(name, value, attrs or {})
        return render_to_string(self.template, context)
class StarWidget(BaseWidget):
    """
    Starrating widget.
    In order to use this widget you must download the
    jQuery Star Rating Plugin available at
    http://www.fyneworks.com/jquery/star-rating/#tab-Download
    and then load the required javascripts and css, e.g.::
        <link href="/path/to/jquery.rating.css" rel="stylesheet" type="text/css" />
        <script type="text/javascript" src="/path/to/jquery.MetaData.js"></script>
        <script type="text/javascript" src="/path/to/jquery.rating.js"></script>
    This widget triggers the following javascript events:
    - *star_change* with the vote value as argument
      (fired when the user changes his vote)
    - *star_delete* without arguments
      (fired when the user deletes his vote)
    It's easy to bind these events using jQuery, e.g.::
        $(document).bind('star_change', function(event, value) {
            alert('New vote: ' + value);
        });
    """
    def __init__(self, min_value, max_value, step, instance=None,
                 can_delete_vote=True, key='', read_only=False,
                 template='ratings/star_widget.html', attrs=None):
        super(StarWidget, self).__init__(attrs)
        self.min_value = min_value
        self.max_value = max_value
        self.step = step
        self.instance = instance
        self.can_delete_vote = can_delete_vote
        self.read_only = read_only
        self.template = template
        self.key = key
    def get_context(self, name, value, attrs=None):
        """Build the template context; *split* carries the plugin's
        {split:N} metadata when step is fractional."""
        # here we convert *min_value*, *max_value* and *step*
        # to string to avoid odd behaviours of Django localization
        # in the template (and, for backward compatibility we do not
        # want to use the *unlocalize* filter)
        attrs['type'] = 'hidden'
        split_value = int(1 / self.step)
        if split_value == 1:
            # Whole-star rating: one star per integer value.
            values = range(1, self.max_value+1)
            split = u''
        else:
            # Fractional steps: enumerate every step and tell the plugin
            # how many segments each star is split into.
            values = self.get_values(self.min_value, self.max_value, self.step)
            split = u' {split:%d}' % split_value
        return {
            'min_value': str(self.min_value),
            'max_value': str(self.max_value),
            'step': str(self.step),
            'can_delete_vote': self.can_delete_vote,
            'read_only': self.read_only,
            'values': values,
            'split': split,
            'parent': super(StarWidget, self).render(name, value, attrs),
            'parent_id': self.get_parent_id(name, attrs),
            'value': self._get_value(value, split_value),
            'star_id': self.get_widget_id('star', name, self.key),
        }
    def _get_value(self, original, split):
        # Snap *original* to the nearest representable fraction; returns
        # None when original is falsy (no vote yet).
        if original:
            value = round(original * split) / split
            return Decimal(str(value))
    def render(self, name, value, attrs=None):
        # Render the hidden input plus the star markup from the template.
        context = self.get_context(name, value, attrs or {})
        return render_to_string(self.template, context)
class LikeWidget(BaseWidget):
    """Like/dislike voting widget rendered through a Django template."""

    def __init__(self, min_value, max_value, instance=None,
                 can_delete_vote=True, template='ratings/like_widget.html', attrs=None):
        super(LikeWidget, self).__init__(attrs)
        self.min_value = min_value
        self.max_value = max_value
        self.instance = instance
        self.can_delete_vote = can_delete_vote
        self.template = template

    def get_context(self, name, value, attrs=None):
        """Build the template context for rendering the like widget."""
        # Bounds and value are passed as strings to sidestep Django
        # localization quirks in the template (kept for backward
        # compatibility instead of the *unlocalize* filter).
        attrs['type'] = 'hidden'
        context = {}
        context['min_value'] = str(self.min_value)
        context['max_value'] = str(self.max_value)
        context['can_delete_vote'] = self.can_delete_vote
        context['parent'] = super(LikeWidget, self).render(name, value, attrs)
        context['parent_id'] = self.get_parent_id(name, attrs)
        context['value'] = str(value)
        context['like_id'] = self.get_widget_id('like', name)
        return context

    def render(self, name, value, attrs=None):
        template_context = self.get_context(name, value, attrs or {})
        return render_to_string(self.template, template_context)
bjornaa/roppy | examples/plot_current25.py | Python | mit | 2,273 | 0 | # ------------------------------------------------------
# current.py
#
# Plot a current field at fixed depth
# Modified from the spermplot example
#
# Bjørn Ådlandsvik <bjorn@imr.no>
# 2020-03-27
# ------------------------------------------------------
# -------------
# Imports
# -------------
import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from roppy import SGrid
from roppy.mpl_util import landmask
from roppy.trajectories import curly_vectors
# -------------------------
# User settings
# ------------------------
ncfile = "data/ocean_avg_example.nc"
timeframe = 3  # Fourth time frame
# subgrid = (1,-1,1,-1)  # whole grid except boundary cells
subgrid = (110, 170, 35, 90)
# Depth level [m]
z = 25
# Distance between vectors (grid cells)
stride = 2
# Speed levels (isotachs)
speedlevels = np.linspace(0, 0.5, 6)  # 0.0, 0.1, ...., 0.5
# Colormap for speed
speedcolors = "YlOrRd"
# --------------------
# Read the data
# --------------------
f = Dataset(ncfile)
grid = SGrid(f, subgrid=subgrid)
# Read 3D current components (and masks) for the subgrid only
U0 = f.variables["u"][timeframe, :, grid.Ju, grid.Iu]
V0 = f.variables["v"][timeframe, :, grid.Jv, grid.Iv]
Mu = f.variables["mask_u"][grid.Ju, grid.Iu]
Mv = f.variables["mask_v"][grid.Jv, grid.Iv]
# f.close()
# ----------------------
# Handle the data
# ----------------------
# Interpolate u and v from their staggered C-grid positions to rho-points
U1 = 0.5 * (U0[:, :, :-1] + U0[:, :, 1:])
V1 = 0.5 * (V0[:, :-1, :] + V0[:, 1:, :])
# Interpolate vertically to the requested depth level z
U = grid.zslice(U1, z)
V = grid.zslice(V1, z)
# Remove velocity at land and where the bottom is shallower than z
U[grid.h < z] = np.nan
V[grid.h < z] = np.nan
# Compute the current speed
Speed = np.sqrt(U * U + V * V)
# Subsample every `stride`-th point so the vector plot stays readable
X = grid.X[::stride]
Y = grid.Y[::stride]
U = U[::stride, ::stride]
V = V[::stride, ::stride]
# --------------------
# Make the plot
# --------------------
# Filled contour plot of current speed
plt.contourf(grid.X, grid.Y, Speed, levels=speedlevels, cmap=speedcolors)
plt.colorbar()
# Overlay the (subsampled) current vectors
plt.quiver(X, Y, U, V, width=0.003)
# Plot green land mask
landmask(grid, "LightGreen")
# Set correct aspect ratio and axis limits
plt.axis("image")
plt.axis((grid.i0 + 0.5, grid.i1 - 1.5, grid.j0 + 0.5, grid.j1 - 1.5))
# Display the plot
plt.show()
|
nedbat/cupid | cupid/box.py | Python | mit | 5,647 | 0 | """Box geometry."""
from __future__ import division
from .helpers import poparg
class Box(object):
    """A Box holds the geometry of a box with a position and a size.
    Because of how it is typically used, it takes a single dictionary of
    arguments. The dictionary of arguments has arguments popped from it, and
    others ignored::
        >>> args = {'foo': 17, 'size': (10, 50), 'left': (100, 200)}
        >>> b = Box(args)
        >>> b.center
        (105.0, 200)
        >>> b.size
        (10, 50)
        >>> args
        {'foo': 17}
    The center and size are available as individual components also::
        >>> b.cx
        105.0
        >>> b.cy
        200
        >>> b.w
        10
        >>> b.h
        50
    You can ask about the edges of the box as coordinates (top, bottom, left,
    right) or points (north, south, east, west)::
        >>> b.north
        (105.0, 175.0)
        >>> b.south
        (105.0, 225.0)
        >>> b.top
        175.0
        >>> b.bottom
        225.0
    """
    def __init__(self, args):
        # box=<other Box> makes this a copy constructor.
        other_box = poparg(args, box=None)
        if other_box is not None:
            # Copy all the attributes of the other box.
            self.__dict__.update(other_box.__dict__)
            return
        size = poparg(args, size=None)
        assert size, "Have to specify a size!"
        pos_name = pos = None
        # Exactly one of these positioning keywords may be supplied.
        arg_names = "left center right top topleft topright".split()
        for arg_name in arg_names:
            arg = poparg(args, **{arg_name: None})
            if arg is not None:
                assert pos is None, "Got duplicate position: %s" % pos_name
                pos_name = arg_name
                pos = arg
        # Can specify position as pos=('topright', (100,200))
        pos_arg = poparg(args, pos=None)
        if pos_arg is not None:
            assert pos is None, "Got duplicate position: pos"
            pos_name, pos = pos_arg
        # Convert whichever anchor was given into the center point.
        if pos_name == 'left':
            center = (pos[0]+size[0]/2, pos[1])
        elif pos_name == 'right':
            center = (pos[0]-size[0]/2, pos[1])
        elif pos_name == 'center':
            center = pos
        elif pos_name == 'top':
            center = (pos[0], pos[1]+size[1]/2)
        elif pos_name == 'topleft':
            center = (pos[0]+size[0]/2, pos[1]+size[1]/2)
        elif pos_name == 'topright':
            center = (pos[0]-size[0]/2, pos[1]+size[1]/2)
        else:
            assert False, "Have to specify a position!"
        self.cx, self.cy = center
        self.w, self.h = size
        # Animation timing attributes (frame numbers); set=999999 means
        # "never disappears" by default.
        self.rise = poparg(args, rise=0)
        self.set = poparg(args, set=999999)
        self.fade = poparg(args, fade=0)
    def __repr__(self):
        return "<Box ={0.w}x{0.h} @{0.cx},{0.cy}>".format(self)
    def __eq__(self, other):
        # Boxes compare equal when geometry and timing all match.
        if not isinstance(other, Box):
            return False
        return (
            self.center == other.center and
            self.size == other.size and
            self.rise == other.rise and
            self.set == other.set and
            self.fade == other.fade
        )
    def __ne__(self, other):
        return not self == other
    def translate(self, dx, dy):
        """Create a new box just like this one, but translated.
        `dx` and `dy` are deltas for the center point. The returned box is
        the same as this one, but the center has moved::
            >>> b = Box(dict(size=(10,20), center=(100,200)))
            >>> b2 = b.translate(1, 2)
            >>> b2.center
            (101, 202)
            >>> b2.size
            (10, 20)
        The original box is unchanged::
            >>> b.center
            (100, 200)
        """
        box = Box(dict(box=self))
        box.cx += dx
        box.cy += dy
        return box
    def scale(self, sx, sy=None):
        """Make a new box that is scaled from this one."""
        # A single factor scales both axes uniformly.
        sy = sy or sx
        cx = self.cx * sx
        cy = self.cy * sy
        w = self.w * sx
        h = self.h * sy
        return Box(dict(size=(w, h), center=(cx, cy)))
    def union(self, other):
        """Create a new box that covers self and other."""
        left = min(self.left, other.left)
        right = max(self.right, other.right)
        top = min(self.top, other.top)
        bottom = max(self.bottom, other.bottom)
        width = right - left
        height = bottom - top
        box = Box(dict(size=(width, height), topleft=(left, top)))
        return box
    @property
    def center(self):
        """The center point of the box."""
        return self.cx, self.cy
    @property
    def size(self):
        """The width and height as a pair."""
        return self.w, self.h
    @property
    def top(self):
        """The y-coodinate of the top edge."""
        return self.cy - self.h/2
    @property
    def bottom(self):
        """The y-coordinate of the bottom edge."""
        return self.cy + self.h/2
    @property
    def left(self):
        """The x-coordinate of the left edge."""
        return self.cx - self.w/2
    @property
    def right(self):
        """The x-coordinate of the right edge."""
        return self.cx + self.w/2
    @property
    def north(self):
        """The point at the north of the box."""
        return self.cx, self.top
    @property
    def south(self):
        """The point at the south of the box."""
        return self.cx, self.bottom
    @property
    def east(self):
        """The point at the east of the box."""
        return self.right, self.cy
    @property
    def west(self):
        """The point at the west of the box."""
        return self.left, self.cy
|
boatd/python-boatd | boatdclient/point.py | Python | gpl-3.0 | 5,843 | 0.002396 | import math
from math import sin as sin
from math import cos as cos
from .bearing import Bearing
EARTH_RADIUS = 6371009.0  # mean earth radius, in meters


class Point(object):
    '''A point on the face of the earth, held as latitude/longitude in degrees.'''

    def __init__(self, latitude, longitude):
        # Fixed: the assignment was corrupted ("long | itude") in the source.
        self._lat = latitude
        self._long = longitude

    @classmethod
    def from_radians(cls, lat_radians, long_radians):
        '''
        Return a new instance of Point from a pair of coordinates in radians.
        '''
        return cls(math.degrees(lat_radians), math.degrees(long_radians))

    def __getitem__(self, key):
        # Index 0 is latitude, index 1 is longitude.
        if key == 0:
            return self._lat
        elif key == 1:
            return self._long
        else:
            raise IndexError('Point objects can only have two coordinates')

    def __iter__(self):
        '''Return an iterator containing the lat and long'''
        return iter([self.lat, self.long])

    def __str__(self):
        '''Return a string representation of the point'''
        return '{0:0.5f}N, {1:0.5f}W'.format(*list(self))

    def __repr__(self):
        return '<{0}.{1} ({2}) object at {3}>'.format(
            self.__module__, type(self).__name__, str(self), hex(id(self)))

    @property
    def lat(self):
        '''Return the latitude in degrees'''
        return self._lat

    @property
    def long(self):
        '''Return the longitude in degrees'''
        return self._long

    @property
    def lat_radians(self):
        '''Return the latitude in radians'''
        return math.radians(self.lat)

    @property
    def long_radians(self):
        '''Return the longitude in radians'''
        return math.radians(self.long)

    def distance_to(self, point):
        '''
        Return the great-circle distance between this point and another
        point in meters (spherical law of cosines).

        :param point: Point to measure distance to
        :type point: Point
        :returns: The distance to the other point
        :rtype: float
        '''
        angle = math.acos(
            sin(self.lat_radians) * sin(point.lat_radians) +
            cos(self.lat_radians) * cos(point.lat_radians) *
            cos(self.long_radians - point.long_radians)
        )
        return angle * EARTH_RADIUS

    def bearing_to(self, point):
        '''
        Return the initial bearing to another point.

        :param point: Point to measure bearing to
        :type point: Point
        :returns: The bearing to the other point
        :rtype: Bearing
        '''
        delta_long = point.long_radians - self.long_radians
        y = sin(delta_long) * cos(point.lat_radians)
        x = (
            cos(self.lat_radians) * sin(point.lat_radians) -
            sin(self.lat_radians) * cos(point.lat_radians) * cos(delta_long)
        )
        radians = math.atan2(y, x)
        return Bearing.from_radians(radians)

    def cross_track_distance(self, start_point, end_point):
        '''
        Return the cross track distance from this point to the line between
        two points::
               * end_point
              /
             /
            /   * this point
           /
          /
         *
        start_point

        :param start_point: First point on the line
        :type start_point: Point
        :param end_point: Second point on the line
        :type end_point: Point
        :returns: The perpendicular distance to the line between
            ``start_point`` and ``end_point``, where distance on the right
            of ``start_point`` is positive and distance on the left is
            negative
        :rtype: float
        '''
        dist = start_point.distance_to(self)
        bearing_to_end = start_point.bearing_to(end_point).radians
        bearing_to_point = start_point.bearing_to(self).radians
        return math.asin(math.sin(dist / EARTH_RADIUS) *
                         math.sin(bearing_to_point - bearing_to_end)) * \
            EARTH_RADIUS

    def relative_point(self, bearing_to_point, distance):
        '''
        Return a waypoint at a location described relative to the current
        point.

        :param bearing_to_point: Relative bearing from the current waypoint
        :type bearing_to_point: Bearing
        :param distance: Distance from the current waypoint, in meters
        :type distance: float
        :return: The point described by the parameters
        '''
        # NOTE(review): the bearing is mirrored (360 - bearing) before use,
        # which flips the longitude offset relative to the standard formula;
        # presumably deliberate for this project's bearing convention —
        # confirm against Bearing's sign convention.
        bearing = math.radians(360 - bearing_to_point)
        rad_distance = (distance / EARTH_RADIUS)
        lat1 = (self.lat_radians)
        lon1 = (self.long_radians)
        lat3 = math.asin(math.sin(lat1) * math.cos(rad_distance) +
                         math.cos(lat1) * math.sin(rad_distance) * math.cos(bearing))
        lon3 = lon1 + math.atan2(
            math.sin(bearing) * math.sin(rad_distance) * math.cos(lat1),
            math.cos(rad_distance) - math.sin(lat1) * math.sin(lat3))
        return Point(math.degrees(lat3), math.degrees(lon3))

    def __add__(self, other):
        return Point(self.lat + other.lat, self.long + other.long)

    def __sub__(self, other):
        return Point(self.lat - other.lat, self.long - other.long)

    def __div__(self, value):
        return Point(self.lat / value, self.long / value)

    # Python 3 dispatches the / operator to __truediv__; alias the Python 2
    # __div__ implementation so division works on both (backward-compatible).
    __truediv__ = __div__
# Smoke-test demo: prints a few distances/bearings between known locations.
if __name__ == '__main__':
    castle = Point(52.41389, -4.09098)  # Aberystwyth castle
    print(castle)
    hill = Point(52.42459, -4.08339)  # Constitution hill
    print(hill)
    # distance should be ~1.29844 km
    print('regular:', castle.distance_to(hill))
    dismaland = Point(51.340911, -2.982787)
    print('regular:', castle.distance_to(dismaland))
    print('cross track:',
          Point(52.413990, -4.089979).cross_track_distance(castle, hill))
    print(castle.bearing_to(hill))
    # should be ~90 degrees
    print(Point(52.41398, -4.4627).bearing_to(Point(52.41398, -4.09122)))
|
jwren/intellij-community | python/testData/inspections/PyUnresolvedReferencesInspection3K/descriptorAttribute.py | Python | apache-2.0 | 884 | 0.020362 | from typing import Any
class StringDescriptor:
    """Descriptor whose __get__ always yields the str 'foo'.

    (Fixed: the class name and __get__ signature were corrupted by stray
    " | " markers.)
    """
    def __get__(self, instance, owner):
        return 'foo'
# Inspection fixture: a descriptor whose __get__ is annotated as Any,
# so attribute access through it resolves to Any.
class AnyDescriptor:
    def __get__(self, instance, owner) -> Any:
        return 'bar'
# Inspection fixture: annotated to return list although it actually returns
# a str — the mismatch appears deliberate (it drives the expected warnings).
class ListDescriptor:
    def __get__(self, instance: Any, owner: Any) -> list:
        return 'baz'
# Inspection fixture: exposes one attribute per descriptor flavour above.
class C:
    foo = StringDescriptor()
    bar = AnyDescriptor()
    baz = ListDescriptor()
# Instance level
c = C()
c.foo.upper()
c.foo.<warning descr="Unresolved attribute reference 'non_existent' for class 'str'">non_existent</warning>()
c.bar.upper()
c.bar.non_existent()
c.baz.append()
c.baz.<warning descr="Unresolved attribute reference 'non_existent' for class 'list'">non_existent</warning>()
# Class level
C.foo.upper()
C.foo.<warning descr="Unresolved attribute reference 'non_existent' for class 'str'">non_existent</warning>()
C.bar.upper()
C.bar.non_existent()
|
gsvaldes/tequio | districts/serializers.py | Python | gpl-3.0 | 349 | 0 | from rest_framework_gis.serializers import GeoFeatureModelSerializer
from districts.models import AlderDistrict


class DistrictSerializer(GeoFeatureModelSerializer):
    """
    Geo-serialize the AlderDistrict model as GeoJSON features.

    ``mpoly`` supplies the feature geometry; the listed fields become
    feature properties.  (Fixed: the import line and the ``fields`` line
    were corrupted by stray " | " markers.)
    """
    class Meta:
        model = AlderDistrict
        geo_field = 'mpoly'
        fields = ('alder', 'wards_txt', 'wards_desc')
|
pedrobaeza/odoo | openerp/service/db.py | Python | agpl-3.0 | 15,507 | 0.003095 | # -*- coding: utf-8 -*-
from contextlib import closing
from functools import wraps
import logging
import os
import shutil
import threading
import traceback
import tempfile
import zipfile
import psycopg2
import openerp
from openerp import SUPERUSER_ID
from openerp.exceptions import Warning
import openerp.release
import openerp.sql_db
import openerp.tools
import security
_logger = logging.getLogger(__name__)
# In-flight database-creation actions, keyed by the integer returned from
# exp_create(); each entry tracks progress, thread, and final outcome.
self_actions = {}
# Monotonically increasing action id, guarded by self_id_protect.
self_id = 0
self_id_protect = threading.Semaphore()
class DatabaseExists(Warning):
    # Raised when attempting to create a database whose name already exists.
    pass
# This should be moved to openerp.modules.db, along side initialize().
def _initialize_db(id, db_name, demo, lang, user_password):
    """Populate a freshly created database: install base modules, load the
    requested language, set the admin password, and record the outcome
    (clean/exception) in ``self_actions[id]``. Runs in a worker thread for
    exp_create(); never raises — failures are stored for exp_get_progress().
    """
    try:
        self_actions[id]['progress'] = 0
        db = openerp.sql_db.db_connect(db_name)
        with closing(db.cursor()) as cr:
            # TODO this should be removed as it is done by RegistryManager.new().
            openerp.modules.db.initialize(cr)
            openerp.tools.config['lang'] = lang
            cr.commit()
        registry = openerp.modules.registry.RegistryManager.new(
            db_name, demo, self_actions[id], update_module=True)
        with closing(db.cursor()) as cr:
            if lang:
                # Translate every installed module into the chosen language.
                modobj = registry['ir.module.module']
                mids = modobj.search(cr, SUPERUSER_ID, [('state', '=', 'installed')])
                modobj.update_translations(cr, SUPERUSER_ID, mids, lang)
            # update admin's password and lang
            values = {'password': user_password, 'lang': lang}
            registry['res.users'].write(cr, SUPERUSER_ID, [SUPERUSER_ID], values)
            cr.execute('SELECT login, password FROM res_users ORDER BY login')
            self_actions[id].update(users=cr.dictfetchall(), clean=True)
            cr.commit()
    except Exception, e:
        # Stash the failure so exp_get_progress() can re-raise it.
        self_actions[id].update(clean=False, exception=e)
        _logger.exception('CREATE DATABASE failed:')
        self_actions[id]['traceback'] = traceback.format_exc()
def dispatch(method, params):
    """RPC entry point: route *method* to the matching ``exp_<method>``
    function. Privileged methods require the super-admin password as the
    first parameter; unknown methods raise KeyError."""
    if method in ['create', 'get_progress', 'drop', 'dump', 'restore', 'rename',
                  'change_admin_password', 'migrate_databases',
                  'create_database', 'duplicate_database']:
        # Privileged: first param is the master password, checked and stripped.
        passwd = params[0]
        params = params[1:]
        security.check_super(passwd)
    elif method in ['db_exist', 'list', 'list_lang', 'server_version']:
        # params = params
        # No security check for these methods
        pass
    else:
        raise KeyError("Method not found: %s" % method)
    fn = globals()['exp_' + method]
    return fn(*params)
def _create_empty_database(name):
    """Create an empty PostgreSQL database *name* from the configured
    template; raise DatabaseExists when it already exists.

    NOTE(review): *name* is interpolated into the CREATE DATABASE statement
    (identifiers cannot be bound parameters); callers are trusted admin
    endpoints, but the name is not escaped here — confirm upstream
    validation.
    """
    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        chosen_template = openerp.tools.config['db_template']
        cr.execute("SELECT datname FROM pg_database WHERE datname = %s",
                   (name,))
        if cr.fetchall():
            raise DatabaseExists("database %r already exists!" % (name,))
        else:
            cr.autocommit(True)     # avoid transaction block
            cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (name, chosen_template))
def exp_create(db_name, demo, lang, user_password='admin'):
    """Asynchronously create and initialize database *db_name*.

    Returns an action id; poll exp_get_progress(id) for completion.
    """
    # Allocate a fresh action id under the semaphore.
    self_id_protect.acquire()
    global self_id
    self_id += 1
    id = self_id
    self_id_protect.release()
    self_actions[id] = {'clean': False}
    _create_empty_database(db_name)
    _logger.info('CREATE DATABASE %s', db_name.lower())
    # Heavy initialization happens in a background thread.
    create_thread = threading.Thread(target=_initialize_db,
                                     args=(id, db_name, demo, lang, user_password))
    create_thread.start()
    self_actions[id]['thread'] = create_thread
    return id
def exp_create_database(db_name, demo, lang, user_password='admin'):
    """ Similar to exp_create but blocking."""
    # An action entry is still allocated so _initialize_db can record
    # progress and failures, even though no polling thread is involved.
    self_id_protect.acquire()
    global self_id
    self_id += 1
    id = self_id
    self_id_protect.release()
    self_actions[id] = {'clean': False}
    _logger.info('Create database `%s`.', db_name)
    _create_empty_database(db_name)
    _initialize_db(id, db_name, demo, lang, user_password)
    return True
def exp_duplicate_database(db_original_name, db_name):
    """Duplicate database *db_original_name* (and its filestore) as *db_name*.

    The source connection pool is closed first so PostgreSQL accepts it as
    a template. Returns True.
    """
    _logger.info('Duplicate database `%s` to `%s`.', db_original_name, db_name)
    openerp.sql_db.close_db(db_original_name)
    db = openerp.sql_db.db_connect('postgres')
    with closing(db.cursor()) as cr:
        cr.autocommit(True)     # avoid transaction block
        cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "%s" """ % (db_name, db_original_name))
    from_fs = openerp.tools.config.filestore(db_original_name)
    to_fs = openerp.tools.config.filestore(db_name)
    if os.path.exists(from_fs) and not os.path.exists(to_fs):
        # The filestore is a directory tree of attachments (exp_drop removes
        # it with shutil.rmtree); shutil.copy() only handles single files
        # and fails on a directory, so copy the whole tree recursively.
        shutil.copytree(from_fs, to_fs)
    return True
def exp_get_progress(id):
    """Poll the asynchronous creation started by exp_create().

    While the worker thread is alive, returns (progress-capped-at-0.95, []).
    On clean completion, pops the action and returns (1.0, users).
    On failure, pops the action and re-raises the recorded exception with
    its original traceback (Python 2 three-argument raise).
    """
    if self_actions[id]['thread'].isAlive():
        # return openerp.modules.init_progress[db_name]
        return min(self_actions[id].get('progress', 0), 0.95), []
    else:
        clean = self_actions[id]['clean']
        if clean:
            users = self_actions[id]['users']
            for user in users:
                # Remove the None passwords as they can't be marshalled by XML-RPC.
                if user['password'] is None:
                    user['password'] = ''
            self_actions.pop(id)
            return 1.0, users
        else:
            a = self_actions.pop(id)
            exc, tb = a['exception'], a['traceback']
            raise Exception, exc, tb
def exp_drop(db_name):
if db_name not in exp_list(True):
return False
openerp.modules.registry.RegistryManager.delete(db_name)
openerp.sql_db.close_db(db_name)
db = openerp.sql_db.db_connect('postgres')
with closing(db.cursor()) as cr:
cr.autocommit(True) # avoid transaction block
# Try to terminate all other connections that might prevent
# dropping the database
try:
# PostgreSQL 9.2 renamed pg_stat_activity.procpid to pid:
# http://www.postgresql.org/docs/9.2/static/release-9-2.html#AEN110389
pid_col = 'pid' if cr._cnx.server_version >= 90200 else 'procpid'
cr.execute("""SELECT pg_terminate_backend(%(pid_col)s)
FROM pg_stat_activity
WHERE datname = %%s AND
%(pid_col)s != pg_backend_pid()""" % {'pid_col': pid_col},
(db_name,))
except Exception:
pass
|
try:
cr.execute('DROP DATABASE "%s"' % db_name)
except Exception, e:
_logger.error('DROP DB: %s failed:\n%s', db_name, e)
raise Exception("Couldn't drop database %s: %s" % (db_name, e))
else:
_logger.info('DROP DB: %s', db_name)
fs = openerp.tools.config.filestore(db_name)
if os.path.exists(fs):
shutil.rmtree(fs)
return True
def _set_pg_pa | ssword_in_environment(func):
""" On systems where pg_restore/pg_dump require an explicit
password (i.e. when not connecting via unix sockets, and most
importantly on Windows), it is necessary to pass the PG user
password in the environment or in a special .pgpass file.
This decorator handles setting
:envvar:`PGPASSWORD` if it is not already
set, and removing it afterwards.
See also http://www.postgresql.org/docs/8.4/static/libpq-envars.html
.. note:: This is not thread-safe, and should never be enabled for
SaaS (giving SaaS users the super-admin password is not a good idea
anyway)
"""
@wraps(func)
def wrapper(*args, **kwargs):
if os.environ.get('PGPASSWORD') or not openerp.tools.config['db_password']:
return func(*args, **kwargs)
else:
os.environ['PGPASSWORD'] = openerp.tools.config['db_password']
try:
return func(*args, **kwargs)
finally:
del os.environ['PGPASSWORD']
return wrapper
def exp_dump(db_name):
    """Dump database *db_name* and return the dump base64-encoded.

    Spools through a temporary file to avoid holding the raw dump twice in
    memory. Uses the Python 2 'base64' str codec.
    """
    with tempfile.TemporaryFile() as t:
        dump_db(db_name, t)
        t.seek(0)
        return t.read().encode('base64')
@_set_pg_password_ |
warner/python-tweetnacl | setup.py | Python | mit | 4,417 | 0.008836 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Build and install the TweetNaCl wrapper.
"""
from __future__ import print_function
import sys, os
from distutils.core import setup, Extension, Command
from distutils.util import get_platform
def setup_path():
    """Prepend the distutils build directory (build/lib.<plat>-<X.Y>) to
    sys.path so the freshly built extension can be imported by the
    test/speed commands.
    """
    # copied from distutils/command/build.py
    plat_name = get_platform()
    # sys.version[0:3] breaks on two-digit minor versions ("3.1" on
    # Python 3.10+); build the "X.Y" tag from version_info instead.
    plat_specifier = ".%s-%s.%s" % (plat_name,
                                    sys.version_info[0], sys.version_info[1])
    build_lib = os.path.join("build", "lib"+plat_specifier)
    sys.path.insert(0, build_lib)
# C extension wrapping TweetNaCl; compiled with optimization flags since
# the reference implementation favours clarity over speed.
nacl_module = Extension('nacl._tweetnacl',
                    ["tweetnaclmodule.c", "tweetnacl.c", "randombytes.c"],
                    extra_compile_args=["-O2",
                                        "-funroll-loops",
                                        "-fomit-frame-pointer"])
class Test(Command):
    """distutils command: run every module in ./test against the freshly
    built extension.

    (Fixed: the test_verify_16 import line was corrupted by a stray
    " | " marker.)
    """
    description = "run tests"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Make the built extension and the test helpers importable.
        setup_path()
        sys.path.insert(0, os.path.join(os.path.dirname(__file__), "test"))
        import test_box; test_box.run()
        import test_hash; test_hash.run()
        import test_onetimeauth; test_onetimeauth.run()
        import test_scalarmult; test_scalarmult.run()
        import test_secretbox; test_secretbox.run()
        import test_sign; test_sign.run()
        import test_stream; test_stream.run()
        import test_verify_16; test_verify_16.run()
        import test_verify_32; test_verify_32.run()
class Speed(Test):
    """distutils command: benchmark the NaCl primitives.

    (Fixed: the ``description`` attribute line was corrupted by a stray
    " | " marker.)
    """
    description = "run benchmark suite"

    def run(self):
        setup_path()
        from timeit import Timer

        def do(setup_statements, statement):
            # extracted from timeit.py
            t = Timer(stmt=statement, setup="\n".join(setup_statements))
            # determine number so that 0.2 <= total time < 2.0
            for i in range(1, 10):
                number = 10**i
                x = t.timeit(number)
                if x >= 0.2:
                    break
            return x / number

        def abbrev(t):
            # Render a per-call time in seconds with a readable unit.
            if t > 1.0:
                return "%.3fs" % t
            if t > 1e-3:
                return "%.1fms" % (t*1e3)
            return "%.1fus" % (t*1e6)

        IM = "from nacl import raw; msg='H'*1000"
        # Hash
        S1 = "raw.crypto_hash(msg)"
        print(" Hash:", abbrev(do([IM], S1)))
        # OneTimeAuth
        S1 = "k = 'k'*raw.crypto_onetimeauth_KEYBYTES"
        S2 = "auth = raw.crypto_onetimeauth(msg, k)"
        S3 = "raw.crypto_onetimeauth_verify(auth, msg, k)"
        print(" OneTimeAuth:", abbrev(do([IM, S1], S2)))
        print(" OneTimeAuth verify:", abbrev(do([IM, S1, S2], S3)))
        # SecretBox
        S1 = "k = 'k'*raw.crypto_secretbox_KEYBYTES"
        S2 = "nonce = raw.randombytes(raw.crypto_secretbox_NONCEBYTES)"
        S3 = "c = raw.crypto_secretbox(msg, nonce, k)"
        S4 = "raw.crypto_secretbox_open(c, nonce, k)"
        print(" Secretbox encryption:", abbrev(do([IM, S1, S2], S3)))
        print(" Secretbox decryption:", abbrev(do([IM, S1, S2, S3], S4)))
        # Curve25519
        S1 = "pk,sk = raw.crypto_box_keypair()"
        S2 = "nonce = raw.randombytes(raw.crypto_box_NONCEBYTES)"
        S3 = "ct = raw.crypto_box(msg, nonce, pk, sk)"
        S4 = "k = raw.crypto_box_beforenm(pk, sk)"
        S5 = "ct = raw.crypto_box_afternm(msg, nonce, k)"
        print(" Curve25519 keypair generation:", abbrev(do([IM], S1)))
        print(" Curve25519 encryption:", abbrev(do([IM, S1, S2, S3], S3)))
        print(" Curve25519 beforenm (setup):", abbrev(do([IM, S1, S2, S3], S4)))
        print(" Curve25519 afternm:", abbrev(do([IM, S1, S2, S3, S4], S5)))
        # Ed25519
        S1 = "vk,sk = raw.crypto_sign_keypair()"
        S2 = "sig = raw.crypto_sign(msg, sk)"
        S3 = "raw.crypto_sign_open(sig, vk)"
        print(" Ed25519 keypair generation:", abbrev(do([IM], S1)))
        print(" Ed25519 signing:", abbrev(do([IM, S1], S2)))
        print(" Ed25519 verifying:", abbrev(do([IM, S1, S2], S3)))
# Register the extension module, the pure-python package under src/, and the
# custom "test" / "speed" distutils commands defined above.
setup (name = 'tweetnacl',
       version = '0.1',
       author = "Brian Warner, Jan Mojžíš",
       description = """Python wrapper for TweetNaCl""",
       ext_modules = [nacl_module],
       packages = ["nacl"],
       package_dir = {"nacl": "src"},
       cmdclass = { "test": Test,
                    "speed": Speed },
       )
|
from django.conf.urls import url, patterns

# Single URL: toggle a "like" on any object addressed by content type + pk.
urlpatterns = patterns(
    "phileo.views",
    url(r"^like/(?P<content_type_id>\d+):(?P<object_id>\d+)/$", "like_toggle", name="phileo_like_toggle")
)
|
# Re-export the package implementation at top level.
from TitanicAttempt import *
stanta/darfchain | darfchain/models/sale_order.py | Python | gpl-3.0 | 7,313 | 0.010666 | from openerp import models, fields, api
from openerp.tools.translate import _
import logging
#from fingerprint import Fingerprint
from dateutil import relativedelta
from datetime import datetime as dt
from dateutil import parser
import xlsxwriter
import StringIO
from io import BytesIO
import base64
import hashlib
import xmltodict
from math import modf
from lxml import etree
#from xmljson import badgerfish as bf
from xml.etree.ElementTree import fromstring
from json import dumps
import pywaves as pw
import requests
import base58
import rethinkdb as r
from subprocess import call
import os
import ast
import json
from openerp.exceptions import UserError
from web3 import Web3, HTTPProvider, IPCProvider
import hashlib
_logger = logging.getLogger(__name__)
class sign_sale_order(models.Model):
_inherit = 'sale.order'
signature_status = fields.Boolean('Sign')
signature_hash = fields.Char('Signature Hash')
gas_for_signature = fields.Float('Gas for signature',compute='_gas_for_signature')
gas_limit = fields.Float('Gas limit',compute='_gas_limit')
signature_timestamp = fields.Char('Signature timestamp')
result_of_check = fields.Char(default='Not checked')
@api.one
def getDocumentMD5(self):
return hashlib.md5(str(self.incoterm)).hexdigest()
@api.one
def get_ethereum_addres(self):
ethereum_address = self.env['setting.connect'].search([('platforma','=','ethereum')])
result_ethereum_dic = {}
if ethereum_address:
result_ethereum_dic.update({'ethereum_address':ethereum_address[0].ethereum_pk,
'ethereum_interface': ethereum_address[0].ethereum_address,
'address_node':ethereum_address[0].ethereum_node_address})
return result_ethereum_dic
def _gas_for_signature(self):
ethereum_setting = {}
if self.get_ethereum_addres()[0].keys() == {}:
result_of_gas_estimate = 0
else:
date_of_synchronization = dt.now()
ethereum_setting = self.get_ethereum_addres()
ethereum_setting = ethereum_setting[0]
| web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
abi_json = ethereum_setting['ethereum_interface']
ethereum_contract_address = ethereum_setting['ethereum_address']
contract = web3.eth.contract(abi = json.loads(abi_json), address=ethereum_contract_address | )
hash_of_synchronaze = '"'+base58.b58encode(str(date_of_synchronization))+'"'
md5 = self.getDocumentMD5()
md5_for_solidity = '"'+md5[0]+'"'
print hash_of_synchronaze
try:
result_of_gas_estimate = contract.estimateGas().setDocumentHash(str(hash_of_synchronaze),md5_for_solidity)
except:
result_of_gas_estimate = 0
self.gas_for_signature = result_of_gas_estimate
return result_of_gas_estimate
def _gas_limit(self):
ethereum_setting = {}
if self.get_ethereum_addres()[0].keys() == {}:
result_of_gas_limit = 0
else:
ethereum_setting = self.get_ethereum_addres()
ethereum_setting = ethereum_setting[0]
web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
abi_json = ethereum_setting['ethereum_interface']
ethereum_contract_address = ethereum_setting['ethereum_address']
contract = web3.eth.contract(abi = json.loads(abi_json), address=ethereum_contract_address)
result_of_gas_limit = contract.call().getGasLimit()
self.gas_limit = result_of_gas_limit
return result_of_gas_limit
def signature_action(self):
ethereum_setting = {}
date_of_synchronization = dt.now()
ethereum_setting = {}
ethereum_setting = self.get_ethereum_addres()
ethereum_setting = ethereum_setting[0]
web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
abi_json = ethereum_setting['ethereum_interface']
ethereum_contract_address = ethereum_setting['ethereum_address']
contract = web3.eth.contract(abi = json.loads(abi_json), address=ethereum_contract_address)
hash_of_synchronaze = '"'+base58.b58encode(str(date_of_synchronization))+'"'
print hash_of_synchronaze
md5 = self.getDocumentMD5()
md5_for_solidity = '"'+md5[0]+'"'
TransactionHashEthereum = contract.transact().setDocumentHash(str(hash_of_synchronaze),str(md5_for_solidity))
self.signature_timestamp = str(date_of_synchronization)
self.signature_hash = TransactionHashEthereum
self.signature_status = True
self.env['journal.signature'].create({'name':self.name,
'checksum':md5[0],
'hash_of_signature':TransactionHashEthereum,
'timestamp_of_document':self.signature_timestamp,
'date_of_signature':date_of_synchronization})
root = etree.Element("data")
sale_order_name = etree.SubElement(root,'name')
sale_order_name.text=self.name
sale_order_hash = etree.SubElement(root,'transaction_hash')
sale_order_hash.text=TransactionHashEthereum
sale_order_md5 = etree.SubElement(root,'md5')
sale_order_md5.text=md5[0]
xml_result = etree.tostring(root, pretty_print=False)
#xml_result = xml_result.replace('"','\\"')
#-------------------------------------------- write xml to temp file
file_to_save_with_path = '/tmp/'+self.name+str(date_of_synchronization)
temp_file = open(file_to_save_with_path,'w')
temp_file.write(xml_result)
temp_file.close()
string = '/usr/bin/putbigchaindb.py --xml="'+file_to_save_with_path+'"'
os.system(string)
def check_signature_action(self):
date_of_synchronization = dt.now()
ethereum_setting = self.get_ethereum_addres()
ethereum_setting = ethereum_setting[0]
web3 = Web3(HTTPProvider(ethereum_setting['address_node']))
abi_json = ethereum_setting['ethereum_interface']
ethereum_contract_address = ethereum_setting['ethereum_address']
contract = web3.eth.contract(abi = json.loads(abi_json), address=ethereum_contract_address)
get_transact = web3.eth.getTransaction(self.signature_hash)
timestamp = str(contract.call(get_transact).getDocumentHash().replace('"',''))
md5 = self.getDocumentMD5()
md5_from_contract = contract.call(get_transact).getDocumentMD5()
if str(md5_from_contract).replace('"', '') == md5[0]:
self.result_of_check = 'OK'
else:
self.result_of_check = 'Error Checksum'
class JournalOfSignature(models.Model):
    # Audit log of signed documents; one row is created per
    # sign_sale_order.signature_action call.
    _name = 'journal.signature'
    name = fields.Char('Document Number')
    hash_of_signature = fields.Char('Hash of signature')
    checksum = fields.Char('Check sum of Document')
    timestamp_of_document = fields.Char('Timestamp')
    date_of_signature = fields.Date('Date of signature')
|
eandersson/amqpstorm | amqpstorm/exception.py | Python | mit | 4,732 | 0 | """AMQPStorm Exception."""
# AMQP 0.9.1 reply code -> (name, description). The 506 and 530 entries were
# corrupted by stray "|" markers ('RESOURC | E-ERROR', 'NOT-A | LLOWED') and
# are restored here.
AMQP_ERROR_MAPPING = {
    311: ('CONTENT-TOO-LARGE',
          'The client attempted to transfer content larger than the '
          'server could accept at the present time. The client may '
          'retry at a later time.'),
    312: ('NO-ROUTE', 'Undocumented AMQP Soft Error'),
    313: ('NO-CONSUMERS',
          'When the exchange cannot deliver to a consumer when the '
          'immediate flag is set. As a result of pending data on '
          'the queue or the absence of any consumers of the queue.'),
    320: ('CONNECTION-FORCED',
          'An operator intervened to close the connection for some reason. '
          'The client may retry at some later date.'),
    402: ('INVALID-PATH',
          'The client tried to work with an unknown virtual host.'),
    403: ('ACCESS-REFUSED',
          'The client attempted to work with a server entity to which '
          'has no access due to security settings.'),
    404: ('NOT-FOUND',
          'The client attempted to work with a server '
          'entity that does not exist.'),
    405: ('RESOURCE-LOCKED',
          'The client attempted to work with a server entity to which it '
          'has no access because another client is working with it.'),
    406: ('PRECONDITION-FAILED',
          'The client requested a method that was not '
          'allowed because some precondition failed.'),
    501: ('FRAME-ERROR',
          'The sender sent a malformed frame that the recipient could '
          'not decode. This strongly implies a programming error in '
          'the sending peer.'),
    502: ('SYNTAX-ERROR',
          'The sender sent a frame that contained illegal values for '
          'one or more fields. This strongly implies a programming '
          'error in the sending peer.'),
    503: ('COMMAND-INVALID',
          'The client sent an invalid sequence of frames, attempting to '
          'perform an operation that was considered invalid by the server. '
          'This usually implies a programming error in the client.'),
    504: ('CHANNEL-ERROR',
          'The client attempted to work with a channel that had not '
          'been correctly opened. This most likely indicates a '
          'fault in the client layer.'),
    505: ('UNEXPECTED-FRAME',
          'The peer sent a frame that was not expected, usually in the '
          'context of a content header and body. This strongly '
          'indicates a fault in the peer\'s content processing.'),
    506: ('RESOURCE-ERROR',
          'The server could not complete the method because it lacked '
          'sufficient resources. This may be due to the client '
          'creating too many of some type of entity.'),
    530: ('NOT-ALLOWED',
          'The client tried to work with some entity in a manner '
          'that is prohibited by the server, due to security '
          'settings or by some other criteria.'),
    540: ('NOT-IMPLEMENTED',
          'The client tried to use functionality that is '
          'notimplemented in the server.'),
    541: ('INTERNAL-ERROR',
          'The server could not complete the method because of an '
          'internal error. The server may require intervention by '
          'an operator in order to resume normal operations.')
}
class AMQPError(IOError):
    """General AMQP Error.

    Errors raised by AMQPStorm carry the AMQP 0.9.1 reply code (when
    applicable), its symbolic name and its specification text, e.g.
    ::

        except AMQPChannelError as why:
            if why.error_code == 312:
                self.channel.queue.declare(queue_name)
    """
    _documentation = None
    _error_code = None
    _error_type = None

    def __init__(self, *args, **kwargs):
        # 'reply_code' is consumed here; remaining args go to IOError.
        self._error_code = kwargs.pop('reply_code', None)
        super(AMQPError, self).__init__(*args, **kwargs)
        mapped = AMQP_ERROR_MAPPING.get(self._error_code)
        if mapped:
            self._error_type, self._documentation = mapped

    @property
    def documentation(self):
        """AMQP Documentation string."""
        return self._documentation or bytes()

    @property
    def error_code(self):
        """AMQP Error Code - A 3-digit reply code."""
        return self._error_code

    @property
    def error_type(self):
        """AMQP Error Type e.g. NOT-FOUND."""
        return self._error_type
# Concrete exception hierarchy: connection > channel > message, all rooted at
# AMQPError so callers can catch at any granularity.
class AMQPConnectionError(AMQPError):
    """AMQP Connection Error."""
    pass


class AMQPChannelError(AMQPError):
    """AMQP Channel Error."""
    pass


class AMQPMessageError(AMQPChannelError):
    """AMQP Message Error."""
    pass
|
dennerlager/sepibrews | sepibrews/progressbar/widgets.py | Python | gpl-3.0 | 31,775 | 0.000031 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import with_statement
import abc
import sys
import pprint
import datetime
from python_utils import converters
import six
from . import base
from . import utils
MAX_DATE = datetime.date.max
MAX_TIME = datetime.time.max
MAX_DATETIME = datetime.datetime.max
def string_or_lambda(input_):
    '''Normalize a widget input to a renderer callable.

    Plain (six) strings are wrapped in a renderer that %-interpolates the
    progress data dict; anything else is assumed to already be a callable
    with the (progress, data, width) signature and is returned unchanged.
    '''
    if not isinstance(input_, six.string_types):
        return input_

    def render_input(progress, data, width):
        return input_ % data

    return render_input
def create_marker(marker):
    # Accepts a single-character string or a callable. Strings are wrapped in
    # a renderer that repeats the character proportionally to the progress;
    # callables are returned unchanged.
    def _marker(progress, data, width):
        if progress.max_value is not base.UnknownLength \
                and progress.max_value > 0:
            # scale the marker run to the fraction of progress made
            length = int(progress.value / progress.max_value * width)
            return (marker * length)
        else:
            # unknown/zero max: render a single marker
            return marker

    if isinstance(marker, six.string_types):
        marker = converters.to_unicode(marker)
        assert utils.len_color(marker) == 1, \
            'Markers are required to be 1 char'
        return _marker
    else:
        return marker
class FormatWidgetMixin(object):
    '''Mixin that renders a widget from a format string.

    With ``new_style`` set, ``str.format`` semantics are used; otherwise the
    data dict is applied with classic ``%`` interpolation.

    Data keys available include:
     - max_value: The maximum value (can be None with iterators)
     - value: The current value
     - total_seconds_elapsed: The seconds since the bar started
     - seconds_elapsed: The seconds since the bar started modulo 60
     - minutes_elapsed: The minutes since the bar started modulo 60
     - hours_elapsed: The hours since the bar started modulo 24
     - days_elapsed: The hours since the bar started
     - time_elapsed: HH:MM:SS since the bar started including days
     - percentage: Percentage as a float
    '''
    required_values = []

    def __init__(self, format, new_style=False, **kwargs):
        self.format = format
        self.new_style = new_style

    def __call__(self, progress, data, format=None):
        '''Render the widget; re-raise formatting errors after logging.'''
        template = format or self.format
        try:
            if self.new_style:
                return template.format(**data)
            return template % data
        except (TypeError, KeyError):
            # Dump the offending data to stderr so the broken format string
            # can be diagnosed, then propagate.
            print('Error while formatting %r' % self.format, file=sys.stderr)
            pprint.pprint(data, stream=sys.stderr)
            raise
class WidthWidgetMixin(object):
    '''Mixin to make sure widgets are only visible if the screen is within a
    specified size range so the progressbar fits on both large and small
    screens.

    Variables available:
     - min_width: Only display the widget if at least `min_width` is left
     - max_width: Only display the widget if at most `max_width` is left

    >>> class Progress(object):
    ...     term_width = 0
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    False
    >>> Progress.term_width = 5
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    True
    >>> Progress.term_width = 10
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    True
    >>> Progress.term_width = 11
    >>> WidthWidgetMixin(5, 10).check_size(Progress)
    False
    '''

    def __init__(self, min_width=None, max_width=None, **kwargs):
        self.min_width = min_width
        self.max_width = max_width

    def check_size(self, progress):
        '''Return True when progress.term_width falls inside the configured
        [min_width, max_width] range; bounds of None (or 0) are ignored.'''
        if self.min_width and self.min_width > progress.term_width:
            return False
        elif self.max_width and self.max_width < progress.term_width:
            return False
        else:
            return True
class WidgetBase(WidthWidgetMixin):
    __metaclass__ = abc.ABCMeta
    # NOTE(review): the string literal below follows the __metaclass__
    # assignment, so it is a no-op expression statement, not the class
    # docstring (__doc__ stays unset); __metaclass__ itself only takes
    # effect on Python 2.
    '''The base class for all widgets

    The ProgressBar will call the widget's update value when the widget should
    be updated. The widget's size may change between calls, but the widget may
    display incorrectly if the size changes drastically and repeatedly.

    The boolean INTERVAL informs the ProgressBar that it should be
    updated more often because it is time sensitive.

    The widgets are only visible if the screen is within a
    specified size range so the progressbar fits on both large and small
    screens.

    WARNING: Widgets can be shared between multiple progressbars so any state
    information specific to a progressbar should be stored within the
    progressbar instead of the widget.

    Variables available:
     - min_width: Only display the widget if at least `min_width` is left
     - max_width: Only display the widget if at most `max_width` is left
     - weight: Widgets with a higher `weigth` will be calculated before widgets
       with a lower one
    '''
    @abc.abstractmethod
    def __call__(self, progress, data):
        '''Updates the widget.

        progress - a reference to the calling ProgressBar
        '''
class AutoWidthWidgetBase(WidgetBase):
    '''The base class for all variable width widgets.

    This widget is much like the \\hfill command in TeX, it will expand to
    fill the line. You can use more than one in the same line, and they will
    all have the same width, and together will fill the line.
    '''
    # Unlike WidgetBase, subclasses receive the computed fill width on every
    # render pass.
    @abc.abstractmethod
    def __call__(self, progress, data, width):
        '''Updates the widget providing the total width the widget must fill.

        progress - a reference to the calling ProgressBar
        width - The total width the widget must fill
        '''
class TimeSensitiveWidgetBase(WidgetBase):
    '''The base class for all time sensitive widgets.

    Some widgets like timers would become out of date unless updated at least
    every `INTERVAL`
    '''
    # Maximum staleness tolerated before the bar should redraw this widget.
    INTERVAL = datetime.timedelta(milliseconds=100)
class FormatLabel(FormatWidgetMixin, WidgetBase):
    '''Displays a formatted label

    >>> label = FormatLabel('%(value)s', min_width=5, max_width=10)
    >>> class Progress(object):
    ...     pass
    >>> label = FormatLabel('{value} :: {value:^6}', new_style=True)
    >>> str(label(Progress, dict(value='test')))
    'test :: test '
    '''

    # template name -> (data key, optional transform) applied before
    # formatting in __call__.
    mapping = {
        'finished': ('end_time', None),
        'last_update': ('last_update_time', None),
        'max': ('max_value', None),
        'seconds': ('seconds_elapsed', None),
        'start': ('start_time', None),
        'elapsed': ('total_seconds_elapsed', utils.format_time),
        'value': ('value', None),
    }

    def __init__(self, format, **kwargs):
        FormatWidgetMixin.__init__(self, format=format, **kwargs)
        WidgetBase.__init__(self, **kwargs)

    def __call__(self, progress, data, **kwargs):
        # Populate the alias keys; missing/invalid source keys are skipped.
        for name, (key, transform) in self.mapping.items():
            try:
                if transform is None:
                    data[name] = data[key]
                else:
                    data[name] = transform(data[key])
            except (KeyError, ValueError, IndexError):  # pragma: no cover
                pass

        return FormatWidgetMixin.__call__(self, progress, data, **kwargs)
class Timer(FormatLabel, TimeSensitiveWidgetBase):
    '''WidgetBase which displays the elapsed seconds.'''
    def __init__(self, format='Elapsed Time: %(elapsed)s', **kwargs):
        FormatLabel.__init__(self, format=format, **kwargs)
        TimeSensitiveWidgetBase.__init__(self, **kwargs)

    # This is exposed as a static method for backwards compatibility
    format_time = staticmethod(utils.format_time)
class SamplesMixin(TimeSensitiveWidgetBase):
'''
Mixing for widgets that average multiple measurements
Note that samples can be either an integer or a timedelta to indicate a
certain amount of time
>>> class progress:
... last_update_time = datetime.datetime.now()
... value = 1
... extra = dict()
>>> samples = SamplesMixin(samples=2)
>>> samples(progress, None, True)
(None, None)
>>> progress.last_update_time += datetime.timedelta(seconds=1)
>>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
True
>>> progress.last_update_time += datetime.timedelta(seconds=1)
>> |
Dakhnovskiy/linguistic_analyzer_projects | analyzer_project/report/csv_report.py | Python | apache-2.0 | 987 | 0 | # -*- coding: utf-8 -*-
__author__ = 'Dmitriy.Dakhnovskiy'
import csv
from .abstract_report import AbstractReport
from .save_io_to_file_mixin import SaveIoToFileMixin
class CsvReport(AbstractReport, SaveIoToFileMixin):
    """Report rendered as a space-delimited CSV file."""

    def __init__(self, data_report, headers):
        """
        :param data_report: report rows
        :param headers: list of column headers
        """
        super().__init__(data_report, headers)
        # TODO: remove the hard-coded output file name
        # restored: attribute name was corrupted to "self.file_n | ame"
        self.file_name = 'report.csv'

    def __del__(self):
        super().__del__()

    def _make_io_report(self):
        """Write the report into io_report as CSV text."""
        writer = csv.writer(self.io_report, delimiter=' ')
        writer.writerow(self.headers)
        writer.writerows(self.data_report)

    def make_report(self):
        """Build the report and save it to a file."""
        self.save_to_file()
|
bubae/gazeAssistRecognize | lib/BING-Objectness/source/bing.py | Python | mit | 10,487 | 0.023934 | '''
Created on Jan 2, 2015
@author: alessandro
'''
import add_path
import os
import cv2
import sys
import json
import getopt
import random
import numpy as np
from filter_tig import FilterTIG
EDGE = 8
BASE_LOG = 2
MIN_EDGE_LOG = int(np.ceil(np.log(10.)/np.log(BASE_LOG)))
MAX_EDGE_LOG = int(np.ceil(np.log(500.)/np.log(BASE_LOG)))
EDGE_LOG_RANGE = MAX_EDGE_LOG - MIN_EDGE_LOG + 1
NUM_WIN_PSZ = 130
def magnitude(x,y):
    # L1 combination of the two gradient directions (|gx|+|gy| style), with
    # the Euclidean form kept commented out below -- presumably a deliberate
    # cheap approximation; confirm before "fixing". Note the inputs are NOT
    # abs'd here, so callers passing signed derivatives can get negative sums.
    #return np.sqrt(np.square(x)+np.square(y))
    return x + y
def sobel_gradient(img, ksize):
    # Grayscale Sobel derivatives combined via magnitude() (a plain sum here,
    # so the result can contain negative values).
    gray = cv2.cvtColor(img, cv2.cv.CV_BGR2GRAY)
    x = cv2.Sobel(gray,cv2.CV_64F,1,0,ksize=ksize)
    y = cv2.Sobel(gray,cv2.CV_64F,0,1,ksize=ksize)
    mag = magnitude(x,y)
    return mag
def sobel_gradient_8u(img, ksize):
    """Sobel gradient of `img` clamped to [0, 255] and cast to uint8."""
    grad = sobel_gradient(img, ksize)
    return np.clip(grad, 0, 255).astype(np.uint8)
def rgb_gradient(img):
    """Per-pixel color gradient map as uint8.

    For each direction the maximum absolute channel difference is taken:
    central differences over two pixels in the interior, one-sided
    differences doubled at the borders. The two directions are then summed
    (L1 combination, as in magnitude()) and clamped to [0, 255].
    """
    img = img.astype(float)
    rows, cols, _ = img.shape

    grad_x = np.zeros((rows, cols))
    grad_y = np.zeros((rows, cols))

    # Horizontal: border columns use doubled one-sided differences,
    # interior columns the central difference over two columns.
    grad_x[:, 0] = np.max(np.abs(img[:, 1, :] - img[:, 0, :]), axis=1) * 2
    grad_x[:, -1] = np.max(np.abs(img[:, -1, :] - img[:, -2, :]), axis=1) * 2
    grad_x[:, 1:cols - 1] = np.max(np.abs(img[:, 2:cols, :] - img[:, 0:cols - 2, :]), axis=2)

    # Vertical: same scheme applied to rows.
    grad_y[0, :] = np.max(np.abs(img[1, :, :] - img[0, :, :]), axis=1) * 2
    grad_y[-1, :] = np.max(np.abs(img[-1, :, :] - img[-2, :, :]), axis=1) * 2
    grad_y[1:rows - 1, :] = np.max(np.abs(img[2:rows, :, :] - img[0:rows - 2, :, :]), axis=2)

    # magnitude() is just an L1 sum of the two directions; inlined here.
    combined = grad_x + grad_y
    combined[combined < 0] = 0
    combined[combined > 255] = 255
    return combined.astype(np.uint8)
def get_features(img,bb, w = EDGE,h = EDGE, ksize=3, idx = None):
    # Crop bb from img, resize to w x h and return the RGB gradient map.
    # The slicing suggests bb is (x0, y0, x1, y1) with 1-based inclusive
    # coordinates -- confirm against callers. If idx is given the raw crop
    # is dumped to /tmp/<idx>.png for debugging.
    crop_img = img[bb[1]-1:bb[3], bb[0]-1:bb[2],:]
    if not idx is None:
        cv2.imwrite("/tmp/%s.png"%idx,crop_img)
    sub_img = cv2.resize(crop_img,(w,h))
    grad = rgb_gradient(sub_img)
    return grad
class FirstStagePrediction(object):
    # Stage-1 BING: scores an EDGE x EDGE gradient template (FilterTIG) over
    # a log-scale space of window sizes and returns candidate boxes.
    def __init__(self, weights_1st_stage, scale_space_sizes_idxs, num_win_psz = 130, edge = EDGE, base_log = BASE_LOG, min_edge_log = MIN_EDGE_LOG, edge_log_range = EDGE_LOG_RANGE):
        self.filter_tig = FilterTIG()
        self.weights_1st_stage = weights_1st_stage
        self.filter_tig.update(self.weights_1st_stage)
        self.filter_tig.reconstruct(self.weights_1st_stage)
        self.scale_space_sizes_idxs = scale_space_sizes_idxs
        self.base_log = base_log
        self.min_edge_log = min_edge_log
        self.edge_log_range = edge_log_range
        self.edge = edge
        # maximum number of windows kept per size
        self.num_win_psz = num_win_psz

    def predict(self, image, nss = 2):
        # Returns [((x0, y0, x1, y1), score, size_idx), ...] with 1-based
        # box origins. nss is the non-maxima-suppression neighborhood size.
        bbs = []
        img_h,img_w,nch = image.shape
        for size_idx in self.scale_space_sizes_idxs:
            # Decode the window (w, h) from the size index: the index packs
            # log-width in the low part and log-height in the high part.
            w = round(pow(self.base_log, size_idx % self.edge_log_range + self.min_edge_log))
            h = round(pow(self.base_log, size_idx // self.edge_log_range + self.min_edge_log))
            if (h > img_h * self.base_log) or (w > img_w * self.base_log):
                continue
            h = min(h, img_h)
            w = min(w, img_w)
            # Resize so a (w, h) window maps onto the edge x edge template.
            new_w = int(round(float(self.edge)*img_w/w))
            new_h = int(round(float(self.edge)*img_h/h))
            img_resized = cv2.resize(image,(new_w,new_h))
            grad = rgb_gradient(img_resized)
            match_map = self.filter_tig.match_template(grad)
            points = self.filter_tig.non_maxima_suppression(match_map, nss, self.num_win_psz, False)
            # Map template-space points back to image coordinates.
            ratio_x = w / self.edge
            ratio_y = h / self.edge
            i_max = min(len(points), self.num_win_psz)
            for i in xrange(i_max):
                point, score = points[i]
                x0 = int(round(point[0] * ratio_x))
                y0 = int(round(point[1] * ratio_y))
                x1 = min(img_w, int(x0+w))
                y1 = min(img_h, int(y0+h))
                x0 = x0 + 1
                y0 = y0 + 1
                bbs.append(((x0,y0,x1,y1), score, size_idx))
        return bbs
class SecondStagePrediction(object):
    """Stage-2 BING: calibrate stage-1 scores with per-size linear weights
    (score' = weight * score + bias) so scores of different window sizes
    become comparable."""

    def __init__(self, second_stage_weights):
        # Maps str(size_idx) -> {"weight": ..., "bias": ...}.
        self.second_stage_weights = second_stage_weights

    def predict(self, bbs):
        """Return [(calibrated_score, box), ...] for each (box, score,
        size_idx) triple whose size index has trained weights."""
        calibrated = []
        for box, raw_score, size_idx in bbs:
            try:
                params = self.second_stage_weights["%s" % size_idx]
            except:
                # A missing size_idx means its training error was empty --
                # skip the box entirely.
                continue
            calibrated.append((params["weight"] * raw_score + params["bias"], box))
        return calibrated
class Bing(object):
    # End-to-end BING objectness: stage-1 template proposals followed by
    # stage-2 per-size score calibration.
    def __init__(self, weights_1st_stage, sizes_idx, weights_2nd_stage, num_bbs_per_size_1st_stage= NUM_WIN_PSZ, num_bbs_final = 1500, edge = EDGE, base_log = BASE_LOG, min_edge_log = MIN_EDGE_LOG, edge_log_range = EDGE_LOG_RANGE):
        self.first_stage_prediction = FirstStagePrediction(weights_1st_stage, sizes_idx, num_win_psz = num_bbs_per_size_1st_stage, edge = edge, base_log = base_log, min_edge_log = min_edge_log, edge_log_range = edge_log_range)
        self.second_stage_prediction = SecondStagePrediction(weights_2nd_stage)
        self.num_bbs_final = num_bbs_final
    def predict(self, image):
        # Rank calibrated boxes by descending score and return the top
        # num_bbs_final as parallel tuples (boxes, scores).
        bbs_1st = self.first_stage_prediction.predict(image)
        bbs = self.second_stage_prediction.predict(bbs_1st)
        sorted_bbs = sorted(bbs, key = lambda x:x[0], reverse = True)
        results = [(bb[0],bb[1]) for bb in sorted_bbs[:self.num_bbs_final]]
        score_bbs, results_bbs = zip(*results)
        return results_bbs, score_bbs
def parse_cmdline_inputs():
"""
Example parameters:
{
"basepath": "/opt/Datasets/VOC2007",
"training_set_fn": "/opt/Datasets/VOC2007/ImageSets/Main/train.txt",
"test_set_fn": "/opt/Datasets/VOC2007/ImageSets/Main/test.txt",
"annotations_path": "/opt/Datasets/VOC2007/Annotations",
"images_path": "/opt/Datasets/VOC2007/JPEGImages",
"results_dir": "/opt/Datasets/VOC2007/BING_Results",
"1st_stage_weights_fn":"/opt/Datasets/VOC2007/BING_Results/weights.txt",
"2nd_stage_weights_fn": "/opt/Datasets/VOC2007/BING_Results/2nd_stage_weights.json",
"sizes_indeces_fn": "/opt/Datas | ets/VOC2007/BING_Results/sizes.txt",
"num_win_psz": 130,
"num_bbs": 1500
}
"""
try:
opts, args = getopt.getopt(sys.argv[1:], "h", ["help", "num_bbs_per_size=",
"num_bbs=" ])
except getopt.GetoptError as err:
# print help information and exit:
print str(err) # will print something like "option -a not recogn | ized"
sys.exit(2)
params_file = sys.argv[-2]
if not os.path.exists(params_file):
print "Specified file for parameters %s does not exist."%params_file
sys.exit(2)
try:
f = open(params_file, "r")
params_str = f.read()
f.close()
except Exception as e:
print "Error while reading parameters file %s. Exception: %s."%(params_file,e)
sys.exit(2)
try:
params = json.loads(params_str)
except Exception as e:
print "Error while parsing parameters json file %s. Exception: %s."%(params_file,e)
sys.exit(2)
for o, a in opts:
if o == "--help" or o =="-h":
print "python bing.py --num_bbs_per_size 130 --num_bbs 1500 /path/to/dataset/parameters.json /path/to/image.jpg"
sys.exit(0)
elif o == "--num_bbs_per_size":
try:
params["num_win_psz"] = int(a)
except Exception as e:
print "Error while converting parameter --num_bb_per_size %s to int. Exception: %s."%(a,e)
sys.exit(2)
elif o == "--num_bbs":
try:
params["num_bbs"] = int(a)
except Exception as e:
print |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.