repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/IPython/kernel/zmq/tests/test_session.py | Python | bsd-3-clause | 12,721 | 0.005817 | """test building messages with streamsession"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
import uuid
from datetime import datetime
import zmq
from zmq.tests import BaseZMQTestCase
from zmq.eventloop.zmqstream import ZMQStream
from IPython.kernel.zmq import session as ss
from IPython.testing.decorators import skipif, module_not_available
from IPython.utils.py3compat import string_types
from IPython.utils import jsonutil
def _bad_packer(obj):
raise TypeError("I don't work")
def _bad_unpacker(bytes):
raise TypeError("I don't work either")
class SessionTestCase(BaseZMQTestCase):
def setUp(self):
BaseZMQTestCase.setUp(self)
self.session = ss.Session()
class TestSession(SessionTestCase):
def test_msg(self):
"""message format"""
msg = self.session.msg('execute')
thekeys = set('header parent_header metadata content msg_type msg_id'.split())
s = set(msg.keys())
self.assertEqual(s, thekeys)
self.assertTrue(isinstance(msg['content'],dict))
self.assertTrue(isinstance(msg['metadata'],dict))
self.assertTrue(isinstance(msg['header'],dict))
self.assertTrue(isinstance(msg['parent_header'],dict))
self.assertTrue(isinstance(msg['msg_id'],str))
self.assertTrue(isinstance(msg['msg_type'],str))
self.assertEqual(msg['header']['msg_type'], 'execute')
self.assertEqual(msg['msg_type'], 'execute')
def test_serialize(self):
msg = self.session.msg('execute', content=dict(a=10, b=1.1))
msg_list = self.session.serialize(msg, ident=b'foo')
ident, msg_list = self.session.feed_identities(msg_list)
new_msg = self.session.unserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
# ensure floats don't come out as Decimal:
self.assertEqual(type(new_msg['content']['b']),type(new_msg['content']['b']))
def test_send(self):
ctx = zmq.Context.instance()
A = ctx.socket(zmq.PAIR)
B = ctx.socket(zmq.PAIR)
A.bind("inproc://test")
B.connect("inproc://test")
msg = self.session.msg('execute', content=dict(a=10))
self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
ident, msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.unserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['buffers'],[b'bar'])
content = msg['content']
header = msg['header']
parent = msg['parent_header']
metadata = msg['metadata']
msg_type = header['msg_type']
self.session.send(A, None, content=content, parent=parent,
header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
ident, msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.unserialize(msg_list)
self.assertE | qual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
| self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['buffers'],[b'bar'])
self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
ident, new_msg = self.session.recv(B)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['buffers'],[b'bar'])
A.close()
B.close()
ctx.term()
def test_args(self):
"""initialization arguments for Session"""
s = self.session
self.assertTrue(s.pack is ss.default_packer)
self.assertTrue(s.unpack is ss.default_unpacker)
self.assertEqual(s.username, os.environ.get('USER', u'username'))
s = ss.Session()
self.assertEqual(s.username, os.environ.get('USER', u'username'))
self.assertRaises(TypeError, ss.Session, pack='hi')
self.assertRaises(TypeError, ss.Session, unpack='hi')
u = str(uuid.uuid4())
s = ss.Session(username=u'carrot', session=u)
self.assertEqual(s.session, u)
self.assertEqual(s.username, u'carrot')
def test_tracking(self):
"""test tracking messages"""
a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
s = self.session
s.copy_threshold = 1
stream = ZMQStream(a)
msg = s.send(a, 'hello', track=False)
self.assertTrue(msg['tracker'] is ss.DONE)
msg = s.send(a, 'hello', track=True)
self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
M = zmq.Message(b'hi there', track=True)
msg = s.send(a, 'hello', buffers=[M], track=True)
t = msg['tracker']
self.assertTrue(isinstance(t, zmq.MessageTracker))
self.assertRaises(zmq.NotDone, t.wait, .1)
del M
t.wait(1) # this will raise
def test_unique_msg_ids(self):
"""test that messages receive unique ids"""
ids = set()
for i in range(2**12):
h = self.session.msg_header('test')
msg_id = h['msg_id']
self.assertTrue(msg_id not in ids)
ids.add(msg_id)
def test_feed_identities(self):
"""scrub the front for zmq IDENTITIES"""
theids = "engine client other".split()
content = dict(code='whoda',stuff=object())
themsg = self.session.msg('execute',content=content)
pmsg = theids
def test_session_id(self):
session = ss.Session()
# get bs before us
bs = session.bsession
us = session.session
self.assertEqual(us.encode('ascii'), bs)
session = ss.Session()
# get us before bs
us = session.session
bs = session.bsession
self.assertEqual(us.encode('ascii'), bs)
# change propagates:
session.session = 'something else'
bs = session.bsession
us = session.session
self.assertEqual(us.encode('ascii'), bs)
session = ss.Session(session='stuff')
# get us before bs
self.assertEqual(session.bsession, session.session.encode('ascii'))
self.assertEqual(b'stuff', session.bsession)
def test_zero_digest_history(self):
session = ss.Session(digest_history_size=0)
for i in range(11):
session._add_digest(uuid.uuid4().bytes)
self.assertEqual(len(session.digest_histor |
jaycrossler/procyon | procyon/starcatalog/tests.py | Python | mit | 1,921 | 0.003644 | from django.core.urlresolvers import reverse
from django.test import Client, TestCase
from procyon.starcatalog.models import Star
class StarCatalog(TestCase):
def setUp(self):
star1 = Star.objects.create(**{"HR": None, "spectrum": "G5", "Y": -28.6581, "VY": 5.7579e-05, "HD": None, "HIP": 70767, "RV": None, "proper_name": 'Kelda', "RA": 14.47259311, "distance_parsecs": 73.8007380073801, "abs_mag": 5.52969647605212, "gliese": None, "PMDec": -28.21, "mag": 9.87, "color_index": 0.753, "VX": -3.3849e-05, "X": -37.90807, "VZ": -6.498e-06, "Z": -56.46449, "PMRA": -185.44, "bayer_flamsteed": None, "dec": -49.91535182})
star2 = Star.objects.create(**{"HR": None, "spectrum": "F5V", "Y": -37.48622, "VY": 2.3793e-05, "HD": 126819, "HIP": 70768, "RV": None, "proper_name": 'Chloe', "RA": 14.47315865, "distance_parsecs": 67.5219446320054, "abs_mag": 3.76277529260604, "gliese": None, "PMDec": -50.55, "mag": 7.91, "color_index": 0.506, "VX": -9.883e-06, "X": -49.57038, "VZ": -1.5229e-05, "Z": -26.39644, "PMRA": -76.19, "bayer_flamsteed": None, "dec": -23.01246733})
star3 = Star.objects.create(**{"HR": None, "spectrum": "M3/M4III", "Y": -232.61189, "VY": 1.4617e-05, "HD": 126743, "HIP": 70769, "RV": None, "proper_name": 'Jasper', "RA": 14.47365036, "distance_parsecs": 500.0, "abs_mag": -0.414850021680092, "gliese": None, "PMDec": -17.82, "mag": 8.08, "color_index": 1.465, "VX": 2.3422e-05, "X": -307.515, "VZ": -3.3309e-05, "Z": -318.31781, "PMRA": 1.02, "bayer_flamsteed": None, "dec": -39.54140198})
def test_search_vie | w(self):
"""
Tests the search view.
"""
c = Client()
| term = 'kelda'
db_lookup = Star.objects.filter(proper_name__icontains=term)
response = c.get(reverse('star-list-no-id') + '?q={term}'.format(term=term))
for i in db_lookup:
self.assertTrue(i in response.context['items'])
|
oser-cs/oser-website | tests/test_core/test_address.py | Python | gpl-3.0 | 1,331 | 0 | """Address model tests."""
from core.models import Address
from core.factory import AddressFactory
from tests.utils import ModelTestCase
class AddressTest(ModelTestCase):
"""Test the Address model."""
model = Address
field_tests = {
'line1': {
'verbose_name': 'ligne 1',
'blank': False,
'max_length': 300,
},
'line2': {
'verbose_name': 'ligne 2',
'max_length': 300,
'blank': True,
'default': '',
},
'post_code': {
'verbose_name': 'code postal',
'blank': False,
'max_length': 20,
},
'city': {
'verbose_name': 'ville',
'blank': False,
'max_length': 100,
},
'country': {
'verbose_name': 'pays',
'blank': False,
'default': 'FR',
},
}
model_tests = {
'verbose_name': 'adresse',
}
@classmethod
def setUpTestD | ata(cls):
cls.obj = AddressFactory.create(
line1='3 Rue Joliot Curie',
post_code='91190',
city='Gi | f-sur-Yvette',
)
def test_str(self):
expected = '3 Rue Joliot Curie, 91190 Gif-sur-Yvette, France'
self.assertEqual(expected, str(self.obj))
|
MOOCworkbench/MOOCworkbench | experiments_manager/migrations/0002_auto_20170502_0952.py | Python | mit | 2,437 | 0.003283 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-02 09:52
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('cookiecutter_manager', '0002_auto_20170502_0952'),
('requirements_manager', '0001_initial'),
('marketplace', '0002_auto_20170502_0952'),
('pylint_manager', '0001_initial'),
('experiments_manager', '0001_initial'),
('build_manager', '0001_initial'),
('user_manager', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='experiment',
name='language',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='marketplace.Language'),
),
migrations.AddField(
model_name='experiment',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user_manager.WorkbenchUser'),
),
migrations.AddField(
model_name='experiment',
name='pylint',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='pylint_manager.PylintScan'),
),
migrations.AddField(
model_name='experiment',
name='requirements',
field=models.ManyToManyField(to='requirements_manager.Requirement'),
),
migrations.AddField(
model_name='experiment',
name='template',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cookiecutter_manager.CookieCutterTemplate'),
),
migrations.AddField(
model_name='experiment',
| name='travis',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='build_manager | .TravisInstance'),
),
migrations.AddField(
model_name='chosenexperimentsteps',
name='experiment',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='experiments_manager.Experiment'),
),
migrations.AddField(
model_name='chosenexperimentsteps',
name='step',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='experiments_manager.ExperimentStep'),
),
]
|
zuun77/givemegoogletshirts | codejam/2020/qual/q4.py | Python | apache-2.0 | 411 | 0.007299 | import sys
def solve(B):
ans = [0]*B
guess = 1
for _ in range(10):
print(guess)
sys.stdout.flush()
n = int(input().strip())
ans[guess-1] = n
guess += 1
print("". | join(map(str, ans)))
sys.stdout.flush()
result = input()
if result == "N":
sys.exit()
return
T, | B = map(int, input().split())
for case in range(1, T+1):
solve(B)
|
plusky/spec-cleaner | spec_cleaner/rpminstall.py | Python | bsd-3-clause | 1,588 | 0.001259 | # vim: set ts=4 sw=4 et: coding=UTF-8
from .rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
line = self._complete_cleanup(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
line = self.reg.re_jobs. | sub(' %{?_smp_mflags}', line)
if not self.minimal:
line = self._replace_remove_la(line)
line = self._replace_install_command(line)
Section.add(self, line)
def _replace_install_command(self, line):
"""
Replace various install commands with one unified mutation
"""
make_install = '%make_install'
# do not use install macros as we have t | rouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = make_install
# we can deal with additional params for %makeinstall so replace that
line = line.replace('%makeinstall', make_install)
return line
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
|
anurag03/integration_tests | cfme/tests/integration/test_aws_iam_auth_and_roles.py | Python | gpl-2.0 | 4,448 | 0.003372 | import pytest
from deepdiff import DeepDiff
from cfme.roles import role_access_ui_59z, role_access_ui_510z
from cfme.utils.appliance import ViaUI, find_appliance
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ
from cfme.utils.conf import credentials
from cfme.utils.log import logger
from cfme.utils.version import VersionPicker, Version
def pytest_generate_tests(metafunc):
"""
Build a list of tuples containing (group_name, context)
Returns:
tuple containing (group_name, context)
where group_name is a string and context is ViaUI/SSUI
"""
appliance = find_appliance(metafunc)
parameter_list = []
id_list = []
# TODO: Include SSUI role_access dict and VIASSUI context
role_access_ui = VersionPicker({
Version.lowest(): role_access_ui_59z,
'5.10': role_access_ui_510z
}).pick(appliance.version)
logger.info('Using the role access dict: %s', role_access_ui)
roles_and_context = [(
role_access_ui, ViaUI)
]
for role_access, context in roles_and_context:
for group in role_access.keys():
parameter_list.append((group, role_access, context))
id_list.append('{}-{}'.format(group, context))
metafunc.parametrize('group_name, role_access, context', parameter_list)
@pytest.mark.tier(2)
@pytest.mark.uncollectif(lambda appliance: appliance.is_dev, reason="Is a rails server")
@pytest.mark.meta(blockers=[
BZ(1530683,
unblock=lambda group_name: group_name not in
['evmgroup-user', 'evmgroup-approver', 'evmgroup-auditor', 'evmgroup-operator',
'evmgroup-support', 'evmgroup-security']),
BZ(1590398, forced_streams=['5.9'])
])
def test_group_roles(appliance, setup_aws_auth_provider, group_name, role_access, context,
soft_assert):
"""Basic default AWS_IAM group role auth + RBAC test
Validates expected menu and submenu names are present for default
AWS IAM groups
NOTE: Only tests vertical navigation tree at the moment, not accordions within the page
"""
group_access = role_access[group_name]
try:
iam_group_name = group_name + '_aws_iam'
username = credentials[iam_group_name]['username']
password = credentials[iam_group_name]['password']
fullname = credentials[iam_group_name]['fullname']
except KeyError:
pytest.fail('No match in credentials file for group "{}"'.format(iam_group_name))
with appliance.context.use(context):
# fullname overrides user.name attribute, but doesn't impact login with username credential
user = appliance.collections.users.simple_user(username, password, fullname=fullname)
with user:
view = navigate_to(appliance.server, 'LoggedIn')
assert appliance.server.current_full_name() == user.name
assert group_name.lower() in [name.lower() for name in appliance.server.group_names()]
nav_visible = view.navigation.nav_item_tree()
# RFE BZ 1526495 shows up as an extra requests link in nav
# TODO BZ remove assert skip when BZ is fixed in 59z
bz = BZ(1526495,
forced_streams=['5.8', '5.9'],
unblock=lambda group_name: group_name not in
| ['evmgroup-user', 'evmgroup-approver', 'evmgroup-desktop', 'evmgroup-vm_user',
'evmgroup-administrator', 'evmgroup-s | uper_administrator'])
for area in group_access.keys():
# using .get() on nav_visibility because it may not have `area` key
diff = DeepDiff(group_access[area], nav_visible.get(area, {}),
verbose_level=0, # If any higher, will flag string vs unicode
ignore_order=True)
nav_extra = diff.get('iterable_item_added')
if nav_extra and 'Requests' in nav_extra.values() and bz.blocks:
logger.warning('Skipping RBAC verification for group "%s" in "%s" due to %r',
group_name, area, bz)
continue
else:
soft_assert(diff == {}, '{g} RBAC mismatch (expected first) for {a}: {d}'
.format(g=group_name, a=area, d=diff))
appliance.server.login_admin()
assert user.exists
|
sramana/rtc-schedule | uttar_pradesh.py | Python | unlicense | 2,884 | 0.001734 | """Timetable of buses operated by Uttar Pradesh S | tate Road Transport Corporation (UPSRTC)
"""
from datetime import datetime
import re
import scraperwiki
import | lxml.html
base_url = 'http://www.upsrtc.com/online/query/'
services = dict()
def find_by_text(element, selector, text):
for a in element.cssselect(selector):
if text in a.text_content():
return a
def clean(items):
cleaned = []
for item in items:
# Remove non-ascii characters
item = unicode(item).encode("ascii", "ignore")
# Remove escaped html characters like
item = re.sub(r'&\S+;', '', item)
# Remove extra whitespace
item = ' '.join(item.split())
# Change NULL to empty string
item = item or ''
# Convert to int if numeric
try:
item = int(item)
except Exception:
pass
cleaned.append(item)
return cleaned
def get_services():
url = base_url + 'ser_sch_query.asp'
html = scraperwiki.scrape(url)
root = lxml.html.fromstring(html)
options = root.cssselect('form[name=frm2] select option')
for option in options:
id = option.get('value')
service_number = option.text_content()
services[id] = service_number
def get_schedules():
for service in sorted(services):
print "Processing", service
url = base_url + 'QueryByServiceNo.asp'
params = dict(selSerNo=service)
try:
html = scraperwiki.scrape(url, params)
except Exception as e:
print "Exception while processing", service
print e
continue
root = lxml.html.fromstring(html)
table = find_by_text(root, '.tt table', 'Service Name')
meta_row = find_by_text(table, 'tr', 'Service Name')
meta_data = [b.text_content() for b in meta_row.cssselect('b')]
rows = table.cssselect('tr')
del rows[0:5] # First five rows have just meta-data
del rows[-1] # Last row is empty
data = []
for row in rows:
cells = [td.text_content() for td in row.cssselect('td')]
cells = clean(cells)
meta_data = clean(meta_data)
rec = dict()
rec['service_number'] = meta_data[0]
rec['service_name'] = meta_data[1]
rec['service_type'] = meta_data[2]
rec['stop_name'] = cells[0]
rec['arrival_time'] = cells[1][:5].replace('origi','')
rec['departure_time'] = cells[2][:5]
rec['distance_in_km_from_source'] = cells[3]
rec['fare_in_rupees_from_source'] = cells[4]
rec['crawled_on'] = datetime.now()
data.append(rec)
scraperwiki.sqlite.save(data=data, unique_keys=['service_number', 'stop_name', 'departure_time'])
get_services()
get_schedules()
|
beyondmetis/scikit-video | skvideo/motion/block.py | Python | bsd-3-clause | 38,685 | 0.002714 | import numpy as np
import os
import time
from ..utils import *
def _costMAD(block1, block2):
block1 = block1.astype(np.float)
block2 = block2.astype(np.float)
return np.mean(np.abs(block1 - block2))
def _minCost(costs):
h, w = costs.shape
if costs[h/2, w/2] == 0:
return np.int((h-1)/2), np.int((w-1)/2), 0
idx = np.unravel_index(np.argmin(costs), costs.shape)
return np.int(idx[0]), np.int(idx[1]), costs[idx]
def _checkBounded(xval, yval, w, h, mbSize):
if ((yval < 0) or
(yval + mbSize >= h) or
(xval < 0) or
(xval + mbSize >= w)):
return False
else:
return True
def _DS(imgP, imgI, mbSize, p):
# Computes motion vectors using Diamond Search method
#
# Input
# imgP : The image for which we want to find motion vectors
# imgI : The reference image
# mbSize : Size of the macroblock
# p : Search parameter (read literature to find what this means)
#
# Ouput
# motionVect : the motion vectors for each integral macroblock in imgP
# DScomputations: The average number of points searched for a macroblock
h, w = imgP.shape
vectors = np.zeros((h / mbSize, w / mbSize, 2))
costs = np.ones((9))*65537
L = np.floor(np.log2(p + 1))
LDSP = []
LDSP.append([0, -2])
LDSP.append([-1, -1])
LDSP.append([1, -1])
LDSP.append([-2, 0])
LDSP.append([0, 0])
LDSP.append([2, 0])
LDSP.append([-1, 1])
LDSP.append([1, 1])
LDSP.append([0, 2])
SDSP = []
SDSP.append([0, -1])
SDSP.append([-1, 0])
SDSP.append([0, 0])
SDSP.append([1, 0])
SDSP.append([0, 1])
computations = 0
for i in range(0, h - mbSize + 1, mbSize):
for j in range(0, w - mbSize + 1, mbSize):
x = j
y = i
costs[4] = _costMAD(imgP[i:i + mbSize, j:j + mbSize], imgI[i:i + mbSize, j:j + mbSize])
cost = 0
point = 4
if costs[4] != 0:
computations += 1
for k in range(9):
refBlkVer = y + LDSP[k][1]
refBlkHor = x + LDSP[k][0]
if not _checkBounded(refBlkHor, refBlkVer, w, h, mbSize):
continue
if k == 4:
continue
costs[k] = _costMAD(imgP[i:i + mbSize, j:j + mbSize], imgI[refBlkVer:refBlkVer + mbSize, refBlkHor:refBlkHor + mbSize])
computations += 1
point = np.argmin(costs)
cost = costs[point]
SDSPFlag = 1
if point != 4:
SDSPFlag = 0
cornerFlag = 1
if (np.abs(LDSP[point][0]) == np.abs(LDSP[point][1])):
cornerFlag = 0
xLast = x
yLast = y
x = x + LDSP[point][0]
y = y + LDSP[point][1]
costs[:] = 65537
costs[4] = cost
while SDSPFlag == 0:
if cornerFlag == 1:
for k in range(9):
refBlkVer = y + LDSP[k][1]
refBlkHor = x + LDSP[k][0]
if not _checkBounded(refBlkHor, refBlkVer, w, h, mbSize):
continue
if k == 4:
continue
if ((refBlkHor >= xLast - 1) and
(refBlkHor <= xLast + 1) and
(refBlkVer >= yLast - 1) and
(refBlkVer <= yLast + 1)):
continue
elif ((refBlkHor < j-p) or
(refBlkHor > j+p) or
(refBlkVer < i-p) or
(refBlkVer > i+p)):
continue
else:
costs[k] = _costMAD(imgP[i:i + mbSize, j:j + mbSize], imgI[refBlkVer:refBlkVer + mbSize, refBlkHor:refBlkHor + mbSize])
computations += 1
else:
lst = []
if point == 1:
lst = np.array([0, 1, 3])
elif point == 2:
lst = np.array([0, 2, 5])
elif point == 6:
lst = np.array([3, 6, 8])
elif point == 7:
lst = np.array([5, 7, 8])
for idx in lst:
refBlkVer = y + LDSP[idx][1]
refBlkHor = x + LDSP[idx][0]
if not _checkBounded(refBlkHor, refBlkVer, w, h, mbSize):
continue
elif ((refBlkHor < j - p) or
(refBlkHor > j + p) or
(refBlkVer < i - p) or
(refBlkVer > i + p)):
| continue
else:
costs[idx] = _costMAD(imgP[i:i + mbSize, j:j + mbSize], imgI[refBlkVer:refBlkVer + mbSize, refBlkHor:refBlkHor + mbSize])
computations += 1
point = np.argmin(costs)
cost = costs[point]
SDSPFlag = 1
if point != 4:
SDSPFlag = 0
cornerFlag = 1
| if (np.abs(LDSP[point][0]) == np.abs(LDSP[point][1])):
cornerFlag = 0
xLast = x
yLast = y
x += LDSP[point][0]
y += LDSP[point][1]
costs[:] = 65537
costs[4] = cost
costs[:] = 65537
costs[2] = cost
for k in range(5):
refBlkVer = y + SDSP[k][1]
refBlkHor = x + SDSP[k][0]
if not _checkBounded(refBlkHor, refBlkVer, w, h, mbSize):
continue
elif ((refBlkHor < j - p) or
(refBlkHor > j + p) or
(refBlkVer < i - p) or
(refBlkVer > i + p)):
continue
if k == 2:
continue
costs[k] = _costMAD(imgP[i:i + mbSize, j:j + mbSize], imgI[refBlkVer:refBlkVer + mbSize, refBlkHor:refBlkHor + mbSize])
computations += 1
point = 2
cost = 0
if costs[2] != 0:
point = np.argmin(costs)
cost = costs[point]
x += SDSP[point][0]
y += SDSP[point][1]
vectors[i / mbSize, j / mbSize, :] = [x - j, y - i]
costs[:] = 65537
return vectors, computations / ((h * w) / mbSize**2)
def _ARPS(imgP, imgI, mbSize, p):
# Computes motion vectors using Adaptive Rood Pattern Search method
#
# Input
# imgP : The image for which we want to find motion vectors
# imgI : The reference image
# mbSize : Size of the macroblock
# p : Search parameter (read literature to find what this means)
#
# Ouput
# motionVect : the motion vectors for each integral macroblock in imgP
# ARPScomputations: The average number of points searched for a macroblock
h, w = imgP.shape
vectors = np.zeros((h / mbSize, w / mbSize, 2))
costs = np.ones((6))*65537
SDSP = []
SDSP.append([0, -1])
SDSP.append([-1, 0])
SDSP.append([0, 0])
SDSP.append([1, 0])
SDSP.append([0, 1])
LDSP = {}
checkMatrix = np.zeros((2 * p + 1, 2 * p + 1))
computations = 0
for i in range(0, h - mbSize + 1, mbSize):
for j in range(0, w - mbSize + 1, mbSize):
x = j
y = i
costs[2] = _costMAD(imgP[i:i + mbSize, j:j + mbSize], imgI[i:i + mbSize, j:j + mbSize])
checkMatrix[p, p] = 1
computations += 1
if (j == 0):
stepSize = 2
maxIndex = 5
else:
u = vectors[i / mbSize, j / mb |
google/clusterfuzz | src/clusterfuzz/_internal/tests/core/bot/tasks/impact_task_test.py | Python | apache-2.0 | 35,361 | 0.002375 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""impact_task tests."""
import unittest
import mock
from clusterfuzz._internal.bot.tasks import impact_task
from clusterfuzz._internal.build_management import build_manager
from clusterfuzz._internal.datastore import data_types
from clusterfuzz._internal.tests.core.bot.tasks.component_revision_patching_test import \
ComponentRevisionPatchingTest
from clusterfuzz._internal.tests.test_libs import helpers
from clusterfuzz._internal.tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class ExecuteTaskTest(unittest.TestCase):
"""Test execute_task."""
def setUp(self):
helpers.patch(self, [
'clusterfuzz._internal.base.utils.is_chromium',
'clusterfuzz._internal.bot.tasks.impact_task.get_impacts_from_url',
'clusterfuzz._internal.bot.tasks.impact_task.get_impacts_on_prod_builds',
'clusterfuzz._internal.bot.tasks.setup.setup_testcase',
'clusterfuzz._internal.build_management.build_manager.is_custom_binary',
'clusterfuzz._internal.build_management.build_manager.has_production_builds',
'clusterfuzz._internal.bot.testcase_manager.get_command_line_for_application',
'clusterfuzz._internal.base.tasks.add_task',
])
impacts = impact_task.Impacts(
stable=impact_task.Impact('stable', False, 'trace-stable'),
beta=impact_task.Impact('beta', True, 'trace-beta'),
extended_stable=impact_task.Impact('extended stable', False,
'trace-extended-stable'),
head=impact_task.Impact('head', False, 'trace-head'))
self.mock.is_chromium.return_value = True
self.mock.is_custom_binary.return_value = False
self.mock.has_production_builds.return_value = True
self.mock.get_impacts_from_url.return_value = impacts
self.mock.setup_testcase.return_value = (['a'], None, 'path')
self.mock.get_impacts_on_prod_builds.return_value = impacts
self.testcase = data_types.Testcase()
self.testcase.is_impact_set_flag = False
self.testcase.status = 'Processed'
self.testcase.crash_stacktrace = 'trace'
self.testcase.regression = '123:456'
| self.testcase.job_type = 'job2'
self.testcase.project_name = 'chromium'
self.testcase.put()
def reload(self):
"""Reload testcase."""
self.t | estcase = self.testcase.key.get()
def expect_unchanged(self):
"""Expect testcase's impacts to be unchanged."""
self.reload()
self.assertIsNone(self.testcase.impact_stable_version)
self.assertIsNone(self.testcase.impact_beta_version)
self.assertIsNone(self.testcase.impact_head_version)
def expect_changed(self):
"""Expect testcase's impacts to be changed."""
self.reload()
self.assertTrue(self.testcase.is_impact_set_flag)
self.assertEqual('extended stable',
self.testcase.impact_extended_stable_version)
self.assertFalse(self.testcase.impact_stable_version_likely)
self.assertEqual('stable', self.testcase.impact_stable_version)
self.assertFalse(self.testcase.impact_stable_version_likely)
self.assertEqual('beta', self.testcase.impact_beta_version)
self.assertTrue(self.testcase.impact_beta_version_likely)
self.assertEqual('head', self.testcase.impact_head_version)
self.assertFalse(self.testcase.impact_head_version_likely)
def test_bail_out_non_chromium(self):
"""Test bailing out for non chromium projects."""
self.mock.is_chromium.return_value = False
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
def test_bail_out_fixed(self):
"""Test bailing out when the testcase is fixed."""
self.testcase.fixed = 'Yes'
self.testcase.is_impact_set_flag = True
self.testcase.put()
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
def test_bail_out_status_unreproducible(self):
"""Test bailing out when the testcase status is unreproducible (never
reproduced)."""
self.testcase.status = 'Unreproducible'
self.testcase.is_impact_set_flag = True
self.testcase.put()
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
def test_bail_out_custom_binary(self):
"""Test bailing out for custom binary."""
self.mock.is_custom_binary.return_value = True
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
def test_bail_out_unreproducible(self):
"""Test bailing out when the testcase is unreproducible (reproduced once,
but flaky)."""
self.testcase.one_time_crasher_flag = True
self.testcase.put()
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
def test_bail_out_non_prod_build_and_no_regression_range(self):
"""Test bailing out when reproducible testcase does not have a regression
range yet and we dont have production builds to test."""
self.testcase.one_time_crasher_flag = False
self.testcase.regression = ''
self.testcase.put()
self.mock.has_production_builds.return_value = False
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
def test_non_prod_build(self):
"""Test getting impact for non-prod build."""
self.mock.has_production_builds.return_value = False
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_changed()
self.mock.get_impacts_from_url.assert_has_calls(
[mock.call(self.testcase.regression, self.testcase.job_type)])
def test_bail_out_setup_testcase(self):
"""Test bailing out when setting up testcase fails."""
self.mock.has_production_builds.return_value = True
self.mock.setup_testcase.return_value = ([], None, 'path')
impact_task.execute_task(self.testcase.key.id(), 'job')
self.expect_unchanged()
    def test_build_failed_exception(self):
        """Test when BuildFailedException occurs."""
        self.mock.get_impacts_on_prod_builds.side_effect = (
            impact_task.BuildFailedException('error-from-build'))
        impact_task.execute_task(self.testcase.key.id(), 'job')
        self.expect_unchanged()
        # The failure reason must be recorded in the testcase comments ...
        self.assertIn('error-from-build', self.testcase.comments)
        self.assertIn(data_types.TaskState.ERROR, self.testcase.comments)
        # ... and the task re-queued for a later retry.
        self.mock.add_task.assert_has_calls(
            [mock.call('impact', self.testcase.key.id(), 'job', wait_time=None)])
    def test_prod_build(self):
        """Test getting impact for prod build."""
        impact_task.execute_task(self.testcase.key.id(), 'job')
        self.expect_changed()
        self.assertIn(data_types.TaskState.FINISHED, self.testcase.comments)
        # Reproducible testcases must not have extra stacktraces appended.
        self.assertNotIn('trace-stable', self.testcase.crash_stacktrace)
        self.assertNotIn('trace-beta', self.testcase.crash_stacktrace)
        self.mock.get_impacts_on_prod_builds.assert_has_calls(
            [mock.call(mock.ANY, 'path')])
    def test_prod_build_unreproducible(self):
        """Test getting impact for prod build (unreproducible)."""
        self.testcase.status = 'Unreproducible'
        self.testcase.put()
        impact_task.execute_task(self.testcase.key.id(), 'job')
        self.expect_changed()
        self.assertIn(data_types.TaskState.FINISHED, self.testcase.comments)
        # Unreproducible testcases get the per-channel stacktraces appended.
        self.assertIn('trace-stable', self.testcase.crash_stacktrace)
        self.assertIn('trace-beta', self.testcase.crash_stacktrace)
        self.mock.get_impacts_on_prod_builds.assert_has_calls(
            [mock.call(mock.ANY, 'path')])
class GetImpactsFromUrlTest(ComponentRevisionPatchingTest):
"""Test get_impacts_from_url."""
def setUp(self):
"""Setup for get impacts from url test."""
super().setUp()
helpers.pat |
tylercrompton/streams | tests/__init__.py | Python | gpl-3.0 | 654 | 0 | # This file is part of Streams.
#
# Streams is free software: you can redistribute it and/or modify it
# u | nder the terms of the GNU General Public License as published by the
# Free Softwa | re Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# Streams is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Streams. If not, see <https://www.gnu.org/licenses/>.
|
stefanklug/mapnik | scons/scons-local-2.3.6/SCons/Tool/MSCommon/sdk.py | Python | lgpl-2.1 | 14,900 | 0.006711 | #
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
__revision__ = "src/engine/SCons/Tool/MSCommon/sdk.py rel_2.3.5:3347:d31d5a4e74b6 2015/07/31 14:36:10 bdbaddog"
__doc__ = """Module to detect the Platform/Windows SDK
PSDK 2003 R1 is the earliest version detected.
"""
import os
import | SCons.Errors
import SCons.Util
import common
debug = common.debug
# SDK Checks. This is of course a mess as everything else on MS platforms. Here
# is what we do to detect the SDK:
#
# For Windows SDK >= 6.0: just look into the registry entries:
# HKLM\Software\Microsoft\Microsoft SDKs\Windows
# All the keys in there are the available versions.
#
# For Platform SDK before 6.0 (2003 server R1 and R2, etc...), there does not
# seem to be any sane registry key, so the precise location is hardcoded.
#
# For versions below 2003R1, it seems the PSDK is included with Visual Studio?
#
# Also, per the following:
# http://benjamin.smedbergs.us/blog/tag/atl/
# VC++ Professional comes with the SDK, VC++ Express does not.
# Location of the SDK (checked for 6.1 only)
_CURINSTALLED_SDK_HKEY_ROOT = \
r"Software\Microsoft\Microsoft SDKs\Windows\CurrentInstallFolder"
class SDKDefinition(object):
    """
    An abstract base class for trying to find installed SDK directories.

    Subclasses supply HKEY_FMT (a registry-key format string) and the
    data interpolated into it, plus a sanity_check_file used to verify
    that a registry hit actually points at a real SDK installation.
    """
    def __init__(self, version, **kw):
        # version: SDK version string, e.g. '7.1'.  All remaining keyword
        # arguments (sanity_check_file, include_subdir, lib_subdir,
        # vc_setup_scripts, ...) become instance attributes directly.
        self.version = version
        self.__dict__.update(kw)

    def find_sdk_dir(self):
        """Try to find the MS SDK from the registry.

        Return None if failed or the directory does not exist.
        """
        if not SCons.Util.can_read_reg:
            debug('find_sdk_dir(): can not read registry')
            return None
        hkey = self.HKEY_FMT % self.hkey_data
        debug('find_sdk_dir(): checking registry:%s'%hkey)

        try:
            sdk_dir = common.read_reg(hkey)
        except WindowsError, e:
            # Key absent: this SDK version is simply not installed.
            debug('find_sdk_dir(): no SDK registry key %s' % repr(hkey))
            return None

        debug('find_sdk_dir(): Trying SDK Dir: %s'%sdk_dir)

        if not os.path.exists(sdk_dir):
            debug('find_sdk_dir(): %s not on file system' % sdk_dir)
            return None

        # Registry said the SDK is there; confirm with a known file.
        ftc = os.path.join(sdk_dir, self.sanity_check_file)
        if not os.path.exists(ftc):
            debug("find_sdk_dir(): sanity check %s not found" % ftc)
            return None

        return sdk_dir

    def get_sdk_dir(self):
        """Return the MSSSDK given the version string."""
        # Memoize: the registry lookup is only done once per instance.
        try:
            return self._sdk_dir
        except AttributeError:
            sdk_dir = self.find_sdk_dir()
            self._sdk_dir = sdk_dir
            return sdk_dir

    def get_sdk_vc_script(self,host_arch, target_arch):
        """ Return the script to initialize the VC compiler installed by SDK
        """
        if (host_arch == 'amd64' and target_arch == 'x86'):
            # No cross tools needed compiling 32 bits on 64 bit machine
            host_arch=target_arch

        arch_string=target_arch
        if (host_arch != target_arch):
            # Cross-compile setups use a combined "host_target" key.
            arch_string='%s_%s'%(host_arch,target_arch)

        debug("sdk.py: get_sdk_vc_script():arch_string:%s host_arch:%s target_arch:%s"%(arch_string,
                                                                                        host_arch,
                                                                                        target_arch))
        # May return None if this SDK has no script for the architecture.
        file=self.vc_setup_scripts.get(arch_string,None)
        debug("sdk.py: get_sdk_vc_script():file:%s"%file)
        return file
class WindowsSDK(SDKDefinition):
    """
    A subclass for trying to find installed Windows SDK directories.

    Windows SDKs (>= 6.0) register themselves under a per-version key,
    so the version string itself is interpolated into HKEY_FMT.
    """
    HKEY_FMT = r'Software\Microsoft\Microsoft SDKs\Windows\v%s\InstallationFolder'

    def __init__(self, *args, **kw):
        SDKDefinition.__init__(self, *args, **kw)
        self.hkey_data = self.version
class PlatformSDK(SDKDefinition):
    """
    A subclass for trying to find installed Platform SDK directories.

    Pre-6.0 Platform SDKs are keyed by an install UUID rather than a
    version string, so callers must pass a ``uuid`` keyword argument.
    """
    HKEY_FMT = r'Software\Microsoft\MicrosoftSDK\InstalledSDKS\%s\Install Dir'

    def __init__(self, *args, **kw):
        SDKDefinition.__init__(self, *args, **kw)
        self.hkey_data = self.uuid
#
# The list of VC initialization scripts installed by the SDK
# These should be tried if the vcvarsall.bat TARGET_ARCH fails
# Per-architecture vcvars scripts for SDKs older than 6.1: everything
# lives directly under bin\.
preSDK61VCSetupScripts = { 'x86'      : r'bin\vcvars32.bat',
                           'amd64'    : r'bin\vcvarsamd64.bat',
                           'x86_amd64': r'bin\vcvarsx86_amd64.bat',
                           'x86_ia64' : r'bin\vcvarsx86_ia64.bat',
                           'ia64'     : r'bin\vcvarsia64.bat'}

# SDK 6.1 moved the non-x86 scripts into per-architecture subdirectories.
SDK61VCSetupScripts = {'x86'      : r'bin\vcvars32.bat',
                       'amd64'    : r'bin\amd64\vcvarsamd64.bat',
                       'x86_amd64': r'bin\x86_amd64\vcvarsx86_amd64.bat',
                       'x86_ia64' : r'bin\x86_ia64\vcvarsx86_ia64.bat',
                       'ia64'     : r'bin\ia64\vcvarsia64.bat'}

# SDK 7.0 flattened the layout again and renamed the amd64 script.
SDK70VCSetupScripts = { 'x86'      : r'bin\vcvars32.bat',
                        'amd64'    : r'bin\vcvars64.bat',
                        'x86_amd64': r'bin\vcvarsx86_amd64.bat',
                        'x86_ia64' : r'bin\vcvarsx86_ia64.bat',
                        'ia64'     : r'bin\vcvarsia64.bat'}
# The list of support SDKs which we know how to detect.
#
# The first SDK found in the list is the one used by default if there
# are multiple SDKs installed. Barring good reasons to the contrary,
# this means we should list SDKs with from most recent to oldest.
#
# If you update this list, update the documentation in Tool/mssdk.xml.
SupportedSDKList = [
WindowsSDK('7.1',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK70VCSetupScripts,
),
WindowsSDK('7.0A',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK70VCSetupScripts,
),
WindowsSDK('7.0',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK70VCSetupScripts,
),
WindowsSDK('6.1',
sanity_check_file=r'bin\SetEnv.Cmd',
include_subdir='include',
lib_subdir={
'x86' : ['lib'],
'x86_64' : [r'lib\x64'],
'ia64' : [r'lib\ia64'],
},
vc_setup_scripts = SDK61VCSetupScripts,
),
WindowsSDK('6.0A',
|
imbasimba/astroquery | astroquery/simbad/tests/test_simbad.py | Python | bsd-3-clause | 17,976 | 0.000056 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import re
import six
import pytest
import astropy.units as u
from astropy.table import Table
import numpy as np
from ... import simbad
from ...utils.testing_tools import MockResponse
from ...utils import commons
from ...exceptions import TableParseError
from .test_simbad_remote import multicoords
GALACTIC_COORDS = commons.GalacticCoordGenerator(l=-67.02084, b=-29.75447,
unit=(u.deg, u.deg))
ICRS_COORDS = commons.ICRSCoordGenerator("05h35m17.3s -05h23m28s")
FK4_COORDS = commons.FK4CoordGenerator(ra=84.90759, dec=-80.89403,
unit=(u.deg, u.deg))
FK5_COORDS = commons.FK5CoordGenerator(ra=83.82207, dec=-80.86667,
unit=(u.deg, u.deg))
DATA_FILES = {
'id': 'query_id.data',
'coo': 'query_coo.data',
'cat': 'query_cat.data',
'bibobj': 'query_bibobj.data',
'bibcode': 'query_bibcode.data',
'objectids': 'query_objectids.data',
'error': 'query_error.data',
'sample': 'query_sample.data',
'region': 'query_sample_region.data',
}
class MockResponseSimbad(MockResponse):
    """Canned SIMBAD HTTP response: the query type embedded in a SIMBAD
    script (e.g. ``query id ...``) selects a data file from DATA_FILES."""

    # Captures the query type keyword that follows "query" in the script.
    query_regex = re.compile(r'query\s+([a-z]+)\s+')

    def __init__(self, script, cache=True, **kwargs):
        # preserve, e.g., headers
        super(MockResponseSimbad, self).__init__(**kwargs)
        self.content = self.get_content(script)

    def get_content(self, script):
        """Return the raw bytes of the canned data file matching *script*,
        or None when the script contains no recognizable query type."""
        match = self.query_regex.search(script)
        if match:
            filename = DATA_FILES[match.group(1)]
            content = open(data_path(filename), "rb").read()
            return content
def data_path(filename):
    """Return the absolute path of *filename* inside this module's ``data``
    directory."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'data', filename)
@pytest.fixture
def patch_post(request):
    """Fixture that monkeypatches ``Simbad._request`` so tests never touch
    the network; responses come from canned files via post_mockreturn."""
    try:
        mp = request.getfixturevalue("monkeypatch")
    except AttributeError:  # pytest < 3
        mp = request.getfuncargvalue("monkeypatch")

    mp.setattr(simbad.SimbadClass, '_request', post_mockreturn)
    return mp
def post_mockreturn(self, method, url, data, timeout, **kwargs):
    """Stand-in for ``Simbad._request``: build a canned response from the
    submitted SIMBAD script and record the query on the instance."""
    response = MockResponseSimbad(data['script'], **kwargs)

    # Mimic the real client, which stashes the last query for inspection.
    class last_query:
        pass

    self._last_query = last_query()
    self._last_query.data = data
    return response
@pytest.mark.parametrize(('radius', 'expected_radius'),
[('5d0m0s', '5.0d'),
('5d', '5.0d'),
('5.0d', '5.0d'),
(5 * u.deg, '5.0d'),
(5.0 * u.deg, '5.0d'),
(1.2 * u.deg, '1.2d'),
(0.5 * u.deg, '30.0m'),
('0d1m12s', '1.2m'),
(0.003 * u.deg, '10.8s'),
('0d0m15s', '15.0s')
])
def test_parse_radius(radius, expected_radius):
actual = simbad.core._parse_radius(radius)
assert actual == expected_radius
@pytest.mark.parametrize(('ra', 'dec', 'expected_ra', 'expected_dec'),
[(ICRS_COORDS.ra, ICRS_COORDS.dec, u'5:35:17.3',
u'-80:52:00')
])
def test_to_simbad_format(ra, dec, expected_ra, expected_dec):
actual_ra, actual_dec = simbad.core._to_simbad_format(ra, dec)
assert (actual_ra, actual_dec) == (expected_ra, expected_dec)
@pytest.mark.parametrize(('coordinates', 'expected_frame'),
[(GALACTIC_COORDS, 'GAL'),
(ICRS_COORDS, 'ICRS'),
(FK4_COORDS, 'FK4'),
(FK5_COORDS, 'FK5')
])
def test_get_frame_coordinates(coordinates, expected_frame):
actual_frame = simbad.core._get_frame_coords(coordinates)[2]
assert actual_frame == expected_frame
if actual_frame == 'GAL':
l_gal, b_gal = simbad.core._get_frame_coords(coordinates)[:2]
np.testing.assert_almost_equal(float(l_gal) % 360, -67.02084 % 360)
np.testing.assert_almost_equal(float(b_gal), -29.75447)
def test_parse_result():
    """A valid canned response parses to a Table; a broken one raises
    TableParseError with a descriptive message."""
    result1 = simbad.core.Simbad._parse_result(
        MockResponseSimbad('query id '), simbad.core.SimbadVOTableResult)
    assert isinstance(result1, Table)

    with pytest.raises(TableParseError) as ex:
        simbad.core.Simbad._parse_result(MockResponseSimbad('query error '),
                                         simbad.core.SimbadVOTableResult)
    assert str(ex.value) == ('Failed to parse SIMBAD result! The raw response '
                             'can be found in self.last_response, and the '
                             'error in self.last_table_parse_error. '
                             'The attempted parsed result is in '
                             'self.last_parsed_result.\n Exception: 7:115: '
                             'no element found')
    # The raw response is kept for debugging, in both text and bytes form.
    assert isinstance(simbad.Simbad.last_response.text, six.string_types)
    assert isinstance(simbad.Simbad.last_response.content, six.binary_type)
votable_fields = ",".join(simbad.core.Simbad.get_votable_fields())
@pytest.mark.parametrize(('args', 'kwargs', 'expected_script'),
[(["m [0-9]"], dict(wildcard=True,
caller='query_object_async'),
("\nvotable {" + votable_fields + "}\n"
"votable open\n"
"query id wildcard m [0-9] \n"
"votable close"
)),
(["2006ApJ"], dict(caller='query_bibcode_async',
get_raw=True),
("\n\nquery bibcode 2006ApJ \n"))
])
def test_args_to_payload(args, kwargs, expected_script):
script = simbad.Simbad._args_to_payload(*args, **kwargs)['script']
assert script == expected_script
@pytest.mark.parametrize(('epoch', 'equinox'),
[(2000, 'thousand'),
('J-2000', None),
(None, '10e3b')
])
def test_validation(epoch, equinox):
with pytest.raises(ValueError):
# only one of these has to raise an exception
if equinox is not None:
simbad.core.validate_equinox(equinox)
if epoch is not None:
simbad.core.validate_epoch(epoch)
@pytest.mark.parametrize(('bibcode', 'wildcard'),
[('2006ApJ*', True),
('2005A&A.430.165F', None)
])
def test_query_bibcode_async(patch_post, bibcode, wildcard):
response1 = simbad.core.Simbad.query_bibcode_async(bibcode,
wildcard=wildcard)
response2 = simbad.core.Simbad().query_bibcode_async(bibcode,
wildcard=wildcard)
assert response1 is not None and response2 is not None
assert response1.content == response2.content
def test_query_bibcode_class(patch_post):
result1 = simbad.core.Simbad.query_bibcode("2006ApJ*", wildcard=True)
assert isinstance(result1, Table)
def test_query_bibcode_instance(patch_post):
S = simbad.core.Simbad()
result2 = S.query_bibcode("2006ApJ*", wildcard=True)
assert isinstance(result2, Table)
def test_query_objectids_async(patch_post):
response1 = simbad.core.Simbad.query_obj | ectids_async('Polaris')
response2 = simbad.core.Simbad().query_objectids_async('Polaris')
| assert response1 is not None and response2 is not None
assert response1.content == response2.content
def test_query_objectids(patch_post):
result1 = simbad.core.Simbad.query_objectids('Polaris')
result2 = simbad.core.Simbad().query_objectids('Polaris')
assert isinstance(result1, Table)
assert isinstance(result2, Table)
def test_query_bibobj_async(patch_post):
response1 = simbad.core.Simbad.query_bibobj_async('2005A&A.430.165F')
response2 = simbad.core.Simbad().query_bibobj_async('20 |
onoga/toolib | toolib/wx/controls/gant/LinkShape.py | Python | gpl-2.0 | 1,368 | 0.035819 | import wx
import wx.lib.ogl as ogl
cl | ass LinkShape(ogl.LineShape):
def __init__(self, canvas, link):
self.canvas = canvas
self.__link = link
ogl.LineShape.__init__(self)
#self.SetCanvas(canvas)
#line.SetPen(wx.BLACK_PEN)
#line.SetBrush(wx.BLACK_BRUSH)
self.AddArrow(ogl.ARROW_ARROW)
self.MakeLineControlPoints(3)
self._updateX()
self._updateY()
#shapeActivityFrom.AddLine(self, shapeActivityTo)
def _updateX(self):
#rint '*** _updateX', self, self.canvas.getDx()
a1 = self.canvas | ._getShape(self.__link.getActivityFrom())
a2 = self.canvas._getShape(self.__link.getActivityTo())
p1, p2, p3 = self.GetLineControlPoints()
p1[0] = a1.GetX() + a1.GetWidth() / 2.
p2[0] = p3[0] = a2.GetX() - a2.GetWidth() / 2. + self.canvas.getDx() / 3.
def _updateY(self):
a1 = self.canvas._getShape(self.__link.getActivityFrom())
a2 = self.canvas._getShape(self.__link.getActivityTo())
p1, p2, p3 = self.GetLineControlPoints()
p1[1] = p2[1] = a1.GetY()
p3[1] = a2.GetY() + a2.GetHeight() / 2. * (1 if a1.GetY() > a2.GetY() else -1)
def getMaxPoint(self):
return map(max, zip(*map(tuple, self.GetLineControlPoints())))
def __str__(self):
for o, s in self.canvas._shapes.iteritems():
if s is self:
return "<LinkShape %s>" % o
return "<LinkShape>"
if __name__ == '__main__':
from test import test
test()
|
viswimmer1/PythonGenerator | data/python_files/30004740/widgets.py | Python | gpl-2.0 | 9,892 | 0.007986 | import os.path
import itertools
import pkg_resources
from turbogears.widgets import Widget, TextField
from turbogears.widgets import CSSSource, JSSource, register_static_directory
from turbogears import config
from turbojson import jsonify
from util import CSSLink, JSLink
__all__ = ['YUIBaseCSS', 'YUIResetCSS', 'YUIFontsCSS', 'YUIGrids', 'YUIResetFontsGrids',
'YUIAnimation', 'YUIMenuBar', 'YUIAutoComplete', 'YUITreeView',
'yuibasecss', 'yuiresetcss', 'yuifontscss', 'yuigridscss', 'yui_reset_fonts_grids',
'YUIMenuLeftNav',
]
pkg_path = pkg_resources.resource_filename(__name__, os.path.join("static", "yui"))
register_static_directory("TGYUI", pkg_path)
skin = config.get('app.yui.skin', None)
skin_method = config.get('app.yui.skin_method', 'minimized')
# Module-level counter backing unique_id(); monotonically increasing for
# the lifetime of the server process.
idcounter = itertools.count()

def unique_id(prefix='tgyui'):
    """Return a fresh id string of the form ``<prefix>_<n>``.

    This function lets us have a new unique id each time a widget is rendered,
    to be used by generated css & javascript snippets (e.g. initializing
    functions, or instance-specific appearance).

    If you have no widgets that are fetched by XMLHttpRequest and inserted into
    the document at runtime (e.g. with innerHTML or MochiKit.DOM.swapDOM()), you
    can stop reading here.

    If you have such widgets, please note:

    - if a page retrieves a new widget after the server has been restarted,
      the idcounter variable will be reset and an old id could potentially
      be recycled.
      In order to avoid this, for widgets that are sent by XMLHttpRequest,
      you should specify an id.

    - CSSLink and JSLink directives will not be processed: you must make sure
      the exising page already contains those (i.e. by returing another widget
      instance from the controller, even if the page does not display it at first).

    - CSSSource and JSSource will be inserted in the HTML fragment as usual,
      but the browser will not run the javascript fragment. If the widget needs
      to be initialized, you might want to do that in the code that retrives and
      inserts the fragment.

    There are ways to parse the HTML fragment, extract the <script> tags and
    execute them, but it's outside the scope of this module.

    :param prefix: string prepended to the numeric suffix (default 'tgyui')
    """
    # Use the next() builtin (Python 2.6+/3.x) rather than the Python-2-only
    # iterator .next() method.
    return '%s_%d' % (prefix, next(idcounter))
def skinned(pth, resource_name):
    """Return the list of CSSLink objects for *resource_name* under widget
    directory *pth*, honouring the module-level ``skin`` and ``skin_method``
    configuration.

    :raises ValueError: if app.yui.skin_method is not a known method.
    """
    if not skin:
        # No skin configured: serve the widget's plain stylesheet.
        return [
            CSSLink("TGYUI", '%s/assets/%s' % (pth, resource_name)),
        ]
    base, ext = resource_name.rsplit('.', 1)
    skin_methods = {
        # Single pre-minimized, skin-specific stylesheet.
        'minimized': [
            CSSLink("TGYUI", '%s/assets/skins/%s/%s' % (pth, skin, resource_name)),
        ],
        # Structural (core) rules plus a separate per-widget skin file.
        'core': [
            CSSLink("TGYUI", '%s/assets/%s-core.%s' % (pth, base, ext)),
            CSSLink("TGYUI", '%s/assets/skins/%s/%s-skin.%s' % (pth, skin, base, ext)),
        ],
        # Core rules plus one combined skin.css covering all widgets.
        'uber': [
            CSSLink("TGYUI", '%s/assets/%s-core.%s' % (pth, base, ext)),
            CSSLink("TGYUI", 'assets/skins/%s/skin.css' % skin),
        ],
    }
    if skin_method in skin_methods:
        return skin_methods[skin_method]
    else:
        raise ValueError("app.yui.skin_method must be one of '%s'" % "', '".join(skin_methods.keys()))
class YUIBaseCSS(Widget):
css = [CSSLink("TGYUI", "base/base-min.css")]
yuibasecss = YUIBaseCSS()
class YUIResetCSS(Widget):
css = [CSSLink("TGYUI", "reset/reset-min.css")]
yuiresetcss = YUIResetCSS()
class YUIFontsCSS(Widget):
css = [CSSLink("TGYUI", "fonts/fonts-min.css")]
yuifontscss = YUIFontsCSS()
class YUIGrids(Widget):
css | = [CSSLink("TGYUI", "grids/grids-min.css")]
yuigridscss = YUIGrids()
class YUIResetFontsGrids(Widget):
"""Use this in place of using all the three YUIResetCSS, YUIFontsCSS,
YUIGrids. You might want to explicitly include all three if you use other
widgets that depend on one of them, to avoid duplications."""
css = [CSSLink("TGYUI", "reset-fonts-grids/reset-fonts-grids.css")]
yui_reset_fonts_grids = YUIResetFontsGrids()
class YUIAnimation(Widget):
javascript = [JSLink("TGYUI", "yahoo-dom-event/yahoo-dom-event.js"),
JSLink("TGYUI", "animation/animation-min.js"),
JSLink("TGYUI", "thirdparty/effects-min.js"),
]
class YUIMenuBar(Widget):
template = 'TGYUI.templates.menubar'
params = ['id', 'entries', 'as_bar']
css = ([CSSLink("TGYUI", "reset-fonts-grids/reset-fonts-grids.css"),
CSSLink("TGYUI", "menu/assets/menu.css"),
] + skinned('menu', 'menu.css'))
javascript = [JSLink("TGYUI", "yahoo-dom-event/yahoo-dom-event.js"),
JSLink("TGYUI", "container/container_core-min.js"),
JSLink("TGYUI", "menu/menu-min.js"),
]
id = unique_id(prefix='mbar')
as_bar = True # set to False for e.g., leftNav
entries = [('Companies', '/companies', [
('add new', '/companies/add_new', []),
('browse', '/companies/browse', [
('by name', '/companies/browse/by_name'),
('by date', '/companies/browse/by_date'),
]),
('list', '/companies/list', []),
]),
('Contacts', '/contacts', []),
('Queries', '/queries', []),
('Mailings', '/mailings', []),
('Search', '/search', []),
]
def __init__(self, entries=None, *args, **kw):
super(YUIMenuBar, self).__init__(*args, **kw)
if entries:
self.entries = entries
class YUIMenuLeftNav(YUIMenuBar):
as_bar = False
class YUIAutoComplete(TextField):
"A standard, single-line text field with YUI AutoComplete enhancements."
template = 'TGYUI.templates.autocomplete'
params = ["attrs", "id", "search_controller", "result_schema", "search_param"]
params_doc = {'attrs' : 'Dictionary containing extra (X)HTML attributes for'
' the input tag',
'id' : 'ID for the entire AutoComplete construct.'}
attrs = {}
id = 'noid'
search_param = 'input'
javascript = [JSLink("TGYUI", "yahoo-dom-event/yahoo-dom-event.js"),
JSLink("TGYUI", "json/json-min.js"),
JSLink("TGYUI", "autocomplete/autocomplete-min.js"),
]
class YUITreeView(Widget):
css = (skinned('treeview', 'treeview.css') +
(skin and [CSSSource(".ygtvitem td {padding:0}.ygtvitem table {margin-bottom: 0}")] or []))
javascript = [
JSLink('TGYUI','yahoo/yahoo-min.js'),
JSLink('TGYUI','event/event-min.js'),
JSLink('TGYUI','treeview/treeview-min.js'),
JSSource("""
function yui_tree_init(id, entries) {
function yui_add_branch(node, branch) {
var newnode = new YAHOO.widget.TextNode(branch.data, node, branch.expanded);
if (branch.children) {
for (var i=0; i<branch.children.length; i++) {
yui_add_branch(newnode, branch.children[i]);
}
}
}
tree = new YAHOO.widget.TreeView(id);
yui_add_branch(tree.getRoot(), entries);
tree.draw();
}
""")
]
template = """
<div xmlns:py="http://purl.org/kid/ns#"
py:strip="True">
<div id="${id}" />
<script type="text/javascript">
yui_tree_init('${id}', ${entries});
</script>
</div>
"""
entries = {'expanded': True,
'data': {'href': '/stuff/foo', 'label': 'Foo'},
'children': [
{'expanded': True,
'data': {'href': '/stuff/foo/bar', 'label': 'Bar'},
'children': [
{'expanded': True,
'data': |
jadecastro/LTLMoP | src/lib/handlers/share/dummySensor.py | Python | gpl-3.0 | 6,284 | 0.007161 | #!/usr/bin/env python
"""
=====================================
dummySensor.py - Dummy Sensor Handler
=====================================
Displays a silly little window for faking sensor values by clicking on buttons.
"""
import threading, subprocess, os, time, socket
import numpy, math
import sys
class sensorHandler:
def __init__(self, proj, shared_data):
"""
Start up sensor handler subwindow and create a new thread to listen to it.
"""
# Since we don't want to have to poll the subwindow for each request,
# we need a data structure to cache sensor states:
self.sensorValue = {}
self.proj = proj
self.sensorListenInitialized = False
self._running = True
self.p_sensorHandler = None
def _stop(self):
if self.p_sensorHandler is not None:
print >>sys.__stderr__, "(SENS) Killing dummysensor GUI..."
self.p_sensorHandler.stdin.write(":QUIT\n")
self.p_sensorHandler.stdin.close()
print >>sys.__stderr__, "(SENS) Terminating dummysensor GUI listen thread..."
self._running = False
self.sensorListenThread.join()
def _createSubwindow(self):
# Create a subprocess
print "(SENS) Starting sensorHandler window and listen thread..."
self.p_sensorHandler = subprocess.Popen(["python", "-u", os.path.join(self.proj.ltlmop_root,"lib","handlers","share","_SensorHandler.py")], stdin=subprocess.PIPE)
# Create new thread to communicate with subwindow
self.sensorListenThread = threading.Thread(target = self._sensorListen)
self.sensorListenThread.daemon = True
self.sensorListenThread.start()
# Block until the sensor listener gets the go-ahead from the subwindow
while not self.sensorListenInitialized:
time.sleep(0.05) # Yield cpu
def regionBit(self,name,init_region,bit_num,initial=False):
"""
Return the value of bit #bit_num in the bit-vector encoding of the currently selected region
name (string): Unique identifier for region sensor (default="target")
init_region (region): Name of the sensor whose state is interested
bit_num (int): The index of the bit to return
"""
if initial:
if not self.sensorListenInitialized:
self._createSubwindow()
if name not in self.sensorValue.keys():
# create a new map element
# choose an initial (decomposed) region inside the desired one
self.sensorValue[name] = self.proj.regionMapping[init_region][0]
self.p_sensorHandler.stdin.write("loadproj," + self.proj.getFilenamePrefix() + ".spec,\n")
self.p_sensorHandler.stdin.write(",".join(["region", name, self.sensorValue[name]]) + "\n")
return True
else:
if name in self.sensorValue:
reg_idx = self.proj.rfi.indexOfRegionWithName(self.sensorValue[name])
numBits = int(math.ceil(math.log(len(self.proj.rfi.regions),2)))
reg_idx_bin = numpy.binary_repr(reg_idx, width=numBits)
#print name, bit_num, (reg_idx_bin[bit_num] == '1')
return (reg_idx_bin[bit_num] == '1')
else:
print "(SENS) WARNING: Region sensor %s is unknown!" % button_name
return None
def buttonPress(self,button_name,init_value,initial=False):
"""
Return a boolean value corresponding to the state of the sensor with name ``sensor_name``
If such a sensor does not exist, returns ``None``
button_name (string): Name of the sensor whose state is interested
init_value (bool): The initial state of the sensor (default=False)
"""
if initial:
if not self.sensorListenInitialized:
self._createSubwindow()
if button_name not in self.sensorValue.keys():
self.sensorValue[button_name] = init_value
if init_value:
self.p_sensorHandler.stdin.write("button," + button_name + ",1\n")
else:
self.p_sensorHandler.stdin.write("button," + button_name + ",0\n")
return self.sensorValue[button_name]
else:
if button_name in self.sensorValue:
return self.sensorValue[button_name]
else:
print "(SENS) WARNING: Sensor %s is unknown!" % button_name
return None
def _sensorListen(self):
"""
Processes messages from the sensor handler subwindow, and updates our cache appropriately
"""
host = 'localhost'
port = 23459
buf = 1024
addr = (host,port)
UDPSock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
UDPSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
UDPSock.settimeout(1)
try:
UDPSock.bind(addr)
except:
print "ERROR: Cannot bind to port. T | ry killing all Python processes and trying again."
return
while self._running:
# Wait for and receive a message from the subwindow
try:
input,addrFrom = UDPSock.recvfrom(1024)
| except socket.timeout:
continue
if input == '': # EOF indicates that the connection has been destroyed
print "(SENS) Sensor handler listen thread is shutting down."
break
# Check for the initialization signal, if necessary
if not self.sensorListenInitialized and input.strip() == "Hello!":
self.sensorListenInitialized = True
continue
# Get the data out of the message
args = input.strip().split("=")
if len(args) != 2:
continue
# Update our internal cache
if args[1] == "True":
self.sensorValue[args[0]] = True
elif args[1] == "False":
self.sensorValue[args[0]] = False
else:
self.sensorValue[args[0]] = args[1]
|
PyThaiNLP/pythainlp | pythainlp/util/keywords.py | Python | apache-2.0 | 3,590 | 0 | # -*- coding: utf-8 -*-
from collections import Counter
from typing import Dict, List
from pythainlp.corpus import thai_stopwords
_STOPWORDS = thai_stopwords()
def rank(words: List[str], exclude_stopwords: bool = False) -> Counter:
    """
    Count word frequency given a list of Thai words with an option
    to exclude stopwords.

    :param list words: a list of words
    :param bool exclude_stopwords: If this parameter is set to **True**,
                                   stopwords are excluded from counting.
                                   Otherwise, the stopwords will be counted.
                                   By default, `exclude_stopwords` is
                                   set to **False**
    :return: a Counter object representing word frequency from the text,
             or ``None`` when *words* is empty
    :rtype: :class:`collections.Counter`

    :Example:

    Include stopwords in counting word frequency::

        from pythainlp.util import rank

        words = ["บันทึก", "เหตุการณ์", " ", "มี", "การ", "บันทึก", \\
            "เป็น", " ", "ลายลักษณ์อักษร"]
        rank(words)
        # output:
        # Counter(
        #     {
        #         ' ': 2,
        #         'การ': 1,
        #         'บันทึก': 2,
        #         'มี': 1,
        #         'ลายลักษณ์อักษร': 1,
        #         'เป็น': 1,
        #         'เหตุการณ์': 1
        #     })

    Exclude stopwords in counting word frequency::

        rank(words, exclude_stopwords=True)
        # output:
        # Counter(
        #     {
        #         ' ': 2,
        #         'บันทึก': 2,
        #         'ลายลักษณ์อักษร': 1,
        #         'เหตุการณ์': 1
        #     })
    """
    if not words:
        # Preserve historical behaviour: empty input yields None,
        # not an empty Counter.
        return None

    if exclude_stopwords:
        words = [word for word in words if word not in _STOPWORDS]

    return Counter(words)
def find_keyword(word_list: List[str], min_len: int = 3) -> Dict[str, int]:
    """
    This function counts the frequency of words in the list
    where stopwords are excluded and returns the words whose frequency
    is at least *min_len* as a frequency dictionary.

    :param list word_list: a list of words
    :param int min_len: the minimum frequency for words to be kept
                        (default 3)
    :return: a dictionary object with key-value pair as word and its raw
             count; empty when *word_list* is empty
    :rtype: dict[str, int]

    :Example:
    ::
        from pythainlp.util import find_keyword

        words = ["บันทึก", "เหตุการณ์", "บันทึก", "เหตุการณ์",
            " ", "มี", "การ", "บันทึก", "เป็น", " ", "ลายลักษณ์อักษร"
            "และ", "การ", "บันทึก","เสียง","ใน","เหตุการณ์"]

        find_keyword(words)
        # output: {'บันทึก': 4, 'เหตุการณ์': 3}

        find_keyword(words, min_len=1)
        # output: {' ': 2, 'บันทึก': 4, 'ลายลักษณ์อักษรและ': 1,
            'เสียง': 1, 'เหตุการณ์': 3}
    """
    # rank() returns None for an empty input; guard so that .items()
    # does not raise AttributeError.
    counts = rank(word_list, exclude_stopwords=True)
    if not counts:
        return {}

    return {word: freq for word, freq in counts.items() if freq >= min_len}
|
villeneuvelab/vlpp | setup.py | Python | mit | 1,093 | 0.006404 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
description = """Villeneuve Lab PET Pipeline"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
from glob import glob
from setuptools import setup
DISTNAME = "vlpp"
DESCRIPTION = description
VERSION = "1.2.2"
AUTHOR = "Christophe Bedetti"
AUTHOR_EMAIL = "christophe.bedetti@umontreal.ca"
#URL = "https://github.com/"
#DOWNLOAD_URL = URL + "/archive/" + VERSION + ".tar.gz"
with open("requirements.txt", "r") as f:
INSTALL_REQUIRES = f.read().splitlines()
if __name__ == "__main__":
setup(
name=DISTNAME,
version=VERSION,
description=description,
long_description=l | ong_description,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
#url=URL,
#download_url=DOWNLOAD_URL,
packages=[DISTNAME],
#scripts=glob('scripts/*') + glob('pipelines/*.nf'),
install_requires=INSTALL_REQUI | RES,
)
|
lithiumtech/skybase.io | skybase/skytask/service/record_state.py | Python | apache-2.0 | 3,815 | 0.002097 | import logging
import json
from skybase.skytask import SkyTask
from skybase.artiball import Artiball
from skybase.utils.logger import Logger
from skybase.ac | tions.dbstate import write_service_state_record
from skybase import skytask
from skybase.planet import Planet
from skybase.utils import simple_error_format
import skybase.actions.skycloud
import skybase.exceptions
def service_record_state_add_arguments(parser):
parser.add_argument(
'-p', '--planet',
dest='planet_name',
a | ction='store',
required=True,
help='planet name')
parser.add_argument(
'-a', '--artiball',
dest='source_artiball',
action='store',
required=True,
help='Packaged Release Bundle Name (required)'
)
parser.add_argument(
'-r', '--provider',
dest='provider_name',
action='store',
required=True,
help='provider name')
parser.add_argument(
'-s', '--service',
dest='service_name',
action='store',
required=True,
help='service name.')
parser.add_argument(
'-d', '--deploy-tag',
dest='deploy_tag',
action='store',
required=True,
help='deployment tag.')
parser.add_argument(
'-k', '--stacks',
dest='stacks',
action='store',
type=json.loads,
required=True,
help='stack info object')
parser.add_argument(
'-m', '--mode',
dest='exec_mode',
action='store',
choices={'local', 'restapi'},
default='restapi',
help='execution mode (default REST api)'
)
class RecordState(SkyTask):
def __init__(self, all_args=None, runner_cfg=None):
SkyTask.__init__(self, all_args, runner_cfg)
self.logger = Logger(logging.getLogger(__name__), logging.INFO)
self.name = 'service.record_state'
self.args = all_args
self.runner_cfg = runner_cfg
self.planet = None
self.stacks = self.args['stacks']
def preflight_check(self):
preflight_result = []
# instantiate planet
try:
self.planet = Planet(self.args.get('planet_name'))
except Exception as e:
self.preflight_check_result.status = 'FAIL'
preflight_result.append(skybase.exceptions.SkyBaseValidationError('planet init: {0}'.format(simple_error_format(e))))
# validate stacks for errors before writing to service state registry
try:
are_stacks_valid = skybase.actions.skycloud.are_stacks_valid(
self.planet.orchestration_engine,
self.stacks)
if not are_stacks_valid:
self.preflight_check_result.status = 'FAIL'
preflight_result.append(skybase.exceptions.SkyBaseValidationError('cannot write service state record with invalid stacks'))
except Exception as e:
self.preflight_check_result.status = 'FAIL'
preflight_result.append(skybase.exceptions.SkyBaseValidationError('test for valid stacks: {0}'.format(simple_error_format(e))))
self.preflight_check_result.set_output(preflight_result)
return self.preflight_check_result
def execute(self):
# record service state record
record_id = write_service_state_record(
planet_name=self.planet.planet_name,
service_name=self.args['service_name'],
tag=self.args['deploy_tag'],
registration=self.args.get('registration'),
provider=self.planet.provider,
stacks=self.stacks,
)
# prepare result object and execute query
self.result.output = record_id
self.result.format = skytask.output_format_json
return self.result |
houssine78/addons | product_internal_ref/models/product.py | Python | agpl-3.0 | 368 | 0.013624 | # -*- coding: utf-8 -*-
# © 2017 Houssine BAKKALI - Coop IT Easy
# License AGPL-3.0 or later (http:// | www.gnu.org/licenses/agpl). |
from openerp import fields, models
class ProductTemplate(models.Model):
_inherit = "product.template"
default_code = fields.Char(related='product_variant_ids.default_code', string='Internal Reference', store=True) |
elfnor/sverchok | nodes/generator/image.py | Python | gpl-3.0 | 6,178 | 0.001964 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bpy
from bpy.props import IntProperty, FloatProperty, StringProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, fullList
class ImageNode(bpy.types.Node, SverchCustomTreeNode):
''' Image '''
bl_idname = 'ImageNode'
bl_label = 'Image'
bl_icon = 'FILE_IMAGE'
name_image = StringProperty(name='image_name', description='image name', default='', update=updateNode)
R = FloatProperty(
name='R', description='R', default=0.30, min=0, max=1,
options={'ANIMATABLE'}, update=updateNode)
G = FloatProperty(
name='G', description='G', default=0.59, min=0, max=1,
options={'ANIMATABLE'}, update=updateNode)
B = FloatProperty(
name='B', description='B', default=0.11, min=0, max=1,
options={'ANIMATABLE'}, update=updateNode)
Xvecs = IntProperty(
name='Xvecs', description='Xvecs', default=10, min=2, max=100,
options={'ANIMATABLE'}, update=updateNode)
Yvecs = IntProperty(
name='Yvecs', description='Yvecs', default=10, min=2, max=100,
options={'ANIMATABLE'}, update=updateNode)
Xstep = FloatProperty(
name='Xstep', description='Xstep', default=1.0, min=0.01, max=100,
options={'ANIMATABLE'}, update=updateNode)
Ystep = FloatProperty(
name='Ystep', description='Ystep', default=1.0, min=0.01, max=100,
options={'ANIMATABLE'}, update=updateNode)
def sv_init(self, context):
self.inputs.new('StringsSocket', "vecs X").prop_name = 'Xvecs'
self.inputs.new('StringsSocket', "vecs Y").prop_name = 'Yvecs'
self.inputs.new('StringsSocket', "Step X").prop_name = 'Xstep'
self.inputs.new('StringsSocket', "Step Y").prop_name = 'Ystep'
self.outputs.new('VerticesSocket', "vecs")
self.outputs.new('StringsSocket', "edgs")
self.outputs.new('StringsSocket', "pols")
def draw_buttons(self, context, lay | out):
layout.prop_search(self, "name_image", bpy.data, 'images', text="image")
row = layout.row(align=True)
row.scale_x = 10.0
row.prop(self, "R", text="R")
row.prop(self, "G", text="G")
row.prop(self, "B", text="B")
def process(self):
inputs, outputs = self.inputs, self.outputs
# inputs
if inputs['vecs X'].is_linked:
IntegerX = min(int(inputs['vecs X'].sv_get()[0][0]), 100)
else:
Integ | erX = int(self.Xvecs)
if inputs['vecs Y'].is_linked:
IntegerY = min(int(inputs['vecs Y'].sv_get()[0][0]), 100)
else:
IntegerY = int(self.Yvecs)
step_x_linked = inputs['Step X'].is_linked
step_y_linked = inputs['Step Y'].is_linked
StepX = inputs['Step X'].sv_get()[0] if step_x_linked else [self.Xstep]
StepY = inputs['Step Y'].sv_get()[0] if step_y_linked else [self.Ystep]
fullList(StepX, IntegerX)
fullList(StepY, IntegerY)
# outputs
out = [[[]]]
edg = [[[]]]
plg = [[[]]]
if outputs['vecs'].is_linked:
out = [self.make_vertices(IntegerX-1, IntegerY-1, StepX, StepY, self.name_image)]
outputs['vecs'].sv_set(out)
if outputs['edgs'].is_linked:
listEdg = []
for i in range(IntegerY):
for j in range(IntegerX-1):
listEdg.append((IntegerX*i+j, IntegerX*i+j+1))
for i in range(IntegerX):
for j in range(IntegerY-1):
listEdg.append((IntegerX*j+i, IntegerX*j+i+IntegerX))
edg = [list(listEdg)]
outputs['edgs'].sv_set(edg)
if outputs['pols'].is_linked:
listPlg = []
for i in range(IntegerX-1):
for j in range(IntegerY-1):
listPlg.append((IntegerX*j+i, IntegerX*j+i+1, IntegerX*j+i+IntegerX+1, IntegerX*j+i+IntegerX))
plg = [list(listPlg)]
outputs['pols'].sv_set(plg)
def make_vertices(self, delitelx, delitely, stepx, stepy, image_name):
lenx = bpy.data.images[image_name].size[0]
leny = bpy.data.images[image_name].size[1]
if delitelx > lenx:
delitelx = lenx
if delitely > leny:
delitely = leny
R, G, B = self.R, self.G, self.B
xcoef = lenx//delitelx
ycoef = leny//delitely
# copy images data, pixels is created on every access with [i], extreme speedup.
# http://blender.stackexchange.com/questions/3673/why-is-accessing-image-data-so-slow
imag = bpy.data.images[image_name].pixels[:]
vertices = []
addition = 0
for y in range(delitely+1):
addition = int(ycoef*y*4*lenx)
for x in range(delitelx+1):
# каждый пиксель кодируется RGBA, и записан строкой, без разделения на строки и столбцы.
middle = (imag[addition]*R+imag[addition+1]*G+imag[addition+2]*B)*imag[addition+3]
vertex = [x*stepx[x], y*stepy[y], middle]
vertices.append(vertex)
addition += int(xcoef*4)
return vertices
def register():
bpy.utils.register_class(ImageNode)
def unregister():
bpy.utils.unregister_class(ImageNode)
|
acysos/odoo-addons | edicom/models/product_product.py | Python | agpl-3.0 | 532 | 0 | # -*- coding: utf-8 -*-
# Copyright 2020 Ignacio Ibeas <ignacio@acysos.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, fields, models
class ProductProduct(models.Model):
_inherit = 'product.product'
edicom_tipart = fields.Selection(
string='Tipo articulo',
selection=[('CU', 'Unidad de consumo'), ('DU', 'Unid | ad de expedición'),
('TU', 'Unidad Comerciada'),
('VQ', 'Producto de medid | a variable')],
default='CU')
|
bkpathak/HackerRank-Problems | collections/strings/permuataion.py | Python | mit | 648 | 0.015432 | #https://www.youtube.com/watch?v=hqijNdQTBH8
def permute1(lst):
if len(lst) == 0:
yield []
elif len(lst) == 1:
yield lst
else:
for i in range(len(lst)):
| x = lst[i]
xs = lst[:i]+lst[i+1:]
for p in permute1(xs):
| yield [x] + p
def permute2(str,left,right):
if left == right:
print(str)
else:
for i in range(left,right):
str[i], str[left] = str[left], str[i]
permute2(str,left+1,right)
str[i], str[left] = str[left], str[i]
data = list("ABC")
permute2(data,0,3)
#for p in permute1(data):
# print(p)
|
chrisjdavie/shares | website_searching/telegraph_tips/test_function.py | Python | mit | 164 | 0.030488 | '''
Created on 2 Se | p 2014
@author: chris
'''
def main():
from telegraph_tips import tele_tip
symb = 'IRV.L'
if __name__ == '__main__':
m | ain() |
Cadasta/cadasta-geoodk | xforms/renderers.py | Python | agpl-3.0 | 2,727 | 0.000733 | from rest_framework import renderers
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.six.moves import StringIO
from django.utils.encoding import smart_text
from rest_framework.compat import six
from rest_framework import negotiation
import json
"""
@author: Jon Nordling
@date: 06/19/2016
XFormListRenderer, is a custom django rest framework, renderer
that passing a data object, will render serializes, data to xml for
the views
"""
class MediaFileContentNegotiation(negotiation.DefaultContentNegotiation):
def filter_renderers(self, renderers, format):
"""
If there is a '.json' style format suffix, filter the renderers
so that we only negotiation against those that accept that format.
If there is no renderer available, we use MediaFileRenderer.
"""
renderers = [renderer for renderer in renderers
if renderer.format == format]
if not renderers:
renderers = [MediaFileRenderer()]
return renderers
class MediaFileRenderer(renderers.BaseRenderer):
media_type = '*/*'
format = None
charset = None
render_style = 'binary'
def render(self, data, accepted_media_type=None, renderer_context=None):
return data
class XFormListRenderer(renderers.BaseRenderer):
"""
Renderer which serializes to XML.
"""
media_type = 'text/xml'
format = 'xml'
charset = 'utf-8'
root_node = 'xforms'
element_node = 'xform'
xmlns = "http://openrosa.org/xforms/xformsList"
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders *obj* into serialized XML.
"""
if data is None:
return ''
elif isinstance(data, six.string_types):
return data
stream = StringIO()
xml = SimplerXMLGenera | tor(stream, self.charset)
xml.startDocument()
xml.startElement(self.root_node, {'xmlns': self.xmlns})
self._to_xml(xml, data)
xml.endElement(self.root_node)
| xml.endDocument()
return stream.getvalue()
def _to_xml(self, xml, data):
if isinstance(data, (list, tuple)):
for item in data:
xml.startElement(self.element_node, {})
self._to_xml(xml, item)
xml.endElement(self.element_node)
elif isinstance(data, dict):
for key, value in six.iteritems(data):
xml.startElement(key, {})
self._to_xml(xml, value)
xml.endElement(key)
elif data is None:
# Don't output any value
pass
else:
xml.characters(smart_text(data))
|
poldracklab/crn-app-registration-tool | setup.py | Python | apache-2.0 | 2,099 | 0.001429 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
PACKAGE_NAME = 'cappat'
def main():
""" Install entry-point """
from os import path as op
from glob import glob
from inspect import getfile, currentframe
from setuptools import setup, find_packages
from io import open # pylint: disable=W0622
this_path = op.dirname(op.abspath(getfile(currentframe())))
# Python 3: use a locals dictionary
# http://stackoverflow.com/a/1463370/6820620
ldict = locals()
# Get version and release info, which is all stored in phantomas/info.py
module_file = op.join(this_path, PACKAGE_NAME, 'info.py')
with open(module_file) as infofile:
pythoncode = [line for line in infofile.readlines() if not line.strip().startswith('#')]
exec('\n'.join(pythoncode), globals(), ldict)
setup(
name=PACKAGE_NAME,
version=ldict['__version__'],
description=ldict['__description__'],
long_description=ldict['__longdesc__'],
author=ldict['__author__'],
author_email=ldict['__email__'],
maintainer=ldict['__maintainer__'],
maintainer_email=ldict['__email__'],
license=ldict['__license__'],
url=ldict['URL'],
download_url=ldict['DOWNLOAD_URL'],
classifiers=ldict['CLASSIFIERS'],
packages=find_packages(exclude=['build', 'do | c', 'old-wrappers', 'tests']),
package_data={'cappat': [
'tpl/*.jnj2',
'data/wrapper.sh',
'data/default_app_params.json',
'data/default_app_inputs.json'
]},
entry_points={'console_scripts': [
'cap | pwrapp=cappat.wrapper:main',
'cappgen=cappat.appgen:main'
]},
# scripts=glob('scripts/*'),
zip_safe=False,
# Dependencies handling
setup_requires=ldict['SETUP_REQUIRES'],
install_requires=ldict['REQUIRES'],
dependency_links=ldict['LINKS_REQUIRES'],
tests_require=ldict['TESTS_REQUIRES'],
extras_require=ldict['EXTRA_REQUIRES'],
)
if __name__ == '__main__':
main()
|
yslin/tools-zodlin | ubuntu/vim/script/debug/python/python_mandelbrot.py | Python | apache-2.0 | 1,868 | 0.008565 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#===============================================================================
# Copyright 2012 zod.yslin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: zod.yslin
# Email:
# File Name: python_mandelbrot.py
# Description:
#
# Edit History:
# 2012-01-27 File created.
#===============================================================================
#!/usr/local/bin/python
# by Daniel Rosengren, modified by e-satis
"""
Module doctring
"""
import time
from sys import stdout
BAILOUT = 16
MAX_ITERATIONS = 1000
def mandelbrot(dim_1, dim_2):
"""
function doc string
"""
cr1 = dim_1 - 0.5
ci1 = dim_2
zi1 = 0.0
zr1 = 0.0
for i in xrange(MAX_ITERATIONS) :
temp = zr1 * z | i1
zr2 = zr1 * zr1
zi2 = zi1 * zi1
zr1 = zr2 - zi2 + cr1
zi1 = temp + temp + ci1
if zi2 + zr2 > BAILOUT:
return i
return 0
def execute() :
"""
func doc string
"""
print 'Rendering...'
for dim_1 in xrange(-39, 39):
stdout.write('\n')
for dim_2 in xrange(-39, | 39):
if mandelbrot(dim_1/40.0, dim_2/40.0) :
stdout.write(' ')
else:
stdout.write('*')
START_TIME = time.time()
execute()
print '\nPython Elapsed %.02f' % (time.time() - START_TIME)
|
yinzishao/NewsScrapy | thepaper/thepaper/spiders/travelweeklychina_spider.py | Python | lgpl-3.0 | 8,246 | 0.012463 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'yinzishao'
import re
from scrapy.exceptions import CloseSpider
import scrapy
from bs4 import BeautifulSoup
import logging
from thepaper.items import NewsItem
import json
logger = logging.getLogger("TravelWeeklyChinaSpider")
from thepaper.settings import *
class TravelWeeklyChinaSpider(scrapy.spiders.Spider):
domain = "http://travelweekly-china.com/"
name = "twc"
allowed_domains = ["travelweekly-china.com",]
end_day = END_DAY #终结天数
end_now = END_NOW
post_next_url = "http://travelweekly-china.com/Dyna.asmx/PageContentList"
start_urls = [
"http://travelweekly-china.com/",
]
"""
因为需要爬12个类别的新闻。一起爬取的时候终止条件不应该有一个种类的新过了时间就停止。
应该是12个类别的新闻都达到结束时间时,结束。
而且因为是异步的。爬取一个页面的新闻会不按时间顺序爬取,所以我们对一个页面的所有新闻都爬取才结束。
"""
flag = {}
#根据首页的新闻类别爬取各个类别的url
def parse(self, response):
soup = BeautifulSoup(response.body,"lxml")
menu = soup.find('div',id="channel---7",class_="channel")
# """
if menu:
for topic in menu('ul'):
topic_name = topic.li.a.string
url = topic.find("a").get("href",None)
if url:
topic_url = self.domain+url
self.flag.setdefault(url[1:],0)
yield scrapy.Request(topic_url,callback=self.parse_topic)
# """
# yield scrapy.Request("http://travelweekly-china.com/31774",callback=self.parse_topic)
#根据每个类型的首页得到新闻json的接口与参数
def parse_topic(self,response):
"""
:param response:
:return:抛出每个类型的第一页访问json
爬取下一页的链接
POST请求
需要三个参数:
PageKey
WidgetId
PageNumber
"""
soup = BeautifulSoup(response.body,"lxml")
next_obj = soup.find("a",class_="insert-more show-more")
#如果有下一页
if next_obj:
next_pagekey = next_obj.get("_p",None)
next_wid = next_obj.get("_wid",None)
# next_num = next_obj.get("_n",None)
next_num = 1
post_data = {"req":
{
"PageKey":next_pagekey,
"WidgetId":next_wid,
"PageNumber":next_num
}
}
# print self.flag
# print response.request.url,"----------------"
#抛出每个类型的第一页访问json
yield scrapy.Request(self.post_next_url,
callback=self.parse_newslist_json,
method="POST",
headers={"Content-Type":"application/json"},
body=json.dumps(post_data))
else:
#http://travelweekly-china.com/31781 只有一页!
flag_id = response.url.split("/")[-1]
self.flag[str(flag_id)]=1
print flag_id,"stop ~~~~~~"
logger.warning("can't find next page")
#新闻列表
def parse_newslist_json(self,response):
# print response.url
# if self.post_next_url == response.url:
res = json.loads(response.body)['d']
#需要替换<div>因为并没有</div>会影响beautifulsoup的加载!!!
res = re.sub(re.compile(r'<div.*?>'),"",res)
news_list = BeautifulSoup(res,"lxml").find_all("article")
# else:
# soup = BeautifulSoup(response.body,"lxml")
# news_list = soup.find('div',class_="start-feed")('article')
origin_post_data = json.loads(response.request.body)
post_data = origin_post_data.get("req",None)
if post_data:
old_pagenumber = post_data['PageNumber']
post_data.update({"PageNumber":str(int(old_pagenumber)+1)})
#抛出新闻!
if news_list:
for news in news_list:
title = news.span.a.string
url = news.span.a.get("href",None)
# 格式<p>u'XXXXX\xa0'<a>XX<\a><\p>
# content = unicode(news.p).replace(u'\xa0', u'').replace("<p>","").replace("</p>","")
#可以获取到p的内容
#news.p -> \u7cfb\u5217\u6d3b\u52a8\u3002...\xa0
#TODO:没有replace(u'\xa0'),仍然不知出现编码问题的原因,暂不处理
abstract = news.p.strings.next()
if url:
#列表并没有时间,所以不 | 能设定停止条件
# print self.domain+url,"request"
yield scrapy.Request(self.domain+url,
callback=self.parse | _news,
meta={
"topic_id":post_data["PageKey"],"PageNumber":old_pagenumber
}
)
PageKey = post_data['PageKey']
flag_id =str(int(PageKey)-40037910)
#继续抛出下一页的条件:该类型的标志为0
if not self.flag[flag_id]:
# print flag_id,"要爬取下一页!",pagenumber
yield scrapy.Request(self.post_next_url,
callback=self.parse_newslist_json,
method="POST",
headers={"Content-Type":"application/json"},
body=json.dumps(post_data))
#异步并不按时间顺序!
def parse_news(self,response):
# print response.url,"response"
PageKey = response.meta.get("topic_id")
PageNumber =response.meta.get("PageNumber")
flag_id =str(int(PageKey)-40037910)
soup =BeautifulSoup(response.body,"lxml")
#2016-07-13
news_date = soup.find("time").text if soup.find("time") else None
# print self.flag[flag_id],int(PageNumber)
"""
条件是该类别标记(self.flag[flag_id])是0爬取,说明还没有爬到过期的。
爬取页面是该页的也继续爬取。因为一个页面的爬取顺序是异步的。
self.flag[flag_id]=过期页数
"""
if not self.flag[flag_id] or int(PageNumber)==self.flag[flag_id]:
#,没有超出范围
struct_date = datetime.datetime.strptime(news_date,"%Y-%m-%d")
# print self.end_now,struct_date,"time"
delta = self.end_now-struct_date
# print delta.days,"delta day ~~~~~~~~~~~~~~~~"
if delta.days > self.end_day:
self.flag[str(flag_id)]=int(PageNumber)
# print flag_id,"stop ~~~~~~"
# raise CloseSpider('today scrapy end')
else:
head = soup.find("div",class_="post-head")
topic,title,abstract=None,None,None
if head:
topic = head.find("span",class_="category").text if head.find("span",class_="category") else None
title =head.find("h1",class_="h1").text if head.find("h1",class_="h1") else None
abstract = head.find("span",class_="kicker").text if head.find("span",class_="kicker") else None
content = soup.find("div",class_="post-body clearfix").text if soup.find("div",class_="post-body clearfix") else None
news_no = response.url.split("/")[-1].split("?")[0]
#TODO 评论数量js渲染,未解决
item = NewsItem(title=title,topic=topic,
abstract=abstract,news_date=news_date,
content=content,news_no=news_no
,crawl_date=NOW,news_url=response.url,catalogue='新闻板块')
yield item
|
JudoWill/glue | glue/qt/tests/test_qtutil.py | Python | bsd-3-clause | 13,748 | 0.000145 | # pylint: disable=I0011,W0613,W0201,W0212,E1101,E1103
from __future__ import absolute_import, division, print_function
import pytest
from .. import qtutil
from ...external.qt import QtGui
from ...external.qt.QtCore import Qt
from mock import MagicMock, patch
from ..qtutil import GlueDataDialog
from ..qtutil import pretty_number, GlueComboBox, PythonListModel
from glue.config import data_factory
from glue.core import Subset
def test_glue_action_button():
a = QtGui.QAction(None)
a.setToolTip("testtooltip")
a.setWhatsThis("testwhatsthis")
a.setIcon(QtGui.QIcon("dummy_file"))
a.setText('testtext')
b = qtutil.GlueActionButton()
b.set_action(a)
# assert b.icon() == a.icon() icons are copied, apparently
assert b.text() == a.text()
assert b.toolTip() == a.toolTip()
assert b.whatsThis() == a.whatsThis()
#stays in sync
a.setText('test2')
assert b.text() == 'test2'
@data_factory('testing_factory', '*.*')
def dummy_factory(filename):
from glue.core import Data
result = Data()
result.made_with_dummy_factory = True
return result
dummy_factory_member = [f for f in data_factory.members
if f[0] is dummy_factory][0]
class TestGlueDataDialog(object):
def test_factory(self):
"""Factory method should always match with filter"""
fd = GlueDataDialog()
assert len(fd.filters) > 0
for k, v in fd.filters:
fd._fd.setFilter(v)
assert fd.factory() is k
def test_load_data_cancel(self):
"""Return None if user cancels operation"""
fd = GlueDataDialog()
mock_file_exec(fd, cancel=True)
assert fd.load_data() == []
def test_load_data_normal(self):
"""normal load_data dispatches path to factory"""
fd = GlueDataDialog()
mock_file_exec(fd, cancel=False, path='ld_data_nrml',
factory=dummy_factory_member)
d = fd.load_data()
assert len(d) == 1
d = d[0]
assert d.label == 'ld_data_nrml'
assert d.made_with_dummy_factory is True
def test_filters(self):
"""Should build filter list from data_factories env var"""
fd = GlueDataDialog()
assert len(fd.filters) == len(data_factory.members)
def test_load_multiple(self):
fd = GlueDataDialog()
mock_file_exec(fd, cancel=False, path=['a.fits', 'b.fits'],
factory=dummy_factory_member)
ds = fd.load_data()
assert len(ds) == 2
for d, label in zip(ds, 'ab'):
assert d.label == label
assert d.made_with_dummy_factory is True
def mock_file_exec(fd, cancel=False, path='junk',
factory=dummy_factory_member):
if not isinstance(path, list):
path = [path]
fd._fd.exec_ = MagicMock()
fd._fd.exec_.return_value = 1 - cancel
fd.factory = MagicMock()
fd.factory.return_value = factory
fd.paths = MagicMock()
fd.paths.return_value = path
def test_data_wizard_cancel():
"""Returns empty list if user cancel's dialog"""
with patch('glue.qt.qtutil.GlueDataDialog') as mock:
mock().load_data.return_value = []
assert qtutil.data_wizard() == []
def test_data_wizard_normal():
"""Returns data list if successful"""
with patch('glue.qt.qtutil.GlueDataDialog') as mock:
mock().load_data.return_value = [1]
assert qtutil.data_wizard() == [1]
def test_data_wizard_error_cancel():
"""Returns empty list of error generated and then canceled"""
with patch('glue.qt.qtutil.GlueDataDialog') as mock:
mock().load_data.side_effect = Exception
with patch('glue.qt.qtutil.QMessageBox') as qmb:
qmb().exec_.return_value = 0
assert qtutil.data_wizard() == []
class TestPrettyNumber(object):
def test_single(self):
assert pretty_number([1]) == ['1']
assert pretty_number([0]) == ['0']
assert pretty_number([-1]) == ['-1']
assert pretty_number([1.0001]) == ['1']
assert pretty_number([1.01]) == ['1.01']
assert pretty_number([1e-5]) == ['1.000e-05']
assert pretty_number([1e5]) == ['1.000e+05']
assert pretty_number([3.3]) == ['3.3']
def test_list(self):
assert pretty_number([1, 2, 3.3, 1e5]) == ['1', '2', '3.3',
'1.000e+05']
class TestGlueComboBox(object):
def setup_method(self, method):
self.combo = GlueComboBox()
def test_add_data(self):
self.combo.addItem('hi', userData=3)
assert self.combo.itemData(0) == 3
def test_add_multi_data(self):
self.combo.addItem('hi', userData=3)
self.combo.addItem('ho', userData=4)
assert self.combo.itemData(0) == 3
assert self.combo.itemData(1) == 4
def test_replace(self):
self.combo.addItem('hi', userData=3)
self.combo.removeItem(0)
self.combo.addItem('ho', userData=4)
assert self.combo.itemData(0) == 4
def test_clear(self):
self.combo.addItem('a', 1)
self.combo.addItem('b', 2)
self.combo.addItem('c', 3)
self.combo.clear()
self.combo.addItem('d', 4)
assert self.combo.itemData(0) == 4
def test_mid_remove(self):
self.combo.addItem('a', 1)
self.combo.addItem('b', 2)
self.combo.addItem('c', 3)
self.combo.removeItem(1)
assert self.combo.itemData(1) == 3
def test_set_item_data(self):
self.combo.addItem('a', 1)
self.combo.setItemData(0, 2)
assert self.combo.itemData(0) == 2
def test_default_data(self):
self.combo.addItem('a')
assert self.combo.itemData(0) is None
def test_add_items(self):
self.combo.addItem('a', 1)
self.combo.addItems(['b', 'c', 'd'])
assert self.combo.itemData(0) == 1
assert self.combo.itemData(1) is None
assert self.combo.itemData(2) is None
assert self.combo.itemData(3) is None
def test_non_user_role(self):
"""methods that edit data other than userRole dispatched to super"""
self.combo.addItem('a', 1)
assert self.combo.itemData(0, role=Qt.DisplayRole) == 'a'
self.combo.setItemData(0, 'b', role=Qt.DisplayRole)
assert self.combo.itemData(0, role=Qt.DisplayRole) == 'b'
def test_consistent_with_signals(self):
"""Ensure that when signal/slot connections interrupt
methods mid-call, internal data state is consistent"""
# Qt swallows exceptions in signals, so we can't assert in this
# instead, store state and assert after signal
good = [False]
def assert_consistent(*args):
good[0] = len(self.combo._data) == self.combo.count()
# addItem
self.combo | .currentIndexChanged.connect(assert_consistent)
self.combo.addItem('a', 1)
assert | good[0]
# addItems
self.combo.clear()
good[0] = False
self.combo.addItems('b c d'.split())
assert good[0]
# removeItem
self.combo.clear()
self.combo.addItem('a', 1)
good[0] = False
self.combo.removeItem(0)
assert good[0]
def test_qt4_to_mpl_color():
assert qtutil.qt4_to_mpl_color(QtGui.QColor(255, 0, 0)) == '#ff0000'
assert qtutil.qt4_to_mpl_color(QtGui.QColor(255, 255, 255)) == '#ffffff'
def test_edit_color():
with patch('glue.qt.qtutil.QColorDialog') as d:
d.getColor.return_value = QtGui.QColor(0, 1, 0)
d.isValid.return_value = True
s = Subset(None)
qtutil.edit_layer_color(s)
assert s.style.color == '#000100'
def test_edit_color_cancel():
with patch('glue.qt.qtutil.QColorDialog') as d:
d.getColor.return_value = QtGui.QColor(0, -1, 0)
s = Subset(None)
qtutil.edit_layer_color(s)
def test_edit_symbol():
with patch('glue.qt.qtutil.QInputDialog') as d:
d.getItem.return_value = ('*', True)
s = Subset(None)
qtutil.edit_layer_symbol(s)
assert s.style.marker == '*'
def test_edit_symbol_ca |
cdeboever3/WASP | CHT/combined_test.py | Python | apache-2.0 | 25,839 | 0.009908 | # Copyright 2013 Graham McVicker and Bryce van de Geijn
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is dist | ributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF AN | Y KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import math
import time
import gzip
import argparse
from scipy.optimize import *
from scipy import cast
from scipy.special import gammaln
from scipy.special import betaln
import scipy.stats
import numpy as np
from random import shuffle
from random import randint
# OPTIMIZER="BFGS"
OPTIMIZER="Nelder-Mead"
class TestSNP:
def __init__(self, name, geno_hap1, geno_hap2, AS_target_ref, AS_target_alt,
hetps, totals, counts):
self.name = name
self.geno_hap1 = geno_hap1
self.geno_hap2 = geno_hap2
self.AS_target_ref = AS_target_ref
self.AS_target_alt = AS_target_alt
self.hetps = hetps
self.totals = totals
self.counts = counts
def is_het(self):
"""returns True if the test SNP is heterozygous"""
return self.geno_hap1 != self.geno_hap2
def is_homo_ref(self):
"""Returns True if test SNP is homozygous for reference allele"""
return self.geno_hap1 == 0 and self.geno_hap2 == 0
def is_homo_alt(self):
"""Returns True if test SNP is homozygous for non-reference allele"""
return self.geno_hap1 == 1 and self.geno_hap2 == 1
def open_input_files(in_filename):
if not os.path.exists(in_filename) or not os.path.isfile(in_filename):
sys.stderr.write("input file %s does not exist or is not a regular file\n" %
in_filename)
exit(2)
# read file that contains list of input files
in_file = open(in_filename)
infiles = []
for line in in_file:
# open each input file and read first line
filename = line.rstrip()
if not filename or not os.path.exists(filename) or not os.path.isfile(filename):
sys.stderr.write("input file '%s' does not exist or is not a regular file\n"
% line)
exit(2)
if filename.endswith(".gz"):
f = gzip.open(filename)
else:
f = open(filename)
# skip header
f.readline()
infiles.append(f)
in_file.close()
if len(infiles) == 0:
sys.stderr.write("no input files specified in file '%s'\n" % options.infile_list)
exit(2)
return infiles
def write_header(outfile):
outfile.write("\t".join(["TEST.SNP.CHROM", "TEST.SNP.POS",
"LOGLIKE.NULL", "LOGLIKE.ALT",
"CHISQ", "P.VALUE", "ALPHA", "BETA",
"PHI", "TOTAL.AS.READ.COUNT",
"TOTAL.READ.COUNT"]) + "\n")
def read_bnb_sigmas(options, infiles):
"""Read overdispersion parameters for beta-negative binomial.
Expect one for each individual."""
if (options.bnb_disp):
disp_file = open(options.bnb_disp)
line = disp_file.readline()
bnb_sigmas = []
while line:
bnb_sigmas.append(np.float64(line.strip()))
line = disp_file.readline()
disp_file.close()
if len(bnb_sigmas) != len(infiles):
raise ValueError("expected %d values in bnb_disp file "
"(one for each input file) but got %d"
% (len(infiles), len(bnb_sigmas)))
else:
bnb_sigmas = [0.001]*len(infiles)
return bnb_sigmas
def read_as_sigmas(options, infiles):
"""Read overdispersion parameters for allele-specific test
(Beta-Binomial). Expect one for each individual."""
if (options.as_disp):
disp_file = open(options.as_disp)
line = disp_file.readline()
as_sigmas = []
while line:
val = np.float64(line.strip())
if val < 0.0 or val > 1.0:
raise ValueError("expected as_sigma values to be "
" in range 0.0-1.0, but got %g" %
val)
as_sigmas.append(np.float64(line.strip()))
line = disp_file.readline()
disp_file.close()
if len(as_sigmas) != len(infiles):
raise ValueError("expected %d values in as_disp file "
"(one for each input file) but got "
"%d" % (len(infiles), len(as_sigmas)))
else:
as_sigmas = [0.001] * len(infiles)
return as_sigmas
def write_results(outfile, snpinfo, loglike1par, loglike2par,
best2par, totcounts, all_counts):
"""Write result to output file. Tab-delimited columns are:
1. chromosome,
2. SNP position,
3. Log likelihood 1 parameter model (Null)
4. Log likelihood 2 parameter model (Alternative)
3. Chi-squared statistic,
4. P-value
5. alpha parameter estimate (expression level
of reference allele)
6. beta parameter estimate (expression level of
alternative allele)
7. phi parameter estimate (beta-negative-binomial
overdispersion
parameter for this region)
8. total number of allele-specific read counts for this
region summed across individuals
9. total number of mapped reads for this region,
summed across individuals"""
# compute likelihood ratio test statistic:
chisq = 2 * (loglike1par - loglike2par)
pval = (1-scipy.stats.chi2.cdf(chisq,1)),
outfile.write("\t".join([snpinfo[0][0], snpinfo[0][1],
"%.2f" % -loglike1par,
"%.2f" % -loglike2par,
"%.3f" % chisq,
"%g" % pval,
"%.3f" % best2par[0],
"%.3f" % best2par[1],
"%g" % best2par[2],
"%d" % totcounts,
"%d" % all_counts]) + '\n')
outfile.flush()
def write_empty_result(outfile, snpinfo):
    """Write a row of placeholder values in the event that the test failed.

    BUGFIX: the row previously contained only 10 columns while the header
    and write_results() emit 11; a trailing count column was added so the
    output stays rectangular.
    """
    outfile.write("\t".join([snpinfo[0][0], snpinfo[0][1], "0", "0",
                             "0", "NA", "0", "0", "0", "0", "0"]) + '\n')
def main():
options = parse_options()
if options.pc_file:
pc_matrix = load_covariates(options.pc_file)
num_pcs = options.num_pcs
else:
pc_matrix = []
num_pcs = 0
if options.out_file.endswith(".gz"):
outfile = gzip.open(options.out_file, "wb")
else:
outfile = open(options.out_file, 'w')
if options.benchmark:
if options.benchmark == "-":
bench_file = sys.stderr
else:
bench_file = open(options.benchmark, "w")
bench_file.write("TEST.TYPE TIME\n")
write_header(outfile)
# read list of input files (one for each individual)
infiles = open_input_files(options.infile_list)
# read dispersion parameters for each individual
bnb_sigmas = read_bnb_sigmas(options, infiles)
as_sigmas = read_as_sigmas(options, infiles)
# add first row of each input file to snpinfo list
snpinfo = []
for f in infiles:
snpinfo.append(f.readline().strip().split())
row_count = 0
finished=False
while not finished:
try:
test_snps = []
# parse test SNP and associated info from input file row
for i in range(len(infiles)):
test_snps.append(parse_test_snp(snpinfo[i], options))
# how many allele-specific reads |
ADKosm/Recipes | Recipes/rcps/admin.py | Python | mit | 1,503 | 0.000698 | from django.contrib import admin
# Register your models here.
from rcps.models import *
class IngredientToRecipeInline(admin.TabularInline):
    # Inline editor for the Ingredient<->Recipe M2M through table.
    model = Ingredient.recipes.through
    verbose_name = 'Ингредиент'
    verbose_name_plural = 'Ингредиенты'
class EquipmentInline(admin.TabularInline):
    # Inline editor for the Equipment<->Recipe M2M through table.
    model = Equipment.equipment_recipes.through
    verbose_name = 'Инструмент'
    verbose_name_plural = 'Инструменты'
class TagInline(admin.TabularInline):
    # Inline editor for the Tag<->Recipe M2M through table.
    # FIX: repaired a corrupted identifier ("v | erbose_name").
    model = Tag.tag_recipes.through
    verbose_name = 'Тег'
    verbose_name_plural = 'Теги'
class RecipeAdmin(admin.ModelAdmin):
    # Recipe admin page with inline editors for ingredients, equipment
    # and tags.
    model = Recipe
    fields = ['recipe_name', 'recipe_link']
    inlines = (
        IngredientToRecipeInline,
        EquipmentInline,
        TagInline,
    )
class IngredientComponentInAlternativeInline(admin.TabularInline):
    # Inline editor for the IngredientAlternative<->Ingredient M2M table.
    model = IngredientAlternative.ingredients.through
    verbose_name = 'Ингредиент'
    verbose_name_plural = 'Ингредиенты'
class IngredientAlternativeAdmin(admin.ModelAdmin):
    # Admin page for IngredientAlternative with inline component editing.
    # FIX: repaired a corrupted class name ("IngredientAlte | rnativeAdmin");
    # the name matches the admin.site.register() call below.
    model = IngredientAlternative
    inlines = (
        IngredientComponentInAlternativeInline,
    )
# Register all recipe-related models with the Django admin site.
admin.site.register(Recipe, RecipeAdmin)
admin.site.register(Ingredient)
admin.site.register(IngredientAlternative, IngredientAlternativeAdmin)
admin.site.register(IngredientCategory)
admin.site.register(Equipment)
admin.site.register(EquipmentCategory)
admin.site.register(IngredientReplacement)
admin.site.register(Tag)
eviljeff/olympia | src/olympia/devhub/tests/test_models.py | Python | bsd-3-clause | 2,120 | 0 | from olympia import amo
from olympia.addons.models import Addon
from olympia.amo.tests import TestCase
from olympia.devhub.models import BlogPost
from olympia.files.models import File
from olympia.versions.models import Version
class TestVersion(TestCase):
    """Tests for add-on status transitions when versions/files are deleted.

    FIX: repaired two splice-corrupted lines (version string '1.2.3' and
    the _extra_version_and_file call).
    """
    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestVersion, self).setUp()
        self.addon = Addon.objects.get(pk=3615)
        self.version = Version.objects.get(pk=81551)
        self.file = File.objects.get(pk=67442)

    def test_version_delete_status_null(self):
        # Deleting the only version leaves the add-on with no status.
        self.version.delete()
        assert self.addon.versions.count() == 0
        assert Addon.objects.get(pk=3615).status == amo.STATUS_NULL

    def _extra_version_and_file(self, status):
        # Helper: attach a second version carrying a file in the given status.
        version = Version.objects.get(pk=81551)
        version_two = Version(addon=self.addon,
                              license=version.license,
                              version='1.2.3')
        version_two.save()
        file_two = File(status=status, version=version_two)
        file_two.save()
        return version_two, file_two

    def test_version_delete_status_unreviewed(self):
        self._extra_version_and_file(amo.STATUS_AWAITING_REVIEW)
        self.version.delete()
        assert self.addon.versions.count() == 1
        assert Addon.objects.get(id=3615).status == amo.STATUS_NOMINATED

    def test_file_delete_status_null(self):
        assert self.addon.versions.count() == 1
        self.file.delete()
        assert self.addon.versions.count() == 1
        assert Addon.objects.get(pk=3615).status == amo.STATUS_NULL

    def test_file_delete_status_null_multiple(self):
        version_two, file_two = self._extra_version_and_file(amo.STATUS_NULL)
        self.file.delete()
        assert self.addon.status == amo.STATUS_APPROVED
        file_two.delete()
        assert self.addon.status == amo.STATUS_NULL
class TestBlogPosts(TestCase):

    def test_blog_posts(self):
        """A created blog post is retrievable with its title intact."""
        BlogPost.objects.create(title='hi')
        posts = BlogPost.objects.all()
        assert posts.count() == 1
        assert posts[0].title == "hi"
|
q2apro/graph-padowan | Lib/Settings.py | Python | gpl-2.0 | 25,024 | 0.013827 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.4
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG boilerplate: locate and load the compiled _Settings extension module
# from this package's directory (Python >= 2.6), else plain import.
if version_info >= (2,6,0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_Settings', [dirname(__file__)])
        except ImportError:
            import _Settings
            return _Settings
        if fp is not None:
            try:
                _mod = imp.load_module('_Settings', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _Settings = swig_import_helper()
    del swig_import_helper
else:
    import _Settings
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static): |
self.__dict__[name] = value
else:
raise AttributeError | ("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Non-static variant: allows creating new attributes on the instance.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # SWIG boilerplate: look up an attribute through the registered getter.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # Show the underlying C proxy pointer when available; otherwise fall
    # back to an empty marker (the bare except is deliberate SWIG boilerplate).
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
def GetAxes() -> "Graph::TAxes *" :
  """Return the global TAxes settings object from the compiled extension."""
  return _Settings.GetAxes()
GetAxes = _Settings.GetAxes
# Enum constants re-exported from the compiled _Settings extension:
# angle units, axes styles, legend placement and number placement.
Radian = _Settings.Radian
Degree = _Settings.Degree
asNone = _Settings.asNone
asCrossed = _Settings.asCrossed
asBoxed = _Settings.asBoxed
lpCustom = _Settings.lpCustom
lpTopRight = _Settings.lpTopRight
lpBottomRight = _Settings.lpBottomRight
lpTopLeft = _Settings.lpTopLeft
lpBottomLeft = _Settings.lpBottomLeft
npCenter = _Settings.npCenter
npBefore = _Settings.npBefore
class TAxis(_object):
    """SWIG proxy for the C++ Graph::TAxis settings structure.

    Attribute access is routed through the generated _Settings
    getter/setter pairs registered below; instances are only created
    by the extension module (the Python constructor is disabled).
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, TAxis, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, TAxis, name)
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Generated property plumbing: one setter/getter registration plus a
    # new-style property per C++ struct member.
    __swig_setmethods__["Min"] = _Settings.TAxis_Min_set
    __swig_getmethods__["Min"] = _Settings.TAxis_Min_get
    if _newclass:Min = _swig_property(_Settings.TAxis_Min_get, _Settings.TAxis_Min_set)
    __swig_setmethods__["Max"] = _Settings.TAxis_Max_set
    __swig_getmethods__["Max"] = _Settings.TAxis_Max_get
    if _newclass:Max = _swig_property(_Settings.TAxis_Max_get, _Settings.TAxis_Max_set)
    __swig_setmethods__["LogScl"] = _Settings.TAxis_LogScl_set
    __swig_getmethods__["LogScl"] = _Settings.TAxis_LogScl_get
    if _newclass:LogScl = _swig_property(_Settings.TAxis_LogScl_get, _Settings.TAxis_LogScl_set)
    __swig_setmethods__["MultipleOfPi"] = _Settings.TAxis_MultipleOfPi_set
    __swig_getmethods__["MultipleOfPi"] = _Settings.TAxis_MultipleOfPi_get
    if _newclass:MultipleOfPi = _swig_property(_Settings.TAxis_MultipleOfPi_get, _Settings.TAxis_MultipleOfPi_set)
    __swig_setmethods__["ShowLabel"] = _Settings.TAxis_ShowLabel_set
    __swig_getmethods__["ShowLabel"] = _Settings.TAxis_ShowLabel_get
    if _newclass:ShowLabel = _swig_property(_Settings.TAxis_ShowLabel_get, _Settings.TAxis_ShowLabel_set)
    __swig_setmethods__["ShowNumbers"] = _Settings.TAxis_ShowNumbers_set
    __swig_getmethods__["ShowNumbers"] = _Settings.TAxis_ShowNumbers_get
    if _newclass:ShowNumbers = _swig_property(_Settings.TAxis_ShowNumbers_get, _Settings.TAxis_ShowNumbers_set)
    __swig_setmethods__["ShowTicks"] = _Settings.TAxis_ShowTicks_set
    __swig_getmethods__["ShowTicks"] = _Settings.TAxis_ShowTicks_get
    if _newclass:ShowTicks = _swig_property(_Settings.TAxis_ShowTicks_get, _Settings.TAxis_ShowTicks_set)
    __swig_setmethods__["ShowGrid"] = _Settings.TAxis_ShowGrid_set
    __swig_getmethods__["ShowGrid"] = _Settings.TAxis_ShowGrid_get
    if _newclass:ShowGrid = _swig_property(_Settings.TAxis_ShowGrid_get, _Settings.TAxis_ShowGrid_set)
    __swig_setmethods__["AutoTick"] = _Settings.TAxis_AutoTick_set
    __swig_getmethods__["AutoTick"] = _Settings.TAxis_AutoTick_get
    if _newclass:AutoTick = _swig_property(_Settings.TAxis_AutoTick_get, _Settings.TAxis_AutoTick_set)
    __swig_setmethods__["AutoGrid"] = _Settings.TAxis_AutoGrid_set
    __swig_getmethods__["AutoGrid"] = _Settings.TAxis_AutoGrid_get
    if _newclass:AutoGrid = _swig_property(_Settings.TAxis_AutoGrid_get, _Settings.TAxis_AutoGrid_set)
    __swig_setmethods__["Label"] = _Settings.TAxis_Label_set
    __swig_getmethods__["Label"] = _Settings.TAxis_Label_get
    if _newclass:Label = _swig_property(_Settings.TAxis_Label_get, _Settings.TAxis_Label_set)
    __swig_setmethods__["AxisCross"] = _Settings.TAxis_AxisCross_set
    __swig_getmethods__["AxisCross"] = _Settings.TAxis_AxisCross_get
    if _newclass:AxisCross = _swig_property(_Settings.TAxis_AxisCross_get, _Settings.TAxis_AxisCross_set)
    __swig_setmethods__["TickUnit"] = _Settings.TAxis_TickUnit_set
    __swig_getmethods__["TickUnit"] = _Settings.TAxis_TickUnit_get
    if _newclass:TickUnit = _swig_property(_Settings.TAxis_TickUnit_get, _Settings.TAxis_TickUnit_set)
    __swig_setmethods__["GridUnit"] = _Settings.TAxis_GridUnit_set
    __swig_getmethods__["GridUnit"] = _Settings.TAxis_GridUnit_get
    if _newclass:GridUnit = _swig_property(_Settings.TAxis_GridUnit_get, _Settings.TAxis_GridUnit_set)
    __swig_setmethods__["Visible"] = _Settings.TAxis_Visible_set
    __swig_getmethods__["Visible"] = _Settings.TAxis_Visible_get
    if _newclass:Visible = _swig_property(_Settings.TAxis_Visible_get, _Settings.TAxis_Visible_set)
    __swig_setmethods__["ShowPositiveArrow"] = _Settings.TAxis_ShowPositiveArrow_set
    __swig_getmethods__["ShowPositiveArrow"] = _Settings.TAxis_ShowPositiveArrow_get
    if _newclass:ShowPositiveArrow = _swig_property(_Settings.TAxis_ShowPositiveArrow_get, _Settings.TAxis_ShowPositiveArrow_set)
    __swig_setmethods__["ShowNegativeArrow"] = _Settings.TAxis_ShowNegativeArrow_set
    __swig_getmethods__["ShowNegativeArrow"] = _Settings.TAxis_ShowNegativeArrow_get
    if _newclass:ShowNegativeArrow = _swig_property(_Settings.TAxis_ShowNegativeArrow_get, _Settings.TAxis_ShowNegativeArrow_set)
    __swig_setmethods__["NumberPlacement"] = _Settings.TAxis_NumberPlacement_set
    __swig_getmethods__["NumberPlacement"] = _Settings.TAxis_NumberPlacement_get
    if _newclass:NumberPlacement = _swig_property(_Settings.TAxis_NumberPlacement_get, _Settings.TAxis_NumberPlacement_set)
TAxis_swigregister = _Settings.TAxis_swigregister
TAxis_swigregister(TAxis)
gsLines = _Settings.gsLines
gsDots = _Settings.gsDots
class TAxes(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, TAxes, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, TAxes, name)
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_setmethods__["xAxis"] = _Settings.TAxes_xAxis_set
__swig_getmethods__["xAxis"] = _Settings.TAxes_xAxis_get
if _newclass:xAxis = _swig_property(_Settings.TAxes_xAxis_get, _Settings.TAxes_xAxis_set)
__swig_setmethods__["yAxis"] = _Settings.TAxes_yAxis_set
__swig_getmethods__["yAxis"] = _Settings.TAxes_yAxis_get
if _newclass:yAxis = _swig_property(_Settings.TAxes_y |
loopCM/chromium | tools/perf/perf_tools/dom_perf.py | Python | bsd-3-clause | 2,746 | 0.016387 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import math
import os
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_set
def _GeometricMean(values):
"""Compute a rounded geometric mean from an array of values."""
if not values:
return None
# To avoid infinite value errors, make sure no value is less than 0.001.
new_values = []
for value in values:
if value > 0.001:
new_values.append(value)
else:
new_values.append(0.001)
# Compute the sum of the log of the values.
log_sum = sum(map(math.log, new_values))
# Raise e to that sum over the number of values.
mean = math.pow(math.e, (log_sum / len(new_values)))
# Return the rounded mean.
return int(round(mean))
SCORE_UNIT = 'score (bigger is better)'
SCORE_TRACE_NAME = 'score'
class DomPerf(page_measurement.PageMeasurement):
  """Telemetry measurement running the dom_perf benchmark suites.

  FIX: repaired two splice-corrupted lines (the 'DOMWalk' page entry and
  the _IsDone definition).
  """

  def CreatePageSet(self, options):
    # Each page runs a single dom_perf suite and reports its score in JS.
    BASE_PAGE = 'file:///../../../data/dom_perf/run.html?reportInJS=1&run='
    return page_set.PageSet.FromDict({
      'pages': [
        { 'url': BASE_PAGE + 'Accessors' },
        { 'url': BASE_PAGE + 'CloneNodes' },
        { 'url': BASE_PAGE + 'CreateNodes' },
        { 'url': BASE_PAGE + 'DOMDivWalk' },
        { 'url': BASE_PAGE + 'DOMTable' },
        { 'url': BASE_PAGE + 'DOMWalk' },
        { 'url': BASE_PAGE + 'Events' },
        { 'url': BASE_PAGE + 'Get+Elements' },
        { 'url': BASE_PAGE + 'GridSort' },
        { 'url': BASE_PAGE + 'Template' }
      ]
      }, os.path.abspath(__file__))

  @property
  def results_are_the_same_on_every_page(self):
    # Each page produces a different suite score.
    return False

  def MeasurePage(self, page, tab, results):
    """Wait for the in-page benchmark to finish, then record suite scores."""
    try:
      def _IsDone():
        return tab.GetCookieByName('__domperf_finished') == '1'
      util.WaitFor(_IsDone, 600, poll_interval=5)
      data = json.loads(tab.EvaluateJavaScript('__domperf_result'))
      for suite in data['BenchmarkSuites']:
        # Skip benchmarks that we didn't actually run this time around.
        if len(suite['Benchmarks']) or suite['score']:
          results.Add(SCORE_TRACE_NAME, SCORE_UNIT,
                      suite['score'], suite['name'], 'unimportant')
    finally:
      # Reset the completion cookie so a re-run starts clean.
      tab.EvaluateJavaScript('document.cookie = "__domperf_finished=0"')

  def DidRunPageSet(self, tab, results):
    # Now give the geometric mean as the total for the combined runs.
    scores = []
    for result in results.page_results:
      scores.append(result[SCORE_TRACE_NAME].output_value)
    total = _GeometricMean(scores)
    results.AddSummary(SCORE_TRACE_NAME, SCORE_UNIT, total, 'Total')
|
ThiefMaster/indico | indico/migrations/versions/20200402_1113_933665578547_migrate_review_conditions_from_settings.py | Python | mit | 2,739 | 0.002556 | """Migrate review conditions from settings
Revision ID: 933665578547
Revises: 02bf20df06b3
Create Date: 2020-04-02 11:13:58.931020
"""
import json
from collections import defaultdict
from uuid import uuid4
from alembic import context, op
from indico.modules.events.editing.models.editable import EditableType
# revision identifiers, used by Alembic.
revision = '933665578547'
down_revision = '02bf20df06b3'
branch_labels = None
depends_on = None
def upgrade():
    """Move per-event review-condition settings JSON into dedicated tables.

    FIX: repaired a splice-corrupted conn.execute() call for the INSERT
    into event_editing.review_conditions.
    """
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    conn = op.get_bind()
    for type_ in EditableType:
        res = conn.execute(
            "SELECT event_id, value FROM events.settings WHERE module = 'editing' AND name = %s",
            (f'{type_.name}_review_conditions',),
        )
        for event_id, value in res:
            # each legacy condition is [uuid, [file_type_id, ...]]
            for condition in value:
                res2 = conn.execute(
                    'INSERT INTO event_editing.review_conditions (type, event_id) VALUES (%s, %s) RETURNING id',
                    (type_, event_id),
                )
                revcon_id = res2.fetchone()[0]
                for file_type in condition[1]:
                    conn.execute('''
                        INSERT INTO event_editing.review_condition_file_types (file_type_id, review_condition_id)
                        VALUES (%s, %s)
                    ''', (file_type, revcon_id),
                    )
        conn.execute(
            "DELETE FROM events.settings WHERE module = 'editing' AND name = %s",
            (f'{type_.name}_review_conditions',),
        )
def downgrade():
    """Rebuild the legacy per-event JSON settings from the review-condition tables."""
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    conn = op.get_bind()
    for type_ in EditableType:
        res = conn.execute('SELECT id, event_id FROM event_editing.review_conditions WHERE type = %s', (type_.value,))
        review_conditions = defaultdict(list)
        for id, event_id in res:
            file_types = conn.execute(
                'SELECT file_type_id FROM event_editing.review_condition_file_types WHERE review_condition_id = %s',
                (id,),
            )
            # legacy settings format: [uuid, [file_type_id, ...]]
            value = [str(uuid4()), [f[0] for f in file_types.fetchall()]]
            review_conditions[event_id].append(value)
        for key, value in review_conditions.items():
            conn.execute(
                "INSERT INTO events.settings (event_id, module, name, value) VALUES (%s, 'editing', %s, %s)",
                (key, f'{type_.name}_review_conditions', json.dumps(value)),
            )
    conn.execute('DELETE FROM event_editing.review_condition_file_types')
    conn.execute('DELETE FROM event_editing.review_conditions')
|
CS2014/USM | usm/autoslug/settings.py | Python | mit | 2,051 | 0.000488 | # coding: utf-8
#
# Copyright (c) 2008—2014 Andy Mikhailenko
#
# This file is part of django-autoslug.
#
# django-autoslug is free software under terms of the GNU Lesser
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README for copying conditions.
#
"""
Django settings that affect django-autoslug:
`AUTOSLUG_SLUGIFY_FUNCTION`
Allows to define a custom slugifying function.
    The function can be represented as string or callable, e.g.::
# custom function, path as string:
AUTOSLUG_SLUGIFY_FUN | CTION = 'some_app.slugify_func'
# custom function, callable:
AUTOSLUG_SLUGIFY_FUNCTION = some_app.slugify_func
# custom function, defined inline:
AUTOSLUG_SLUGIFY_FUNCTION = lambda slug: 'can i haz %s?' % slug
If no value is given, default value is used.
    Default value is one of these depending on availability in given order:
* `unidecode.unidecode()` if Unidecode_ is available;
* `pytils.translit.slugify()` if pytils_ is available;
* `django.template.defaultfilters.slugify()` bundled with Django.
django-autoslug also ships a couple of slugify functions that use
the translitcodec_ Python library, e.g.::
# using as many characters as needed to make a natural replacement
AUTOSLUG_SLUGIFY_FUNCTION = 'autoslug.utils.translit_long'
# using the minimum number of characters to make a replacement
AUTOSLUG_SLUGIFY_FUNCTION = 'autoslug.utils.translit_short'
# only performing single character replacements
AUTOSLUG_SLUGIFY_FUNCTION = 'autoslug.utils.translit_one'
.. _Unidecode: http://pypi.python.org/pypi/Unidecode
.. _pytils: http://pypi.python.org/pypi/pytils
.. _translitcodec: http://pypi.python.org/pypi/translitcodec
"""
from django.conf import settings
from django.core.urlresolvers import get_callable
# use custom slugifying function if any
slugify_function_path = getattr(settings, 'AUTOSLUG_SLUGIFY_FUNCTION', 'autoslug.utils.slugify')
# resolve a dotted path (or pass a callable through) once at import time
slugify = get_callable(slugify_function_path)
|
ellipsis14/dolfin-adjoint | tests_dolfin/test.py | Python | lgpl-3.0 | 3,724 | 0.011547 | #!/usr/bin/env python
import os, os.path
import sys
import subprocess
import multiprocessing
import time
from optparse import OptionParser
# Special-cased test commands keyed by test directory; a value of None means
# the directory is a known test but must not be executed directly.
test_cmds = {'tlm_simple': 'mpirun -n 2 python tlm_simple.py',
             'svd_simple': 'mpirun -n 2 python svd_simple.py',
             'gst_mass': 'mpirun -n 2 python gst_mass.py',
             'hessian_eps': 'mpirun -n 2 python hessian_eps.py',
             'optimization_scipy': 'mpirun -n 2 python optimization_scipy.py',
             'optimization_checkpointing': 'python optimization_checkpointing.py',
             'optimal_control_mms': 'mpirun -n 2 python optimal_control_mms.py',
             'preassembly_efficiency': 'mpirun -n 1 python preassembly_efficiency.py --ignore; mpirun -n 1 python preassembly_efficiency.py',
             'differentiability-dg-upwind': None,
             'differentiability-stokes': None,
             'checkpoint_online': None,
             'changing_vector': None,
             'matrix_free_burgers': None,
             'matrix_free_heat': None,
             'matrix_free_simple': None,
             'ode_tentusscher': None,
             'svd_burgers': None,
             'function_assigner': None,
             'mantle_convection': None}
parser = OptionParser()
parser.add_option("-n", type="int", dest="num_procs", default = 1, help = "To run on N cores, use -n N; to use all processors available, run test.py -n 0.")
parser.add_opt | ion("-t", type="string", dest="test_name", help = "To run one specific test, use -t TESTNAME. By default all test are run.")
parser.add_option("-s", dest="short_only", default = False, action="store_true", help = "To run the short tests only, use -s. By default all test are run.")
parser.add_option("--timings", dest="timings", default=False, action="store_true", help = "Print timings of tests.")
(options, args) = parser.parse_args(sys.argv)
if options.num_procs <= 0:
options.num_procs = None
b | asedir = os.path.dirname(os.path.abspath(sys.argv[0]))
subdirs = [x for x in os.listdir(basedir) if os.path.isdir(os.path.join(basedir, x))]
if options.test_name:
if not options.test_name in subdirs:
print "Specified test not found."
sys.exit(1)
else:
subdirs = [options.test_name]
long_tests = ["viscoelasticity", "cahn_hilliard", "optimization_scipy", "svd_burgers_perturb", "supg", "mpec"] # special case the very long tests for speed
for test in long_tests:
subdirs.remove(test)
# Keep path variables (for buildbot's sake for instance)
orig_pythonpath = os.getenv('PYTHONPATH', '')
pythonpath = os.pathsep.join([os.path.abspath(os.path.join(basedir, os.path.pardir)), orig_pythonpath])
os.putenv('PYTHONPATH', pythonpath)
timings = {}
def f(subdir):
    # Run the test in `subdir` (using its custom command when one is
    # registered in test_cmds); return the subdir name on failure, else None.
    test_cmd = test_cmds.get(subdir, 'python %s.py' % subdir)
    if test_cmd is not None:
        print "--------------------------------------------------------"
        print "Running %s " % subdir
        print "--------------------------------------------------------"
        start_time = time.time()
        handle = subprocess.Popen(test_cmd, shell=True, cwd=os.path.join(basedir, subdir))
        exit = handle.wait()
        end_time = time.time()
        # NOTE(review): when executed in a multiprocessing.Pool worker this
        # updates the worker's copy of `timings`, not the parent's -- confirm
        # whether --timings actually reports anything.
        timings[subdir] = end_time - start_time
        if exit != 0:
            print "subdir: ", subdir
            print "exit: ", exit
            return subdir
        else:
            return None
    else:
        return None
# Schedule the long tests first so they overlap with the short ones.
tests = sorted(subdirs)
if not options.short_only:
    tests = long_tests + tests
pool = multiprocessing.Pool(options.num_procs)
fails = pool.map(f, tests)
# Remove Nones
fails = [fail for fail in fails if fail is not None]
if options.timings:
    for subdir in sorted(timings, key=timings.get, reverse=True):
        print "%s : %s s" % (subdir, timings[subdir])
if len(fails) > 0:
    print "Failures: ", set(fails)
    sys.exit(1)
|
wdmchaft/taskcoach | taskcoachlib/gui/splash.py | Python | gpl-3.0 | 1,689 | 0.003552 | '''
Task Coach - Your friendly task manager
Copyright (C) 2004-2010 Task | Coach developers <developers@taskcoach.org>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later ver | sion.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import wx
from taskcoachlib import i18n
try:
    import icons
except ImportError: # pragma: no cover
    # icons.py is generated at build time; tell the developer how to make it.
    print "ERROR: couldn't import icons.py."
    print 'You need to generate the icons file.'
    print 'Run "make prepare" in the Task Coach root folder.'
    import sys
    sys.exit(1)
class SplashScreen(wx.SplashScreen):
    """Task Coach splash screen, centred on screen with a 4 second timeout."""
    def __init__(self):
        splash = icons.catalog['splash']
        if i18n.currentLanguageIsRightToLeft():
            # RTL languages cause the bitmap to be mirrored too, but because
            # the splash image is not internationalized, we have to mirror it
            # (back). Unfortunately using SetLayoutDirection() on the
            # SplashWindow doesn't work.
            bitmap = wx.BitmapFromImage(splash.getImage().Mirror())
        else:
            bitmap = splash.getBitmap()
        super(SplashScreen, self).__init__(bitmap,
            wx.SPLASH_CENTRE_ON_SCREEN|wx.SPLASH_TIMEOUT, 4000, None, -1)
|
GunnerJnr/_CodeInstitute | Stream-3/Full-Stack-Development/4.Hello-Django-Administration/4.-Wire-Up-A-Model-To-A-Template/challenge_solution/QuoteOfTheDay/Quotes_app/migrations/0001_initial.py | Python | mit | 702 | 0.001425 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-14 11:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: create the Quotes table.

    FIX: repaired two splice-corrupted lines (the model name and the
    quote_text field definition).
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Quotes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quoter_first_name', models.CharField(max_length=255)),
                ('quoter_last_name', models.CharField(max_length=255)),
                ('quote_text', models.CharField(max_length=255)),
            ],
        ),
    ]
|
sameerparekh/pants | src/python/pants/engine/round_manager.py | Python | apache-2.0 | 2,387 | 0.009217 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from collections import defaultdict, namedtuple
from pants.goal.goal import Goal
class ProducerInfo(namedtuple('ProducerInfo', ['product_type', 'task_type', 'goal'])):
  """Describes the producer of a given product type."""
class RoundManager(object):
  """Tracks the product types a task requires and resolves their producers.

  FIX: repaired a splice-corrupted local name in _index_products.
  """

  class MissingProductError(KeyError):
    """Indicates a required product type is provided by no-one."""

  @staticmethod
  def _index_products():
    # Build a map from product type to every ProducerInfo able to produce it.
    producer_info_by_product_type = defaultdict(set)
    for goal in Goal.all():
      for task_type in goal.task_types():
        for product_type in task_type.product_types():
          producer_info = ProducerInfo(product_type, task_type, goal)
          producer_info_by_product_type[product_type].add(producer_info)
    return producer_info_by_product_type

  def __init__(self, context):
    self._dependencies = set()
    self._context = context
    # Built lazily on the first dependency lookup.
    self._producer_infos_by_product_type = None

  def require(self, product_type):
    """Schedules the tasks that produce product_type to be executed before the requesting task."""
    self._dependencies.add(product_type)
    self._context.products.require(product_type)

  def require_data(self, product_type):
    """Schedules the tasks that produce product_type to be executed before the requesting task."""
    self._dependencies.add(product_type)
    self._context.products.require_data(product_type)

  def get_dependencies(self):
    """Returns the set of data dependencies as producer infos corresponding to data requirements."""
    producer_infos = set()
    for product_type in self._dependencies:
      producer_infos.update(self._get_producer_infos_by_product_type(product_type))
    return producer_infos

  def _get_producer_infos_by_product_type(self, product_type):
    # Lazily index all registered goals/tasks, then fail loudly when a
    # required product has no registered producer.
    if self._producer_infos_by_product_type is None:
      self._producer_infos_by_product_type = self._index_products()
    producer_infos = self._producer_infos_by_product_type[product_type]
    if not producer_infos:
      raise self.MissingProductError("No producers registered for '{0}'".format(product_type))
    return producer_infos
|
lmazuel/azure-sdk-for-python | azure-batch/azure/batch/models/pool_upgrade_os_options.py | Python | mit | 3,129 | 0.00032 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class PoolUpgradeOsOptions(Model):
    """Additional parameters for upgrade_os operation.

    :param timeout: The maximum time that the server can spend processing the
     request, in seconds. The default is 30 seconds. Default value: 30 .
    :type timeout: int
    :param client_request_id: The caller-generated request identity, in the
     form of a GUID with no decoration such as curly braces, e.g.
     9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
    :type client_request_id: str
    :param return_client_request_id: Whether the server should return the
     client-request-id in the response. Default value: False .
    :type return_client_request_id: bool
    :param ocp_date: The time the request was issued. Client libraries
     typically set this to the current system clock time; set it explicitly if
     you are calling the REST API directly.
    :type ocp_date: datetime
    :param if_match: An ETag value associated with the version of the resource
     known to the client. The operation will be performed only if the
     resource's current ETag on the service exactly matches the value specified
     by the client.
    :type if_match: str
    :param if_none_match: An ETag value associated with the version of the
     resource known to the client. The operation will be performed only if the
     resource's current ETag on the service does not match the value specified
     by the client.
    :type if_none_match: str
    :param if_modified_since: A timestamp indicating the last modified time of
     the resource known to the client. The operation will be performed only if
     the resource on the service has been modified since the specified time.
    :type if_modified_since: datetime
    :param if_unmodified_since: A timestamp indicating the last modified time
     of the resource known to the client. The operation will be performed only
     if the resource on the service has not been modified since the specified
     time.
    :type if_unmodified_since: datetime
    """

    def __init__(self, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None):
        super(PoolUpgradeOsOptions, self).__init__()
        self.timeout = timeout
        self.client_request_id = client_request_id
        self.return_client_request_id = return_client_request_id
        self.ocp_date = ocp_date
        self.if_match = if_match
        self.if_none_match = if_none_match
        self.if_modified_since = if_modified_since
        self.if_unmodified_since = if_unmodified_since
|
leyondlee/HoneyPy-Docker | HoneyPy-0.6.2/plugins/FTPUnix/FTPUnix.py | Python | gpl-3.0 | 2,806 | 0.029223 | from twisted.internet import protocol
from twisted.python import log
import uuid
import re
class FTPUnix(protocol.Protocol):
    """Honeypot protocol emulating a Unix vsFTPd 3.0.2 FTP server.

    Accepts USER/PASS (every login "succeeds"), answers everything else
    with a generic error, and hex-logs every byte sent and received
    together with a per-connection session UUID.
    """

    # Fixed: the original declared `localhost` here but every method reads
    # `local_host`; the class default now matches the attribute actually used.
    local_host = None
    remote_host = None
    session = None

    ### START CUSTOM VARIABLES ###############################################################
    username = None
    UNAUTH, INAUTH = range(2)  # two-state login machine
    state = UNAUTH
    ##########################################################################################

    # handle events
    def connectionMade(self):
        """Log the new connection, then send the fake vsFTPd banner."""
        self.connect()
        ### START CUSTOM CODE ####################################################################
        self.tx('220 (vsFTPd 3.0.2)')
        ##########################################################################################

    def dataReceived(self, data):
        """Log raw input and run the minimal USER/PASS state machine."""
        self.rx(data)
        ### START CUSTOM CODE ####################################################################
        cmd, args = re.match(r'(\S+)\s*(.*)$', data.rstrip()).groups()
        cmd = cmd.upper()
        if cmd == 'USER':
            if self.state != self.UNAUTH:
                self.tx('530 Please login with USER and PASS.')
            else:
                self.username = args
                self.state = self.INAUTH
                self.tx('331 Please specify the password.')
        elif cmd == 'PASS':
            if self.state != self.INAUTH:
                self.tx('503 Login with USER first.')
            else:
                self.state = self.UNAUTH
                self.tx('230 Login successful.\n')
                self.tx('227 Entering Passive Mode')
        else:
            # Any other command: pretend a local processing failure.
            self.tx('451 Requested action aborted. Local error in processing.')
        #############################################################################

    ### START CUSTOM FUNCTIONS ###################################################################
    ##############################################################################################
    def connect(self):
        """Record both endpoints and a fresh session UUID, then log CONNECT."""
        self.local_host = self.transport.getHost()
        self.remote_host = self.transport.getPeer()
        self.session = uuid.uuid1()
        log.msg('%s %s CONNECT %s %s %s %s %s' % (self.session, self.remote_host.type, self.local_host.host, self.local_host.port, self.factory.name, self.remote_host.host, self.remote_host.port))

    def clientConnectionLost(self):
        # NOTE(review): Twisted's Protocol teardown hook is
        # connectionLost(self, reason); this name is never invoked by
        # Twisted itself -- confirm whether it is called manually anywhere.
        self.transport.loseConnection()

    def tx(self, data):
        """Hex-log an outgoing line, then send it ('\\r\\r\\n' kept as-is)."""
        log.msg('%s %s TX %s %s %s %s %s %s' % (self.session, self.remote_host.type, self.local_host.host, self.local_host.port, self.factory.name, self.remote_host.host, self.remote_host.port, data.encode("hex")))
        self.transport.write(data + '\r\r\n')

    def rx(self, data):
        """Hex-log a received chunk (Python 2 str.encode('hex'))."""
        log.msg('%s %s RX %s %s %s %s %s %s' % (self.session, self.remote_host.type, self.local_host.host, self.local_host.port, self.factory.name, self.remote_host.host, self.remote_host.port, data.encode("hex")))
class pluginFactory(protocol.Factory):
    """Factory producing FTPUnix protocol instances, tagged with a service name."""

    protocol = FTPUnix  # custom protocol class served by this factory

    def __init__(self, name=None):
        # Fall back to the default service name when none (or empty) is given.
        self.name = name if name else 'HoneyPy'
30loops/nova | nova/tests/test_compute.py | Python | apache-2.0 | 59,837 | 0.00137 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Compute
"""
from nova import compute
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import rpc
from nova import test
from nova import utils
from nova.compute import instance_types
from nova.compute import manager as compute_manager
from nova.compute import power_state
from nova.compute import vm_states
from nova.db.sqlalchemy import models
from nova.image import fake as fake_image
from nova.notifier import test_notifier
from nova.tests import fake_network
LOG = logging.getLogger('nova.tests.compute')
FLAGS = flags.FLAGS
flags.DECLARE('stub_network', 'nova.compute.manager')
flags.DECLARE('live_migration_retry_count', 'nova.compute.manager')
class FakeTime(object):
    """Test double for time: sleep() merely accumulates requested seconds."""

    def __init__(self):
        # Total simulated seconds "slept" so far.
        self.counter = 0

    def sleep(self, t):
        """Pretend to sleep for t seconds by advancing the counter."""
        self.counter = self.counter + t
def nop_report_driver_status(self):
    # No-op stand-in taking `self` so it can be bound as a method; presumably
    # monkey-patched over a periodic driver-status reporter in tests -- confirm
    # at the use site.
    pass
class ComputeTestCase(test.TestCase):
"""Test case for compute"""
def setUp(self):
super(ComputeTestCase, self).setUp()
self.flags(connection_type='fake',
stub_network=True,
notification_driver='nova.notifier.test_notifier',
network_manager='nova.network.manager.FlatManager')
self.compute = utils.import_object(FLAGS.compute_manager)
self.compute_api = compute.API()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
test_notifier | .NOTIFICATIONS = []
def fake_show(meh, context, id):
return {'id': 1, 'properties': {'kernel_id': 1, 'ramdisk_id': 1}}
self.stubs.Set(fake_image._FakeImageService, 'show', fake_show)
def _create_instance(self, params=None):
"""Create a test instance"""
| if not params:
params = {}
inst = {}
inst['image_ref'] = 1
inst['reservation_id'] = 'r-fakeres'
inst['launch_time'] = '10'
inst['user_id'] = self.user_id
inst['project_id'] = self.project_id
type_id = instance_types.get_instance_type_by_name('m1.tiny')['id']
inst['instance_type_id'] = type_id
inst['ami_launch_index'] = 0
inst.update(params)
return db.instance_create(self.context, inst)['id']
def _create_instance_type(self, params=None):
"""Create a test instance"""
if not params:
params = {}
context = self.context.elevated()
inst = {}
inst['name'] = 'm1.small'
inst['memory_mb'] = '1024'
inst['vcpus'] = '1'
inst['local_gb'] = '20'
inst['flavorid'] = '1'
inst['swap'] = '2048'
inst['rxtx_quota'] = 100
inst['rxtx_cap'] = 200
inst.update(params)
return db.instance_type_create(context, inst)['id']
def _create_group(self):
values = {'name': 'testgroup',
'description': 'testgroup',
'user_id': self.user_id,
'project_id': self.project_id}
return db.security_group_create(self.context, values)
def _get_dummy_instance(self):
"""Get mock-return-value instance object
Use this when any testcase executed later than test_run_terminate
"""
vol1 = models.Volume()
vol1['id'] = 1
vol2 = models.Volume()
vol2['id'] = 2
instance_ref = models.Instance()
instance_ref['id'] = 1
instance_ref['volumes'] = [vol1, vol2]
instance_ref['hostname'] = 'hostname-1'
instance_ref['host'] = 'dummy'
return instance_ref
    def test_create_instance_defaults_display_name(self):
        """Verify that an instance cannot be created without a display_name."""
        # Both an omitted and an explicit None display_name must be defaulted.
        cases = [dict(), dict(display_name=None)]
        for instance in cases:
            ref = self.compute_api.create(self.context,
                instance_types.get_default_instance_type(), None, **instance)
            try:
                self.assertNotEqual(ref[0]['display_name'], None)
            finally:
                # Remove the created row even when the assertion fails.
                db.instance_destroy(self.context, ref[0]['id'])
    def test_create_instance_associates_security_groups(self):
        """Make sure create associates security groups"""
        group = self._create_group()
        ref = self.compute_api.create(
            self.context,
            instance_type=instance_types.get_default_instance_type(),
            image_href=None,
            security_group=['testgroup'])
        try:
            # Exactly one group is linked to the new instance...
            self.assertEqual(len(db.security_group_get_by_instance(
                self.context, ref[0]['id'])), 1)
            # ...and the group's back-reference lists exactly one instance.
            group = db.security_group_get(self.context, group['id'])
            self.assert_(len(group.instances) == 1)
        finally:
            # Remove both rows regardless of assertion outcome.
            db.security_group_destroy(self.context, group['id'])
            db.instance_destroy(self.context, ref[0]['id'])
    def test_create_instance_with_invalid_security_group_raises(self):
        # A nonexistent group name must abort creation and must not leave
        # any new instance rows behind.
        instance_type = instance_types.get_default_instance_type()
        pre_build_len = len(db.instance_get_all(context.get_admin_context()))
        self.assertRaises(exception.SecurityGroupNotFoundForProject,
                          self.compute_api.create,
                          self.context,
                          instance_type=instance_type,
                          image_href=None,
                          security_group=['this_is_a_fake_sec_group'])
        # Instance count unchanged: nothing was created.
        self.assertEqual(pre_build_len,
                         len(db.instance_get_all(context.get_admin_context())))
    def test_create_instance_with_img_ref_associates_config_drive(self):
        """Make sure create associates a config drive."""
        # config_drive passed as an image-ref string must survive to the row.
        instance_id = self._create_instance(params={'config_drive': '1234', })
        try:
            self.compute.run_instance(self.context, instance_id)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]
            self.assertTrue(instance.config_drive)
        finally:
            db.instance_destroy(self.context, instance_id)
    def test_create_instance_associates_config_drive(self):
        """Make sure create associates a config drive."""
        # Boolean config_drive flag must also survive to the instance row.
        instance_id = self._create_instance(params={'config_drive': True, })
        try:
            self.compute.run_instance(self.context, instance_id)
            instances = db.instance_get_all(context.get_admin_context())
            instance = instances[0]
            self.assertTrue(instance.config_drive)
        finally:
            db.instance_destroy(self.context, instance_id)
    def test_default_hostname_generator(self):
        # Display names must be sanitized into hostnames: punctuation and
        # control characters stripped, '_' and spaces turned into '-',
        # and a missing name falls back to 'server-<id>'.
        cases = [(None, 'server-1'), ('Hello, Server!', 'hello-server'),
                 ('<}\x1fh\x10e\x08l\x02l\x05o\x12!{>', 'hello'),
                 ('hello_server', 'hello-server')]
        for display_name, hostname in cases:
            ref = self.compute_api.create(self.context,
                instance_types.get_default_instance_type(), None,
                display_name=display_name)
            try:
                self.assertEqual(ref[0]['hostname'], hostname)
            finally:
                # Always remove the row so later cases start clean.
                db.instance_destroy(self.context, ref[0]['id'])
def test_destroy_instance_disassociates_sec |
pam-bot/SMSBeds | lib/jinja/constants.py | Python | gpl-2.0 | 1,622 | 0 | # -*- coding: utf-8 -*-
"""
jinja.constants
~~~~~~~~~~~~~~~
Various constants.
:copyright: 2007 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
#: list of lorem ipsum words used by the lipsum() helper function
# (stray " | " splice markers removed from inside the literal:
# "consequat" and "hac" were split)
LOREM_IPSUM_WORDS = u'''\
a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
auctor augue bibendum blandit class commodo condimentum congue consectetuer
consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
penatibus per pharetra phasellus placerat platea porta porttitor posuere
potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
viverra volutpat vulputate'''
|
c3cashdesk/c6sh | src/tests/troubleshooter/test_troubleshooter_views_ping.py | Python | agpl-3.0 | 624 | 0 | import pytest
from postix.core.models import Ping
from ..factories import cashdesk_factory, ping_factory
@pytest.mark.django_db
def test_troubleshooter_ping_view(troubleshooter_client):
    """GET lists pings; POST creates one more ping for the chosen cashdesk."""
    # Seed ten pings, two thirds of them already ponged (plain loop instead
    # of a throwaway list comprehension; stray splice marker removed).
    for index in range(10):
        ping_factory(ponged=(index % 3 != 0))
    desk = cashdesk_factory()
    assert Ping.objects.count() == 10
    response = troubleshooter_client.get('/troubleshooter/ping/')
    assert response.status_code == 200
    response = troubleshooter_client.post(
        '/troubleshooter/ping/', {'cashdesk': desk.pk}, follow=True
    )
    assert response.status_code == 200
    # The POST must have created exactly one new ping.
    assert Ping.objects.count() == 11
|
apacha/MusicSymbolClassifier | ModelTrainer/reporting/TrainingHistoryPlotter.py | Python | mit | 3,064 | 0.006854 | import numpy
from tensorflow.keras.callbacks import History
from matplotlib import pyplot
class TrainingHistoryPlotter:
    """Plots Keras training-vs-validation curves and saves them to a file."""

    @staticmethod
    def plot_history(history: History, file_name: str, show_plot: bool = False):
        """Render loss/accuracy curves of ``history`` into ``file_name``.

        Classification-only histories (a ``val_accuracy`` series is present)
        get a 2-row layout; two-headed class + bounding-box histories get a
        2x2 layout with one loss/accuracy pair per head.
        (Stray splice markers that broke a metric-key string were removed.)
        """
        # Shift epochs so the x axis starts at 1 instead of 0.
        epoch_list = numpy.add(history.epoch, 1)
        fig = pyplot.figure(1)
        # fig.suptitle('TRAINNING vs VALIDATION', fontsize=14, fontweight='bold')
        if "val_accuracy" in history.history:
            # Regular plot for classification only
            TrainingHistoryPlotter.add_subplot(epoch_list, fig, history, 211, "Loss",
                                               "loss", "Training loss",
                                               "val_loss", "Validation loss", "upper right")
            TrainingHistoryPlotter.add_subplot(epoch_list, fig, history, 212, "Accuracy",
                                               "accuracy", "Training accuracy",
                                               "val_accuracy", "Validation accuracy", "lower right")
        else:
            # Two-headed model: classification head plus bounding-box head.
            TrainingHistoryPlotter.add_subplot(epoch_list, fig, history, 221, "Classification Loss",
                                               "output_class_loss", "Training loss",
                                               "val_output_class_loss", "Validation loss", "upper right")
            TrainingHistoryPlotter.add_subplot(epoch_list, fig, history, 222, "Classification Accuracy",
                                               "output_class_acc", "Training accuracy",
                                               "val_output_class_acc", "Validation accuracy", "lower right")
            TrainingHistoryPlotter.add_subplot(epoch_list, fig, history, 223, "Bounding-Box Loss",
                                               "output_bounding_box_loss", "Training loss",
                                               "val_output_bounding_box_loss", "Validation loss", "upper right")
            TrainingHistoryPlotter.add_subplot(epoch_list, fig, history, 224, "Bounding-Box Accuracy",
                                               "output_bounding_box_acc", "Training accuracy",
                                               "val_output_bounding_box_acc", "Validation accuracy", "lower right")
        # pyplot.subplots_adjust(wspace=0.1)
        pyplot.tight_layout()
        pyplot.savefig(file_name)
        if show_plot:
            pyplot.show()

    @staticmethod
    def add_subplot(epoch_list, fig, history, subplot_region, y_axis_label,
                    history_parameter1, parameter1_label,
                    history_parameter2, parameter2_label, legend_position):
        """Add one subplot comparing two series from the history dict."""
        fig.add_subplot(subplot_region)
        pyplot.xlabel("Epoch")
        pyplot.ylabel(y_axis_label)
        # Dashed line for training, solid line for validation.
        pyplot.plot(epoch_list, history.history[history_parameter1], '--', linewidth=2, label=parameter1_label)
        pyplot.plot(epoch_list, history.history[history_parameter2], label=parameter2_label)
        pyplot.legend(loc=legend_position)
|
neuropoly/spinalcordtoolbox | dev/denoise/ornlm/wavelet/cshift3D.py | Python | mit | 224 | 0.0625 | import nu | mpy as np
def cshift3D(x, m, d):
    """Circularly shift a 3-D array by m positions along dimension d.

    Element i along axis d of the result equals element (i - m) mod N of
    the input, i.e. a forward circular shift by m; negative m shifts back.

    :param x: 3-D numpy array
    :param m: shift amount (any integer; reduced modulo the axis length)
    :param d: axis to shift along (0, 1 or 2)
    :return: shifted array (fancy indexing returns a copy; x is untouched)
    """
    n = x.shape[d]
    # Source indices: position i reads from (i - m) mod n.
    idx = (np.arange(n) + (n - m % n)) % n
    if d == 0:
        return x[idx, :, :]
    elif d == 1:
        return x[:, idx, :]
    else:
        return x[:, :, idx]
|
tomjelinek/pcs | pcs_test/tier0/lib/communication/test_qdevice_net.py | Python | gpl-2.0 | 690 | 0 | from uni | ttest import TestCase
class GetCaCert(TestCase):
    """Intentionally empty; this request is tested in:
        pcs_test.tier0.lib.commands.test_quorum.AddDeviceNetTest
    """
class ClientSetup(TestCase):
    """Intentionally empty; this request is tested in:
        pcs_test.tier0.lib.commands.test_quorum.AddDeviceNetTest
    """
class SignCertificate(TestCase):
    """Intentionally empty; this request is tested in:
        pcs_test.tier0.lib.commands.test_quorum.AddDeviceNetTest
    """
class ClientImportCertificateAndKey(TestCase):
    """Intentionally empty; this request is tested in:
        pcs_test.tier0.lib.commands.test_quorum.AddDeviceNetTest
    """
class ClientDestroy(TestCase):
    """Intentionally empty; this request is tested in:
        pcs_test.tier0.lib.commands.test_quorum.RemoveDeviceNetTest
    """
|
armikhael/software-center | softwarecenter/backend/channel.py | Python | gpl-3.0 | 12,773 | 0.003523 | # Copyright (C) 2010 Canonical
#
# Authors:
# Gary Lasker
# Michael Vogt
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import xapian
from gettext import gettext as _
from softwarecenter.distro import get_distro
from softwarecenter.enums import (SortMethods,
Icons,
ViewPages,
)
LOG = logging.getLogger(__name__)
class ChannelsManager(object):
    """Derives the ordered list of software channels from the xapian db.

    (Stray splice markers that broke two statements in
    _get_channels_from_db were removed.)
    """

    def __init__(self, db, **kwargs):
        self.distro = get_distro()
        self.db = db

    # public
    @property
    def channels(self):
        """All channels known to the database, ordered for display."""
        return self._get_channels_from_db()

    @property
    def channels_installed_only(self):
        """Channels restricted to installed software."""
        return self._get_channels_from_db(True)

    @classmethod
    def channel_available(kls, channelname):
        pass

    # private
    def _get_channels_from_db(self, installed_only=False):
        """
        (internal) implements 'channels()' and 'channels_installed_only()' properties
        """
        distro_channel_name = self.distro.get_distro_channel_name()
        # gather the set of software channels and order them
        other_channel_list = []
        cached_origins = []
        for channel_iter in self.db.xapiandb.allterms("XOL"):
            if len(channel_iter.term) == 3:
                # bare "XOL" prefix without a channel name attached
                continue
            channel_name = channel_iter.term[3:]
            channel_origin = ""
            # get origin information for this channel from the first doc
            m = self.db.xapiandb.postlist_begin(channel_iter.term)
            doc = self.db.xapiandb.get_document(m.get_docid())
            for term_iter in doc.termlist():
                if term_iter.term.startswith("XOO") and len(term_iter.term) > 3:
                    channel_origin = term_iter.term[3:]
                    break
            LOG.debug("channel_name: %s" % channel_name)
            LOG.debug("channel_origin: %s" % channel_origin)
            # only keep the first channel seen per origin
            if channel_origin not in cached_origins:
                other_channel_list.append((channel_name, channel_origin))
                cached_origins.append(channel_origin)
        dist_channel = None
        other_channels = []
        unknown_channel = []
        local_channel = None
        for (channel_name, channel_origin) in other_channel_list:
            if not channel_name:
                unknown_channel.append(SoftwareChannel(channel_name,
                                                      channel_origin,
                                                      None,
                                                      installed_only=installed_only))
            elif channel_name == distro_channel_name:
                dist_channel = (SoftwareChannel(distro_channel_name,
                                                channel_origin,
                                                None,
                                                installed_only=installed_only))
            elif channel_name == "notdownloadable":
                # purely local software only makes sense in "installed" view
                if installed_only:
                    local_channel = SoftwareChannel(channel_name,
                                                    None,
                                                    None,
                                                    installed_only=installed_only)
            else:
                other_channels.append(SoftwareChannel(channel_name,
                                                      channel_origin,
                                                      None,
                                                      installed_only=installed_only))
        # set them in order: distro first, then others, unknown, local last
        channels = []
        if dist_channel is not None:
            channels.append(dist_channel)
        channels.extend(other_channels)
        channels.extend(unknown_channel)
        if local_channel is not None:
            channels.append(local_channel)
        for channel in channels:
            if installed_only:
                channel._channel_view_id = ViewPages.INSTALLED
            else:
                channel._channel_view_id = ViewPages.AVAILABLE
        return channels
class SoftwareChannel(object):
"""
class to represent a software channel
"""
ICON_SIZE = 24
    def __init__(self, channel_name, channel_origin, channel_component,
                 source_entry=None, installed_only=False,
                 channel_icon=None, channel_query=None,
                 channel_sort_mode=SortMethods.BY_ALPHABET):
        """
        configure the software channel object based on channel name,
        origin, and component (the latter for detecting the partner
        channel)
        """
        self._channel_name = channel_name
        self._channel_origin = channel_origin
        self._channel_component = channel_component
        self._channel_color = None
        # view page this channel belongs to; filled in by ChannelsManager
        self._channel_view_id = None
        self.installed_only = installed_only
        self._channel_sort_mode = channel_sort_mode
        # distro specific stuff
        self.distro = get_distro()
        # configure the channel: derive display name, icon and query unless
        # explicit overrides were passed in
        self._channel_display_name = self._get_display_name_for_channel(channel_name, channel_component)
        if channel_icon is None:
            self._channel_icon = self._get_icon_for_channel(channel_name, channel_origin, channel_component)
        else:
            self._channel_icon = channel_icon
        if channel_query is None:
            self._channel_query = self._get_channel_query_for_channel(channel_name, channel_origin, channel_component)
        else:
            self._channel_query = channel_query
        # a sources.list entry attached to the channel (this is currently
        # only used for not-yet-enabled channels)
        self._source_entry = source_entry
        # when the channel needs to be added to the systems sources.list
        self.needs_adding = False
    @property
    def name(self):
        """
        The channel name exactly as stored in the xapian database (XOL term).
        """
        return self._channel_name
    @property
    def origin(self):
        """
        The channel origin exactly as stored in the xapian database (XOO term).
        """
        return self._channel_origin
    @property
    def component(self):
        """
        The channel component as stored in the xapian database
        (used e.g. to detect the partner channel).
        """
        return self._channel_component
    @property
    def display_name(self):
        """
        Human-readable channel name for use in the UI (derived in __init__).
        """
        return self._channel_display_name
    @property
    def icon(self):
        """
        Icon for this channel, chosen from the channel name, its origin
        string or its component (or the explicit override from __init__).
        """
        return self._channel_icon
    @property
    def query(self):
        """
        The xapian query to be used when searching this software channel.
        """
        return self._channel_query
    @property
    def sort_mode(self):
        """
        Sort mode for this channel (defaults to SortMethods.BY_ALPHABET).
        """
        return self._channel_sort_mode
# TODO: implement __cmp__ so that sort for channels is encapsulated
# here as well
def _get_display_name_for_channel(self, channel_name, channel_component):
if channel_component == "partner":
channel_display_name = _("Canonical Partners")
elif not channel_name:
channel_display_name = _("Unknown")
elif channel_name == self. |
tancredi/python-console-snake | snake/config.py | Python | mit | 282 | 0 |
# Game tuning constants for the console snake game.
# (Stray splice markers removed around keys['RIGHT'] and initial_size.)

frame_len = .1  # seconds per frame (game speed)

# Terminal input codes for the supported controls.
keys = {
    'DOWN': 0x42,
    'LEFT': 0x44,
    'RIGHT': 0x43,
    'UP': 0x41,
    'Q': 0x71,
    'ENTER': 0x0a,
}

# NOTE(review): presumably a 1-in-N spawn chance for apples -- confirm at use site.
apple_domain = 1000

# Growth value of each food type.
food_values = {
    'apple': 3,
}

# Selectable playfield sizes (width, height).
game_sizes = {
    's': (25, 20),
    'm': (50, 40),
    'l': (80, 40),
}

initial_size = 4  # starting snake length
|
encukou/qdex | qdex/delegate.py | Python | mit | 2,413 | 0.00083 | #!/usr/bin/env python
# Encoding: UTF-8
"""Part of qdex: a Pokédex using PySide and veekun's pokedex library.
A query models for pokémon
"""
from PySide import QtGui, QtCore
Qt = QtCore.Qt
class PokemonDelegate(QtGui.QStyledItemDelegate):
    """Delegate for a Pokémon

    Shows summary information when the group of forms is collapsed.
    """
    def __init__(self, view):
        super(PokemonDelegate, self).__init__()
        # The view is needed to query row expansion state in indexToShow().
        self.view = view

    def indexToShow(self, index, summary=None):
        """Get the index to show instead of this one

        summary can be True to show the all-forms summary information (if
        the row is expandable at all), False to show the normal data,
        or None to choose based on the state of the view

        It's not too easy to hijack the QItemDelegate pipeline with custom
        data, so we hack around this by shifting the children: the first form
        is stored in row -1 (which isn't accessible from normal views).
        The parent switches to show this special child when it is expanded.
        """
        if summary is False:
            return index
        # Column 0 is where child rows (and thus expansion state) live.
        parent = index.sibling(index.row(), 0)
        hasChildren = index.model().hasChildren(parent)
        if summary is None:
            # Collapsed expandable rows show the all-forms summary.
            summary = hasChildren and not self.view.isExpanded(parent)
        if not summary and hasChildren:
            # Expanded: show the hidden "first form" child (row -1, see above).
            return parent.child(-1, index.column())
        else:
            return index

    def paint(self, painter, option, index):
        # Redirect painting to the summary/detail index as appropriate.
        index = self.indexToShow(index)
        super(PokemonDelegate, self).paint(painter, option, index)

    def sizeHint(self, option, index):
        # Reserve room for whichever rendering is larger, so toggling
        # expansion does not change the row height.
        hint = super(PokemonDelegate, self).sizeHint
        summaryHint = hint(option, self.indexToShow(index, True))
        return hint(option, index).expandedTo(summaryHint)
class PokemonNameDelegate(PokemonDelegate):
    """Delegate for the Pokémon icon/name column.

    (Stray splice markers that broke both method headers were removed.)
    """

    def sizeHint(self, option, index):
        # Measure without the decoration so the icon does not inflate the
        # row height; the model is told to hand out small icons meanwhile.
        option.decorationSize = QtCore.QSize(0, 0)
        self.view.model()._hack_small_icons = True
        hint = super(PokemonNameDelegate, self).sizeHint(option, index)
        self.view.model()._hack_small_icons = False
        return hint

    def paint(self, painter, option, index):
        # Anchor the icon at the bottom-center before default painting.
        option.decorationAlignment = Qt.AlignBottom | Qt.AlignHCenter
        super(PokemonNameDelegate, self).paint(painter, option, index)
|
projectscara2014/scara | working_directory/setup/block_position_setup_main.py | Python | mit | 5,886 | 0.040605 | #------------------------------------ SETUP ----------------------------------------------
import sys
WORKING_DIRECTORY = ''
SPLITTING_CHARACTER = ''
if sys.platform.startswith('win') :
SPLITTING_CHARACTER = '\{}'.format('')
elif sys.platform.startswith('darwin') :
SPLITTING_CHARACTER = '/'
def setup():
    """Locate the project root (two levels above this file), publish it in
    WORKING_DIRECTORY and append it to sys.path so sibling packages import.
    """
    global WORKING_DIRECTORY

    def locate_working_directory():
        # Keep every path component except the file name and its directory,
        # re-joining them with a trailing separator after each part.
        parts = __file__.split(SPLITTING_CHARACTER)[:-2]
        return ''.join(part + SPLITTING_CHARACTER for part in parts)

    WORKING_DIRECTORY = locate_working_directory()
    print('working_directory --> ', WORKING_DIRECTORY)
    sys.path.append(WORKING_DIRECTORY)
setup()
#---------------------- DYNAMIXEL AND ARDUINO CONTROLLING FUNCITONS -----------------------------
from comm import comm_init
if(comm_init.arduino1_connected):
arduino1_serial_object = comm_init.arduino1_serial_object
from comm import arduino1
arduino1.init(arduino1_serial_object)
if(comm_init.arduino1_connected and comm_init.dynamixel_connected):
arduino1.dynamixel_initialization1()
from comm import dynamixel
arduino1.dynamixel_initialization2()
else:
pass
# get dummy dynamixel object here
if(comm_init.arduino2_connected):
arduino2_serial_object = comm_init.arduino2_serial_object
from comm import arduino2
arduino2.init(arduino2_serial_object)
from core import lookup
from setup import this_to_that
import time
def print_dynamixel_position():
    # Dump the current target positions of both dynamixel servos to stdout.
    print("dynamixel 1 position --> {0}".format(dynamixel.GO_TO_DYNA_1_POS))
    print("dynamixel 2 position --> {0}".format(dynamixel.GO_TO_DYNA_2_POS))
def move_dynamixel(dynamixel_1_movement=0, dynamixel_2_movement=0):
    """Nudge both dynamixel targets by the given deltas, then move the arm."""
    dynamixel.GO_TO_DYNA_1_POS = dynamixel.GO_TO_DYNA_1_POS + dynamixel_1_movement
    dynamixel.GO_TO_DYNA_2_POS = dynamixel.GO_TO_DYNA_2_POS + dynamixel_2_movement
    dynamixel.dyna_move()
def move_servo(servo_movement):
    """Nudge the hand-servo target by servo_movement, then rotate it there."""
    arduino2.GO_TO_SERVO_POS = arduino2.GO_TO_SERVO_POS + servo_movement
    arduino2.rotate()
def print_everything():
    # Append the current command step sizes and all actuator targets to the
    # Tk text widget (reads module-level state; `text` is created below).
    text.insert('end','dynamixel_movement_per_command --> {0}\nservo_movement_per_command --> {1}\n'.\
        format(dynamixel_movement_per_command,servo_movement_per_command))
    text.insert('end','dynamixel1 --> {0}\ndynamixel2 --> {1}\n'.\
        format(dynamixel.GO_TO_DYNA_1_POS,dynamixel.GO_TO_DYNA_2_POS))
    text.insert('end','servo_hand --> {0}\n'.format(arduino2.GO_TO_SERVO_POS))
    text.insert('end','\n')
#---------------------- TKINTER RELATED FUNCTIONS ---------------------------------------------
import Tkinter as tk
dynamixel_movement_per_command = 1
servo_movement_per_command = 1
FLAG = 0
def do_nothing(event):
    # Placeholder Tk event handler used to effectively unbind key handling.
    pass
def on_key_press(event):
    """Tk key handler: translate a keypress into arm/servo motion commands.

    Key map (step sizes live in module globals):
      q/w dynamixel 1 +/-   a/s dynamixel 2 +/-   z/x hand servo +/-
      e/d dynamixel step x3 / /3   r/f servo step x5 / /5
      p/l pick / place   t pick, compute mirrored pose, move there, place
      o   close the window

    Fixes vs. original: removed the `global DYNA_POS_1,DYNA_POS_2`
    declaration of names that exist nowhere, removed the local
    `*_movement_per_command = 1` assignments that shadowed the module
    globals (they made the trailing prints always show 1), and removed
    stray splice markers that broke two statements.
    """
    def char_to_int(character):
        # Map a single character to its ordinal; 256 flags "no character"
        # (special keys deliver an empty event.char).
        for i in range(256):
            if chr(i) == character:
                return i
        return 256

    def move(keypress):
        global dynamixel_movement_per_command, servo_movement_per_command
        global entire_block_position_list
        if keypress == 119:
            # "w" pressed
            move_dynamixel(dynamixel_1_movement=-1 * dynamixel_movement_per_command)
        elif keypress == 113:
            # "q" pressed
            move_dynamixel(dynamixel_1_movement=+1 * dynamixel_movement_per_command)
        elif keypress == 115:
            # "s" pressed
            move_dynamixel(dynamixel_2_movement=-1 * dynamixel_movement_per_command)
        elif keypress == 97:
            # "a" pressed
            move_dynamixel(dynamixel_2_movement=+1 * dynamixel_movement_per_command)
        elif keypress == 120:
            # "x" pressed; keep the servo within [0, 180] degrees
            if arduino2.GO_TO_SERVO_POS > 0:
                move_servo(-1 * servo_movement_per_command)
        elif keypress == 122:
            # "z" pressed
            if arduino2.GO_TO_SERVO_POS < 180:
                move_servo(+1 * servo_movement_per_command)
        elif keypress == 101:
            # "e" pressed
            dynamixel_movement_per_command *= 3
        elif keypress == 100:
            # "d" pressed
            if dynamixel_movement_per_command > 1:
                dynamixel_movement_per_command /= 3
        elif keypress == 114:
            # "r" pressed
            if servo_movement_per_command < 180:
                servo_movement_per_command *= 5
        elif keypress == 102:
            # "f" pressed
            if servo_movement_per_command > 1:
                servo_movement_per_command /= 5
        elif keypress == 112:
            # "p" pressed
            arduino2.pick()
        elif keypress == 108:
            # "l" pressed
            arduino2.place()
        elif keypress == 116:
            # "t" pressed: pick the block, compute the mirrored target pose,
            # drive both dynamixels and the servo there, then place it.
            arduino2.pick()
            entire_block_position_list = this_to_that.calculate_entire_block_position_list(
                dynamixel.GO_TO_DYNA_1_POS, dynamixel.GO_TO_DYNA_2_POS, arduino2.GO_TO_SERVO_POS)
            print(entire_block_position_list)
            # Indices 0-1/4 hold the pose for one arm configuration,
            # 2-3/5 for the mirrored one -- chosen by the sign of dyna 2.
            if dynamixel.GO_TO_DYNA_2_POS > 0:
                dynamixel.GO_TO_DYNA_1_POS = entire_block_position_list[0]
                dynamixel.GO_TO_DYNA_2_POS = entire_block_position_list[1]
                arduino2.GO_TO_SERVO_POS = int(entire_block_position_list[4])
            else:
                dynamixel.GO_TO_DYNA_1_POS = entire_block_position_list[2]
                dynamixel.GO_TO_DYNA_2_POS = entire_block_position_list[3]
                arduino2.GO_TO_SERVO_POS = int(entire_block_position_list[5])
            dynamixel.dyna_move()
            arduino2.rotate()
            arduino2.place()
        else:
            print('INVALID KEY PRESSED')
        print_everything()
        if keypress == 111:
            # "o" pressed: tear down the Tk window
            root.destroy()

    keypress = char_to_int(event.char)
    move(keypress)
    # Show the step sizes currently in effect (the module globals).
    print(dynamixel_movement_per_command)
    print(servo_movement_per_command)
# root.bind('<KeyPress>',do_nothing)
root = tk.Tk()
root.geometry('600x400')
text = tk.Text(root, background='black', foreground='white', font=('Comic Sans MS', 12))
text.pack()
root.bind('<KeyPress>', on_key_press)
def block_position_setup():
    """Run one interactive positioning session and append the captured
    block position to saved_positions.txt."""
    # blocks = lookup.some_function()

    def setup_one_block():
        global entire_block_position_list
        root.bind('<KeyPress>', on_key_press)
        root.mainloop()  # returns once on_key_press calls root.destroy()
        with open('saved_positions.txt', 'a') as f:
            f.write(str(entire_block_position_list) + '\n')
        print('done writing to the text file')

    setup_one_block()
block_position_setup()
arduino1.initialize_to_default() |
vertigo235/Sick-Beard-XEM | sickbeard/clients/deluge.py | Python | gpl-3.0 | 8,453 | 0.009464 | # Author: Mr_Orange <mr_orange@hotmail.it>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import json
from base64 import b64encode
import sickbeard
from sickbeard import logger
from sickbeard.clients.generic import GenericClient
class DelugeAPI(GenericClient):
def __init__(self, host=None, username=None, password=None):
super(DelugeAPI, self).__init__('Deluge', host, username, password)
self.url = self.host + 'json'
    def _get_auth(self):
        """Log in to the Deluge WebUI and make sure it talks to a daemon.

        Returns the session auth token, or None on any request failure,
        when no daemon hosts exist, or when a daemon cannot be connected.
        """
        # Step 1: authenticate against the WebUI (password only).
        post_data = json.dumps({"method": "auth.login",
                                "params": [self.password],
                                "id": 1
                                })
        try:
            self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
        except:
            return None
        self.auth = self.response.json()["result"]
        # Step 2: is the WebUI already connected to a daemon?
        post_data = json.dumps({"method": "web.connected",
                                "params": [],
                                "id": 10
                                })
        try:
            self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
        except:
            return None
        connected = self.response.json()['result']
        if not connected:
            # Step 3: not connected -- look up the known daemon hosts.
            post_data = json.dumps({"method": "web.get_hosts",
                                    "params": [],
                                    "id": 11
                                    })
            try:
                self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
            except:
                return None
            hosts = self.response.json()['result']
            if len(hosts) == 0:
                logger.log(self.name + u': WebUI does not contain daemons', logger.ERROR)
                return None
            # Step 4: connect the WebUI to the first known daemon host.
            post_data = json.dumps({"method": "web.connect",
                                    "params": [hosts[0][0]],
                                    "id": 11
                                    })
            try:
                self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
            except:
                return None
            # Step 5: re-check the connection actually came up.
            post_data = json.dumps({"method": "web.connected",
                                    "params": [],
                                    "id": 10
                                    })
            try:
                self.response = self.session.post(self.url, data=post_data.encode('utf-8'))
            except:
                return None
            connected = self.response.json()['result']
            if not connected:
                logger.log(self.name + u': WebUI could not connect to daemon', logger.ERROR)
                return None
        return self.auth
def _add_torrent_uri(self, result):
post_data = json.dumps({"method": "core.add_torrent_magnet",
"params": [result.url,{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
"id": 2
})
self._request(method='post', data=post_data)
result.hash = self.response.json()['result']
return self.response.json()['result']
def _add_torrent_file(self, result):
post_data = json.dumps({"method": "core.add_torrent_file",
"params": [result.name + '.torrent', b64encode(result.content),{"move_completed": "true", "move_completed_path": sickbeard.TV_DOWNLOAD_DIR}],
"id": 2
})
self._request(method='post', data=post_data)
result.hash = self.response.json()['result']
return self.response.json()['result']
def _set_torrent_label(self, result):
    """Apply the configured Deluge label to the torrent just added.

    Creates the label first if the Deluge label plugin does not know it yet.
    Returns False when the label plugin is unavailable, otherwise the negated
    'error' field of the last daemon reply (True on success).
    """
    label = sickbeard.TORRENT_LABEL.lower()
    if label:
        # check if label already exists and create it if not
        post_data = json.dumps({"method": 'label.get_labels',
                                "params": [],
                                "id": 3
                                })
        self._request(method='post', data=post_data)
        labels = self.response.json()['result']
        # 'result' is None when the label plugin is not loaded in Deluge.
        if labels is not None:
            if label not in labels:
                logger.log(self.name + ': ' + label + u" label does not exist in Deluge we must add it", logger.DEBUG)
                post_data = json.dumps({"method": 'label.add',
                                        "params": [label],
                                        "id": 4
                                        })
                self._request(method='post', data=post_data)
                logger.log(self.name + ': ' + label + u" label added to Deluge", logger.DEBUG)
            # add label to torrent
            post_data = json.dumps({"method": 'label.set_torrent',
                                    "params": [result.hash, label],
                                    "id": 5
                                    })
            self._request(method='post', data=post_data)
            logger.log(self.name + ': ' + label + u" label added to torrent", logger.DEBUG)
        else:
            logger.log(self.name + ': ' + u"label plugin not detected", logger.DEBUG)
            return False
    return not self.response.json()['error']
def _set_torrent_ratio(self, result):
    """Configure stop-at-ratio for the torrent when TORRENT_RATIO is set.

    Issues two RPCs (enable stop-at-ratio, then set the ratio value) and
    returns the negated 'error' field of the last reply; returns True when
    no ratio is configured (nothing to do).
    """
    if sickbeard.TORRENT_RATIO:
        post_data = json.dumps({"method": "core.set_torrent_stop_at_ratio",
                                "params": [result.hash, True],
                                "id": 5
                                })
        self._request(method='post', data=post_data)
        post_data = json.dumps({"method": "core.set_torrent_stop_ratio",
                                "params": [result.hash, float(sickbeard.TORRENT_RATIO)],
                                "id": 6
                                })
        self._request(method='post', data=post_data)
        # NOTE: only the second reply's error flag is checked, matching the
        # original behaviour.
        return not self.response.json()['error']
    return True
def _set_torrent_path(self, result):
    """Configure the move-completed path for the torrent when
    TORRENT_PATH is set; returns True when nothing is configured,
    otherwise the negated 'error' field of the last daemon reply."""
    if not sickbeard.TORRENT_PATH:
        return True
    # Two RPCs: first enable move-on-completion, then set the target path.
    for rpc_method, rpc_arg, rpc_id in (
            ("core.set_torrent_move_completed", True, 7),
            ("core.set_torrent_move_completed_path", sickbeard.TORRENT_PATH, 8)):
        payload = json.dumps({"method": rpc_method,
                              "params": [result.hash, rpc_arg],
                              "id": rpc_id})
        self._request(method='post', data=payload)
    return not self.response.json()['error']
def _set_torrent_pause(self, result):
if sickbeard.TORRENT_PAUSED:
post_data = json.dumps({"method": "core.pause_torrent",
" |
crystal150/CS350 | collector.py | Python | mit | 17,514 | 0.016844 | import io
import csv
import requests
import datetime
import time
from bs4 import BeautifulSoup
TRAIN_FILE = "train.csv"  # default dataset file written by writeCsv()
DEFAULT_INTERVAL = 1800  # fallback crawl period, in seconds
TRAIN_FILE_HEAD = ["Insult", "Date", "Comment"]  # expected CSV header row
def writeCsv(fname, text, delimiter=","):
    """Append rows to the CSV dataset *fname*, skipping duplicates.

    text is a list of [insult_flag, date, comment] rows; a row is considered
    a duplicate when its comment (third column) already appears in the file.
    The file is rewritten as: header row, previously stored rows, then the
    new non-duplicate rows. Returns 0 on success, -1 on a write error.
    """
    prev_data = []
    try:
        # 'with' guarantees the handle is closed even if reading fails;
        # the original leaked it on exceptions and shadowed builtin 'file'.
        with open(fname, "r") as infile:
            prev_data = [row for row in csv.reader(infile, delimiter=delimiter)
                         if len(row) > 0]
        # Drop a leading header row so it is not duplicated on rewrite.
        if len(prev_data) > 0 and prev_data[0] == TRAIN_FILE_HEAD:
            prev_data = prev_data[1:]
    except FileNotFoundError:
        print("train.csv is created.")
    # Set membership is O(1) per lookup; the original sorted a list and
    # did O(n) 'in' tests. Short rows are skipped instead of crashing.
    seen_comments = set(row[2] for row in prev_data if len(row) > 2)
    try:
        with open(fname, "w") as outfile:
            writer = csv.writer(outfile, delimiter=delimiter, dialect="excel")
            writer.writerow(TRAIN_FILE_HEAD)
            writer.writerows(prev_data)
            for row in text:
                if len(row) <= 2 or row[2] not in seen_comments:
                    writer.writerow(row)
        return 0
    except IOError as e:
        print(str(e))
        return -1
def webCrawl(url, tag="", tag_attr_type="", tag_attr_val=""):
    """Fetch *url* and return the stripped text of every *tag* element whose
    attribute *tag_attr_type* equals *tag_attr_val*.

    Returns [] when any argument is None. Network errors from requests
    propagate to the caller; an unparsable page simply yields no matches.
    """
    # 'is None' is the correct identity test; '== None' invokes __eq__.
    if url is None or tag is None or tag_attr_type is None or tag_attr_val is None:
        return []
    source_code = requests.get(url)
    parsed_text = BeautifulSoup(source_code.text, "html.parser")
    comment_text = parsed_text.find_all(tag, attrs={tag_attr_type: tag_attr_val})
    # Collapse each matching element to its whitespace-stripped text.
    return [t.get_text(strip=True) for t in comment_text]
def webCrawlTwitter(url, tag, datetime_start=None, datetime_end=None):
    """Fetch *url* and return the stripped text of every English-language
    (lang="en") *tag* element.

    datetime_start/datetime_end are accepted for interface compatibility but
    are currently unused — presumably intended for date filtering; confirm.
    (Defaults changed from mutable list() literals to None — they were never
    read, so behaviour is unchanged.)
    """
    source_code = requests.get(url)
    parsed_text = BeautifulSoup(source_code.text, "html.parser")
    comment_text = parsed_text.find_all(tag, attrs={"lang": "en"})
    return [t.get_text(strip=True) for t in comment_text]
def webCrawlInsults(url, tag, datetime_start=None, datetime_end=None):
    """Fetch *url* and return the stripped text of every English-language
    (lang="en") *tag* element.

    NOTE(review): this is byte-for-byte identical to webCrawlTwitter —
    consider delegating to it or specialising the query; confirm intent.
    datetime_start/datetime_end are accepted but unused (defaults changed
    from mutable list() literals to None; they were never read).
    """
    source_code = requests.get(url)
    parsed_text = BeautifulSoup(source_code.text, "html.parser")
    comment_text = parsed_text.find_all(tag, attrs={"lang": "en"})
    return [t.get_text(strip=True) for t in comment_text]
def evalTexts(text, datetime_str="", react_pos=None, react_neg=None,
              min_react_rate=0.75, min_react_num=5, eval_default=False):
    """Label each comment in *text* as insult ("1") or not ("0").

    A comment i is labelled "1" when the negative-reaction share
    react_neg[i] / (react_pos[i] + react_neg[i]) reaches min_react_rate AND
    react_neg[i] reaches min_react_num. When reaction data is missing for i
    (or both thresholds are negative), eval_default decides the label
    (1 -> "1", 0/False -> "0", anything else -> no label, as before).

    Returns a list of [label, datetime_str, '"comment"'] rows.
    Fixes: ZeroDivisionError when a comment has zero total reactions
    (now labelled "0"); mutable default arguments replaced by None.
    """
    react_pos = [] if react_pos is None else react_pos
    react_neg = [] if react_neg is None else react_neg
    result_list = []
    for i in range(0, len(text)):
        current_list = []
        if (i >= len(react_pos) or i >= len(react_neg)
                or (min_react_rate < 0 and min_react_num < 0)):
            # No usable reaction data: fall back to the default label.
            # (False == 0 is True, so the default eval_default yields "0".)
            if eval_default == 1:
                current_list.append("1")
            elif eval_default == 0:
                current_list.append("0")
        else:
            total = react_pos[i] + react_neg[i]
            # Guard against division by zero: no reactions -> not an insult.
            if (total > 0 and react_neg[i] / total >= min_react_rate
                    and react_neg[i] >= min_react_num):
                current_list.append("1")
            else:
                current_list.append("0")
        current_list.append(datetime_str)
        current_list.append("\"" + text[i] + "\"")
        result_list.append(current_list)
    return result_list
def setTimeInterval (newInterval):
    # NOTE(review): this only binds a local and returns None — it never
    # updates DEFAULT_INTERVAL or any shared state, so callers observe no
    # effect. Looks unfinished; confirm intent before relying on it.
    t = newInterval
def operate ():
time_interval = DEFAULT_INTERVAL
time_prev = time.clock()
trial_limit = 1
trial_cur = 0
config_txt = open("crawler-config.txt", 'r')
config_lines = list()
config_pages = list()
while True:
line = config_txt.readline()
config_lines.append(line)
if not line:
break
for l in config_lines:
l = l.replace("\n", "")
l = l.replace(" ", "")
config_line_variable = ""
config_line_syntax = True
config_line_define = l.split("=")
if (len(config_line_define) != 2):
config_line_syntax = False
if (config_line_syntax == True):
config_line_p_str = config_line_define[0].split("[")
config_line_p_int = [None] * (len(config_line_p_str) - 1)
for i in range (0, len(config_line_p_int)):
config_line_p_int[i] = int(config_line_p_str[i + 1][:-1])
#print(config_line_p_int)
config_line_variable = config_line_p_str[0]
#print(config_line_variable)
if (config_line_variable == "pages_num"):
config_pages_num = int(config_line_define[1])
config_pages = [None] * config_pages_num
config_tags = [None] * config_pages_num
config_tags_at = [None] * config_pages_num
config_tags_av = [None] * config_pages_num
config_pos_react_tags = [None] * config_pages_num
config_pos_react_tags_at = [None] * config_pages_num
config_pos_react_tags_av = [None] * config_pages_num
config_neg_react_tags = [None] * config_pages_num
config_neg_react_tags_at = [None] * config_pages_num
config_neg_react_tags_av = [None] * config_pages_num
config_eval_neg_rate = [-1] * config_pages_num
config_eval_neg_num = [-1] * config_pages_num
config_eval_default = [False] * config_pages_num
config_exclude_str = [[]] * config_pages_num
config_trim_start_str = [[]] * config_pages_num
config_trim_end_str = [[]] * config_pages_num
if (config_line_variable == "pages"):
config_pages[config_line_p_int[0]] = config_line_define[1]
if (config_line_variable == "timeinterval"):
time_interval = int(config_line_define[1])
if (config_line_variable == "trial_limit"):
trial_limit = int(config_line_define[1])
if (config_line_variable == "tag"):
config_tags[config_line_p_int[0]] = config_line_define[1]
if (config_line_variable == "tag_attr_type"):
config_tags_at[config_line_p_int[0]] = config_line_define[1]
if (config_line_variable == "tag_attr_val"):
|
hanlind/nova | nova/policies/security_groups.py | Python | apache-2.0 | 1,102 | 0 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
# Policy enforced on the os-security-groups API extension itself.
BASE_POLICY_NAME = 'os_compute_api:os-security-groups'
# Template for sub-policies of the extension (filled with the action name).
POLICY_ROOT = 'os_compute_api:os-security-groups:%s'

security_groups_policies = [
    # Admin or owner may operate on security groups.
    policy.RuleDefault(
        name=BASE_POLICY_NAME,
        check_str=base.RULE_ADMIN_OR_OWNER),
    # Anyone may discover that the extension exists.
    policy.RuleDefault(
        name=POLICY_ROOT % 'discoverable',
        check_str=base.RULE_ANY),
]
def list_rules():
    """Return the oslo.policy rules defined by this module."""
    return security_groups_policies
|
pblottiere/QGIS | python/plugins/processing/algs/gdal/rasterize_over.py | Python | gpl-2.0 | 5,222 | 0.002489 | # -*- coding: utf-8 -*-
"""
***************************************************************************
rasterize_over.py
---------------------
Date : September 2013
Copyright : (C) 2013 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Alexander Bruy'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingException,
Qgs | ProcessingParameterDefinition,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterNumber,
QgsProcessingParameterString,
QgsProcessingParameterBoolean,
QgsProcessingOutputRasterLayer)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class rasterize_over(GdalAlgorithm):
    """Processing algorithm wrapping ``gdal_rasterize``.

    Burns the values of a numeric vector attribute into an *existing*
    raster: the input raster is modified in place and is also reported
    as the algorithm's output.
    """

    # Identifiers for the algorithm's parameters and output.
    INPUT = 'INPUT'
    FIELD = 'FIELD'
    INPUT_RASTER = 'INPUT_RASTER'
    ADD = 'ADD'
    EXTRA = 'EXTRA'
    OUTPUT = 'OUTPUT'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        """Declare the algorithm's parameters and its single raster output."""
        # Required inputs: the vector source, the numeric field to burn in,
        # and the raster that will be overwritten.
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input vector layer')))
        self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT_RASTER,
                                                            self.tr('Input raster layer')))
        self.addParameter(QgsProcessingParameterField(self.FIELD,
                                                      self.tr('Field to use for burn in value'),
                                                      None,
                                                      self.INPUT,
                                                      QgsProcessingParameterField.Numeric,
                                                      optional=False))
        # Advanced options: additive burn-in (-add) and raw extra CLI text.
        params = [
            QgsProcessingParameterBoolean(self.ADD,
                                          self.tr('Add burn in values to existing raster values'),
                                          defaultValue=False,
                                          ),
            QgsProcessingParameterString(self.EXTRA,
                                         self.tr('Additional command-line parameters'),
                                         defaultValue=None,
                                         optional=True)
        ]
        for p in params:
            p.setFlags(p.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
            self.addParameter(p)

        self.addOutput(QgsProcessingOutputRasterLayer(self.OUTPUT,
                                                      self.tr('Rasterized')))

    def name(self):
        return 'rasterize_over'

    def displayName(self):
        return self.tr('Rasterize (overwrite with attribute)')

    def group(self):
        return self.tr('Vector conversion')

    def groupId(self):
        return 'vectorconversion'

    def icon(self):
        return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'rasterize.png'))

    def commandName(self):
        return 'gdal_rasterize'

    def getConsoleCommands(self, parameters, context, feedback, executing=True):
        """Build the gdal_rasterize command line for the given parameters."""
        # Resolve the OGR-compatible vector source and the raster to modify.
        ogrLayer, layerName = self.getOgrCompatibleSource(self.INPUT, parameters, context, feedback, executing)
        inLayer = self.parameterAsRasterLayer(parameters, self.INPUT_RASTER, context)
        if inLayer is None:
            raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT_RASTER))
        fieldName = self.parameterAsString(parameters, self.FIELD, context)

        # The raster is edited in place, so it doubles as the output value.
        self.setOutputValue(self.OUTPUT, inLayer.source())

        # -l selects the layer, -a names the attribute to burn in.
        arguments = [
            '-l',
            layerName,
            '-a',
            fieldName
        ]
        if self.parameterAsBool(parameters, self.ADD, context):
            arguments.append('-add')

        if self.EXTRA in parameters and parameters[self.EXTRA] not in (None, ''):
            extra = self.parameterAsString(parameters, self.EXTRA, context)
            arguments.append(extra)

        arguments.append(ogrLayer)
        arguments.append(inLayer.source())

        return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
|
LeeKamentsky/CellProfiler | tutorial/example1e_groups.py | Python | gpl-2.0 | 8,354 | 0.003232 | '''<b>Example1e</b> demonstrates SettingsGroup
<hr>
There are many circumstances where it would be useful to let a user specify
an arbitrary number of a group of settings. For instance, you might want to
sum an arbitrary number of images together in your module or perform the
same operation on every listed image or object. This is done using
cps.SettingsGroup and by overriding the prepare_settings method.
'''
import cellprofiler.cpmodule as cpm
import cellprofiler.settings as cps
class Example1e(cpm.CPModule):
variable_revision_number = 1
module_name = "Example1e"
category = "Other"
def create_settings(self):
self.groups = []
#
# A hidden count setting saves the number of items in a list
# to the pipeline. When you load the module, you can read the
# hidden count and make sure your module has the correct number of
# groups before the settings are transferred.
#
self.group_count = cps.HiddenCount(self.groups)
#
# A DoSomething setting looks like a button. When you press the button,
# it does something: it calls the callback function which, in this case,
# is "self.add_group()"
#
self.add_group_button = cps.DoSomething(
"Add another group", "Add", self.add_group,
doc = "Press this button to add another group")
#
# We call "add_group()" with a False argument. This tells add_group
# to make sure and not add a button that would allow the user to
# remove the first group
#
self.add_group(False)
def add_group(self, can_delete = True):
'''Add a group to the list of groups
can_delete - if true, add a button that removes the entry
'''
#
# Make a new settings group to hold the settings
#
group = cps.SettingsGroup()
#
# if you can delete, that means there's a setting above this one,
# so it's nice to add a divider in that case.
#
if can_delete:
group.append("divider", cps.Divider())
#
# Each instance has an addend and a multiplicand. We run through
# them all, first adding, then multiplying to get our final answer
#
# group.append takes two arguments. The first is the name for the
# attribute. In this case, we used "addend" so if you want to get
# the addend, you say, "group.addend.value"
#
group.append("addend", cps.Float("Addend", 0))
group.append("multiplicand", cps.Float("Multiplicand", 1))
#
# Only add the RemoveSettingButton if we can delete
#
if can_delete:
group.append("remover", cps.RemoveSettingButton(
"Remove this entry", "Remove",
self.groups, group))
#
# Add the new group to the list.
#
self.groups.append(group)
def settings(self):
result = [self.group_count]
#
# Loop over all the elements in the group
#
for group in self.groups:
assert isinstance(group, cps.SettingsGroup)
#
# Add the settings that go into the pipeline. SettingsGroup is
# smart enough to know that DoSomething and Divider are UI elements
# and don't go in the pipeline.
#
result += group.pipeline_settings()
return result
def visible_settings(self):
#
# Don't put in the hidden count...doh it's HIDDEN!
result = []
for group in self.groups:
assert isinstance(group, cps.SettingsGroup)
#
# Add the visible settings for each group member
#
result += group.visible_settings()
#
# Put the add button at the end
#
result.append(self.add_group_button)
return result
#
# by convention, "run" goes next.
# Let's add up the values and print to the console
#
def run(self, workspace):
accumulator = 0
for group in self.groups:
accumulat | or += group.addend.value
accumulator += group.multiplicand.value
#
# You can put strings, numbers, lists, tuples, dictionaries and
# | numpy arrrays into workspace_display data as well as lists,
# tuples and dictionaries of the above.
#
# The workspace will magically transfer these to itself when
# display() is called - this might happen in a different process
# or possibly on a different machine.
#
workspace.display_data.accumulator = accumulator
def display(self, workspace, figure = None):
#
# We added the figure argument for the FileUI version. This is
# a recipe for a display method that works with both.
#
if figure is None:
#
# In the old UI, you'd tell the workspace to make you a figure
# or to find the old one.
# subplots tells you how many subplots in the x and y directions
#
figure = workspace.create_or_find_figure(subplots = (1, 1))
else:
#
# In the new UI, the figure is created for you and you set the
# number of subplots like this
#
figure.set_subplots((1, 1))
#
# retrieve the accumulator value
#
accumulator = workspace.display_data.accumulator
#
# This is a Matplotlib Axes instance for you to draw on.
# Google for Matplotlib's documentation on what super-special stuff
# you can do with them. Also see examples - we do some special handing
# for imshow which displays images on axes. I bet you didn't see that
# coming ;-)
axes = figure.subplot(0, 0)
#
# This keeps Matplotlib from drawing table on top of table.
#
axes.clear()
#
# We use a list into the table to organize it in columns. A header
# and then the values.
axes.table(cellText = [["Property", "Value"],
["accumulator", str(accumulator)]],
loc='center')
#
# You have to do this in order to get rid of the plot display
#
axes.set_frame_on(False)
axes.set_axis_off()
#
# Prior to the current release, a module had to tell CellProfiler whether
# it interacted with the user interface inside the "run" method and by
# default, a module was marked interactive just in case it did use the
# user interface.
#
# CellProfiler would use the indicator to figure out whether "run" had
# to be run in the user interface thread (CellProfiler would crash under
# OS-X otherwise).
#
# In the upcoming release, "run" isn't allowed to interact with the user
# interface directly, so you will not need to override is_interactive
# in the future.
#
# We'll cover the new UI interaction mechanism in example2c.
#
def is_interactive(self):
return False
#
# Finally, there's prepare_settings. setting_values are stored as unicode
# strings (after a fashion) in your pipeline. Your module gets to have
# as many as it returns from its settings() method. If you have settings
# groups, you have to make sure that settings() returns the correct number
# of settings... so you need to cheat and look at the setting values
# before settings() is called so you can know how many to return.
#
# We thought about this a *little* and this loosey-goosey way of doing it
# is typically pythonic in that it gives the implementor a lot of
# flexibility, but there's less structure to keep things from going wrong.
#
def prepare_settings(self, setting_values):
# The first setting tells how many are present
count = int(setting_values[0])
#
# Delete all but the first group. Python won't choke if there is
# no self.gr |
patta42/pySICM | pySICM/_runGui.py | Python | gpl-3.0 | 969 | 0.001032 | # Copyright (C) 2015 Patrick Happel <patrick.happel@rub.de>
#
# This file is part of pySICM.
#
# pySICM is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 2 of the License, or (at your option) any later
# version.
#
# pySICM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pySICM. If not, see <http://www.gnu.org/licenses/>.
import gui.pySICMGUImain
import sys
def run():
    """Create the pySICM GUI main window and enter the Qt event loop."""
    # NOTE(review): QtGui and Ui_PySICMGUIMainWindow are not imported in this
    # module (only gui.pySICMGUImain and sys are) — as written this raises
    # NameError unless the names are injected elsewhere. Presumably a
    # 'from PyQt4 import QtGui' and a UI-class import are missing; confirm.
    app = QtGui.QApplication(sys.argv)
    PySICMGUIMainWindow = QtGui.QMainWindow()
    ui = Ui_PySICMGUIMainWindow()
    ui.setupUi(PySICMGUIMainWindow)
    PySICMGUIMainWindow.show()
    # exec_() blocks until the window closes; its status is passed to exit().
    sys.exit(app.exec_())
|
StackStorm/python-mistralclient | mistralclient/tests/unit/v2/test_cli_members.py | Python | apache-2.0 | 3,091 | 0 | # Copyright 2016 Catalyst IT Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from mistralclient.api.v2 import members
from mistralclient.commands.v2 import members as member_cmd
from mistralclient.tests.unit import base
# Canned member payload mirroring what the Mistral members API returns.
MEMBER_DICT = {
    'id': '123',
    'resource_id': '456',
    'resource_type': 'workflow',
    'project_id': '1111',
    'member_id': '2222',
    'status': 'pending',
    'created_at': '1',
    'updated_at': '1'
}

# Member resource built from the canned payload ('mock' stands in for the
# API manager, which the tested commands never touch directly).
MEMBER = members.Member(mock, MEMBER_DICT)
class TestCLIWorkflowMembers(base.BaseCommandTest):
    """CLI tests for the resource-member commands.

    Each test stubs the members manager, invokes the CLI command and checks
    the formatted output row (result[1]).
    """

    def test_create(self):
        self.client.members.create.return_value = MEMBER
        result = self.call(
            member_cmd.Create,
            app_args=[MEMBER_DICT['resource_id'], MEMBER_DICT['resource_type'],
                      MEMBER_DICT['member_id']]
        )
        self.assertEqual(
            ('456', 'workflow', '1111', '2222', 'pending', '1', '1'),
            result[1]
        )

    def test_update(self):
        self.client.members.update.return_value = MEMBER
        result = self.call(
            member_cmd.Update,
            app_args=[MEMBER_DICT['resource_id'], MEMBER_DICT['resource_type'],
                      '-m', MEMBER_DICT['member_id']]
        )
        self.assertEqual(
            ('456', 'workflow', '1111', '2222', 'pending', '1', '1'),
            result[1]
        )

    def test_list(self):
        self.client.members.list.return_value = [MEMBER]
        result = self.call(
            member_cmd.List,
            app_args=[MEMBER_DICT['resource_id'], MEMBER_DICT['resource_type']]
        )
        self.assertListEqual(
            [('456', 'workflow', '1111', '2222', 'pending', '1', '1')],
            result[1]
        )

    def test_get(self):
        self.client.members.get.return_value = MEMBER
        result = self.call(
            member_cmd.Get,
            app_args=[MEMBER_DICT['resource_id'], MEMBER_DICT['resource_type'],
                      '-m', MEMBER_DICT['member_id']]
        )
        self.assertEqual(
            ('456', 'workflow', '1111', '2222', 'pending', '1', '1'),
            result[1]
        )

    def test_delete(self):
        self.call(
            member_cmd.Delete,
            app_args=[MEMBER_DICT['resource_id'], MEMBER_DICT['resource_type'],
                      MEMBER_DICT['member_id']]
        )
        # Delete prints nothing; assert the client call was made correctly.
        self.client.members.delete.assert_called_once_with(
            MEMBER_DICT['resource_id'],
            MEMBER_DICT['resource_type'],
            MEMBER_DICT['member_id']
        )
|
ichuang/sympy | sympy/core/cache.py | Python | bsd-3-clause | 3,434 | 0.004077 | """ Caching facility for SymPy """
# TODO: refactor CACHE & friends into class?
# global cache registry:
CACHE = [] # [] of (cached_item, cache) pairs; cache is a dict or tuple of dicts
from sympy.core.decorators import wraps
def print_cache():
    """print cache content"""
    # Walk the global registry; each entry is (cached item, dict or tuple of
    # dicts). Python 2 print statements — this module predates Python 3.
    for item, cache in CACHE:
        item = str(item)
        head = '='*len(item)
        print head
        print item
        print head
        # Normalise to a tuple of dicts; 'shown' records whether there were
        # several caches, in which case each gets an index banner.
        if not isinstance(cache, tuple):
            cache = (cache,)
            shown = False
        else:
            shown = True
        for i, kv in enumerate(cache):
            if shown:
                print '\n*** %i ***\n' % i
            for k, v in kv.iteritems():
                print '    %s :\t%s' % (k, v)
def clear_cache():
    """Empty every cache dictionary registered in the global CACHE list."""
    for _item, cache in CACHE:
        # An entry's cache is either a single dict or a tuple of dicts;
        # normalise to a tuple and wipe each one in place.
        caches = cache if isinstance(cache, tuple) else (cache,)
        for kv in caches:
            kv.clear()
########################################
def __cacheit_nocache(func):
    """Caching disabled: return *func* unchanged (drop-in for __cacheit)."""
    return func
def __cacheit(func):
    """caching decorator.

       important: the result of cached function must be *immutable*

       Examples
       ========

       >>> from sympy.core.cache import cacheit
       >>> @cacheit
       ... def f(a,b):
       ...    return a+b

       >>> @cacheit
       ... def f(a,b):
       ...    return [a,b] # <-- WRONG, returns mutable object

       to force cacheit to check returned results mutability and consistency,
       set environment variable SYMPY_USE_CACHE to 'debug'
    """
    # Per-function cache dict; also registered globally so print_cache /
    # clear_cache can reach it.
    func._cache_it_cache = func_cache_it_cache = {}
    CACHE.append((func, func_cache_it_cache))

    @wraps(func)
    def wrapper(*args, **kw_args):
        """
        Assemble the args and kw_args to compute the hash.
        """
        if kw_args:
            # Python 2: keys() returns a list; sort it so the cache key is
            # independent of keyword order.
            keys = kw_args.keys()
            keys.sort()

            items = [(k+'=', kw_args[k]) for k in keys]
            k = args + tuple(items)
        else:
            k = args
        # Append the argument types so equal-but-distinct values such as
        # 1 and 1.0 (same hash) get separate cache entries.
        k = k + tuple(map(lambda x: type(x), k))

        try:
            return func_cache_it_cache[k]
        except KeyError:
            pass
        func_cache_it_cache[k] = r = func(*args, **kw_args)
        return r
    return wrapper
def __cacheit_debug(func):
    """Debug variant of __cacheit: verify the cache stays consistent."""
    cached_func = __cacheit(func)

    @wraps(func)
    def wrapper(*args, **kw_args):
        # Compute both the uncached and the cached answer and compare them.
        direct = func(*args, **kw_args)
        via_cache = cached_func(*args, **kw_args)

        # Immutability probe: hashing a mutable container (list, dict,
        # tuple containing a list, ...) raises TypeError, flagging results
        # that must not be cached.
        hash(direct)
        hash(via_cache)

        # The cached value must agree with a fresh computation.
        assert direct == via_cache

        return direct
    return wrapper
def _getenv(key, default=None):
    """Read environment variable *key*, falling back to *default*."""
    import os
    return os.getenv(key, default)
# Select the cacheit implementation from SYMPY_USE_CACHE=yes/no/debug.
USE_CACHE = _getenv('SYMPY_USE_CACHE', 'yes').lower()

if USE_CACHE == 'no':
    cacheit = __cacheit_nocache
elif USE_CACHE == 'yes':
    cacheit = __cacheit  # default: plain memoization
elif USE_CACHE == 'debug':
    cacheit = __cacheit_debug # a lot slower
else:
    raise RuntimeError('unrecognized value for SYMPY_USE_CACHE: %s' % USE_CACHE)
|
jaumemarti/l10n-spain-txerpa | account_balance_reporting/account_balance_reporting_template.py | Python | agpl-3.0 | 9,107 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP - Account balance reporting engine
# Copyright (C) 2009 Pexego Sistemas Informáticos.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
Account balance report templates
Generic account balance report template that will be used to define
accounting concepts with formulas to calculate its values/balance.
Designed following the needs of the Spanish/Spain localization.
"""
from openerp.osv import orm, fields
from openerp.tools.translate import _
_BALANCE_MODE_HELP = """
Formula calculation mode: Depending on it, the balance is calculated as
follows:
Mode 0: debit-credit (default);
Mode 1: debit-credit, credit-debit for accounts in brackets;
Mode 2: credit-debit;
Mode 3: credit-debit, debit-credit for accounts in brackets."""
_VALUE_FORMULA_HELP = """
Value calculation formula: Depending on this formula the final value is
calculated as follows:
Empy template value: sum of (this concept) children values.
Number with decimal point ("10.2"): that value (constant).
Account numbers separated by commas ("430,431,(437)"): Sum of the account
balances (the sign of the balance depends on the balance mode).
Concept codes separated by "+" ("11000+12000"): Sum of those concepts values.
"""
# CSS classes for the account lines
CSS_CLASSES = [('default', 'Default'),
('l1', 'Level 1'),
('l2', 'Level 2'),
('l3', 'Level 3'),
('l4', 'Level 4'),
('l5', 'Level 5')]
class account_balance_reporting_template(orm.Model):
    """Account balance report template.

    It stores the header fields of an account balance report template,
    and the linked lines of detail with the formulas to calculate
    the accounting concepts of the report.
    """
    _name = "account.balance.reporting.template"

    _columns = {
        'name': fields.char('Name', size=64, required=True, select=True),
        'type': fields.selection([('system', 'System'),
                                  ('user', 'User')], 'Type'),
        'report_xml_id': fields.many2one('ir.actions.report.xml',
                                         'Report design', ondelete='set null'),
        'description': fields.text('Description'),
        'balance_mode': fields.selection(
            [('0', 'Debit-Credit'),
             ('1', 'Debit-Credit, reversed with brackets'),
             ('2', 'Credit-Debit'),
             ('3', 'Credit-Debit, reversed with brackets')],
            'Balance mode', help=_BALANCE_MODE_HELP),
        'line_ids': fields.one2many('account.balance.reporting.template.line',
                                    'template_id', 'Lines'),
    }
    _defaults = {
        'type': 'user',
        'balance_mode': '0',
    }

    def copy(self, cr, uid, rec_id, default=None, context=None):
        """Redefine the copy method to perform it correctly as the line
        structure is a graph: lines are copied without parents first, then
        re-linked by matching codes inside the new template.
        """
        line_obj = self.pool['account.balance.reporting.template.line']
        # Read the current item data:
        template = self.browse(cr, uid, rec_id, context=context)
        # Create the template
        new_id = self.create(
            cr, uid, {
                'name': '%s*' % template.name,
                'type': 'user',  # Copies are always user templates
                'report_xml_id': template.report_xml_id.id,
                'description': template.description,
                'balance_mode': template.balance_mode,
                'line_ids': None,
            }, context=context)
        # Now create the lines (without parents)
        for line in template.line_ids:
            line_obj.create(
                cr, uid, {
                    'template_id': new_id,
                    'sequence': line.sequence,
                    'css_class': line.css_class,
                    'code': line.code,
                    'name': line.name,
                    'current_value': line.current_value,
                    'previous_value': line.previous_value,
                    'negate': line.negate,
                    'parent_id': None,
                    'child_ids': None,
                }, context=context)
        # Now set the (lines) parents
        for line in template.line_ids:
            if line.parent_id:
                # Search for the copied line
                new_line_id = line_obj.search(
                    cr, uid, [('template_id', '=', new_id),
                              ('code', '=', line.code)], context=context)[0]
                # Search for the copied parent line
                new_parent_id = line_obj.search(
                    cr, uid, [
                        ('template_id', '=', new_id),
                        ('code', '=', line.parent_id.code),
                    ], context=context)[0]
                # Set the parent
                line_obj.write(cr, uid, new_line_id,
                               {'parent_id': new_parent_id}, context=context)
        return new_id
class account_balance_reporting_template_line(orm.Model):
"""
Account balance report template line / Accounting concept template
One line of detail of the balance report representing an accounting
concept with the formulas to calculate its values.
The accounting concepts follow a parent-children hierarchy.
"""
_name = "account.balance.reporting.template.line"
_columns = {
'template_id': fields.many2one('account.balance.reporting.template',
'Template', ondelete='cascade'),
'sequence': fields.integer(
'Sequence', required=True,
help="Lines will be sorted/grouped by this field"),
'css_class': fields.selection(CSS_CLASSES, 'CSS Class', required=False,
help="Style-sheet class"),
'code': fields.char('Code', size=64, required=True, select=True,
help="Concept code, may be used on formulas to "
"reference this line"),
'name': fields.char('Name', size=256, required=True, select=True,
help="Concept name/description"),
'current_value': fields.text('Fiscal year 1 formula',
help=_VALUE_FORMULA_HELP),
'previous_value': fields.text('Fiscal year 2 formula',
help=_VALUE_FORMULA_HELP),
'negate': fields.boolean(
'Negate',
help="Negate the value (change the sign of the balance)"),
'parent_id': fields.many2one('account.balance.reporting.template.line',
'Parent', ondelete='cascade'),
'child_ids': fields.one2many('account.balance.reporting.template.line',
'parent_id', 'Children'),
}
_defaults = {
'template_id': (lambda self, cr, uid, context=None:
context.get('template_id', None)),
'negate': False,
'css_class': 'default',
'sequence': 10,
}
_order = "sequence, code"
_sql_constraints = [
('report_code_uniq', 'unique(template_id, code)',
_("The code must be unique for this report!"))
]
def name_get(self, cr, uid, ids, context=None):
"""
Redefine the name_get method to show the code in the name
("[code] |
vadimadr/generator-djdj | generators/app/templates/django_project_template/wsgi.py | Python | mit | 428 | 0 | """
WSGI config for <%= slug %> | project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at the generated project's local settings unless the
# environment already names a settings module. (`<%= slug %>` is replaced
# by the project generator at scaffold time.)
os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                      "<%= slug %>.settings.local")

application = get_wsgi_application()
|
linsalrob/PyFBA | PyFBA/fba/bounds.py | Python | mit | 4,803 | 0.003748 | import sys
from PyFBA import lp, log_and_message
def reaction_bounds(reactions, reactions_with_upsr, media, lower=-1000.0, mid=0.0, upper=1000.0, verbose=False):
    """
    Set the bounds for each reaction. We set the reactions to run between
    either lower/mid, mid/upper, or lower/upper depending on whether the
    reaction runs <=, =>, or <=> respectively.

    :param reactions: The dict of all reactions we know about
    :type reactions: dict of metabolism.Reaction
    :param reactions_with_upsr: The sorted list of reactions to run
    :type reactions_with_upsr: set
    :param media: The media compounds (no longer consulted here; see NOTE)
    :type media: set
    :param lower: The default lower bound
    :type lower: float
    :param mid: The default mid value (typically 0)
    :type mid: float
    :param upper: The default upper bound
    :type upper: float
    :param verbose: Write a summary of uptake/secretion counts to stderr
    :type verbose: bool
    :return: A dict of the reaction ID and the tuple of bounds
    :rtype: dict

    NOTE (RAE 16/6/21): media components are no longer special-cased here;
    uptake_and_secretion_reactions in external_reactions.py handles them,
    and callers are assumed to have culled those for the media already.
    """
    rbvals = {}
    # Vestigial counters from the removed media special-casing: they are
    # only reported in verbose mode and always remain zero.
    media_uptake_secretion_count = 0
    other_uptake_secretion_count = 0
    for r in reactions_with_upsr:
        if r == 'BIOMASS_EQN':
            # The artificial biomass reaction only runs forwards.
            rbvals[r] = (mid, upper)
            continue
        if r not in reactions:
            # Bug fix: previously reactions[r] was dereferenced before this
            # membership test, raising KeyError instead of warning here.
            sys.stderr.write("Did not find {} in reactions\n".format(r))
            # Unknown reactions are treated as reversible ("=").
            rbvals[r] = (lower, upper)
            continue
        rxn = reactions[r]
        # If we already know the bounds, e.g. from an SBML file or from our
        # uptake/secretion reactions, use them unchanged.
        if rxn.lower_bound is not None and rxn.upper_bound is not None:
            rbvals[r] = (rxn.lower_bound, rxn.upper_bound)
            continue
        direction = rxn.direction
        if direction == "=":
            rbvals[r] = (lower, upper)
        elif direction == ">":
            rbvals[r] = (mid, upper)
        elif direction == "<":
            # Deliberately (lower, upper) rather than (lower, mid); the
            # narrower bound is kept disabled in the upstream history.
            rbvals[r] = (lower, upper)
        else:
            sys.stderr.write("DO NOT UNDERSTAND DIRECTION " + direction + " for " + r + "\n")
            rbvals[r] = (mid, upper)
    if verbose:
        sys.stderr.write("In parsing the bounds we found {} media uptake ".format(media_uptake_secretion_count) +
                         "and secretion reactions and {} other u/s reactions\n".format(other_uptake_secretion_count))
    rbounds = [rbvals[r] for r in reactions_with_upsr]
    # Record the chosen bounds back on the reaction objects, then hand the
    # column bounds to the linear-programming layer.
    for r in reactions_with_upsr:
        if r in reactions:
            reactions[r].lower_bound, reactions[r].upper_bound = rbvals[r]
    lp.col_bounds(rbounds)
    return rbvals
def compound_bounds(cp, lower=0, upper=0):
    """
    Impose constraints on the compounds. These constraints limit how much
    each compound can vary and are essentially 0 for most compounds except
    those that are in the media or otherwise external.

    This is the zero flux vector.

    :param cp: the list of compound ids
    :param lower: the default lower value
    :param upper: the default upper value
    :return: dict mapping each compound id to its (lower, upper) bounds
    """
    bounds = (lower, upper)
    # One identical bounds row per compound, preserving the input order.
    row_constraints = [bounds for _ in cp]
    bounds_by_compound = {compound: bounds for compound in cp}
    lp.row_bounds(row_constraints)
    return bounds_by_compound
|
henry-chao/moltres-radio | dbInit.py | Python | mit | 668 | 0.013473 | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
# NOTE(review): database credentials are hard-coded in the connection URI —
# move them into configuration/environment before deploying.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://moltres:sertlom@localhost/moltres_radio'
db = SQLAlchemy(app)
class UserDAO(db.Model):
    """ORM mapping for one row of the ``users`` table."""

    __tablename__ = 'users'

    # Account identity and activation state.
    username = db.Column(db.String(), primary_key=True)
    active = db.Column(db.String())
    activationKey = db.Column(db.String())
    # Password hash material.
    password = db.Column(db.String())
    salt = db.Column(db.String())

    def __init__(self, username, active, activationKey, password, salt):
        """Populate every column attribute for a new user row."""
        self.username = username
        self.active = active
        self.activationKey = activationKey
        self.password = password
        self.salt = salt
|
zeroq/amun | vuln_modules/vuln-arkeia/arkeia_shellcodes.py | Python | gpl-2.0 | 60 | 0 | arkeia_request_stage_1 = "\x00\x | 4d\x00\x03\x00\x01\x03\x | e8"
|
dpgaspar/Flask-AppBuilder | flask_appbuilder/tests/security/test_auth_ldap.py | Python | bsd-3-clause | 41,672 | 0.000648 | import logging
import os
import unittest
from unittest.mock import Mock, patch
from flask import Flask
from flask_appbuilder import AppBuilder, SQLA
from flask_appbuilder.security.manager import AUTH_LDAP
import jinja2
import ldap
from mockldap import MockLdap
from ..const import USERNAME_ADMIN, USERNAME_READONLY
logging.basicConfig(format="%(asctime)s:%(levelname)s:%(name)s:%(message)s")
logging.getLogger().setLevel(logging.DEBUG)
log = logging.getLogger(__name__)
class LDAPSearchTestCase(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Build the shared in-memory LDAP directory once for the class.
        cls.mockldap = MockLdap(cls.directory)
    @classmethod
    def tearDownClass(cls):
        # Dispose of the mock LDAP directory created in setUpClass().
        del cls.mockldap
def setUp(self):
# start MockLdap
self.mockldap.start()
self.ldapobj = self.mockldap["ldap://localhost/"]
# start Flask
self.app = Flask(__name__)
self.app.jinja_env.undefined = jinja2.StrictUndefined
self.app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
"SQLALCHEMY_DATABASE_URI"
)
self.app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
self.app.config["AUTH_TYPE"] = AUTH_LDAP
self.app.config["AUTH_LDAP_SERVER"] = "ldap://localhost/"
self.app.config["AUTH_LDAP_UID_FIELD"] = "uid"
self.app.config["AUTH_LDAP_FIRSTNAME_FIELD"] = "givenName"
self.app.config["AUTH_LDAP_LASTNAME_FIELD"] = "sn"
self.app.config["AUTH_LDAP_EMAIL_FIELD"] = "email"
# start Database
self.db = SQLA(self.app)
def tearDown(self):
# Remove test user
user_alice = self.appbuilder.sm.find_user("alice")
if user_alice:
self.db.session.delete(user_alice)
self.db.session.commit()
user_natalie = self.appbuilder.sm.find_user("natalie")
if user_natalie:
self.db.session.delete(user_natalie)
self.db.session.commit()
# stop MockLdap
self.mockldap.stop()
del self.ldapobj
# stop Flask
self.app = None
# stop Flask-AppBuilder
self.appbuilder = None
# stop Database
self.db.session.remove()
self.db = None
def assertOnlyDefaultUsers(self):
users = self.appbuilder.sm.get_all_users()
user_names = [user.username for user in users]
self.assertEqual(user_names, [USERNAME_ADMIN, USERNAME_READONLY])
# ----------------
# LDAP Directory
# ----------------
top = ("o=test", {"o": [ | "test"]})
ou_users = ("ou=users, | o=test", {"ou": ["users"]})
ou_groups = ("ou=groups,o=test", {"ou": ["groups"]})
user_admin = (
"uid=admin,ou=users,o=test",
{"uid": ["admin"], "userPassword": ["admin_password"]},
)
user_alice = (
"uid=alice,ou=users,o=test",
{
"uid": ["alice"],
"userPassword": ["alice_password"],
"memberOf": [b"cn=staff,ou=groups,o=test"],
"givenName": [b"Alice"],
"sn": [b"Doe"],
"email": [b"alice@example.com"],
},
)
user_natalie = (
"uid=natalie,ou=users,o=test",
{
"uid": ["natalie"],
"userPassword": ["natalie_password"],
"memberOf": [
b"cn=staff,ou=groups,o=test",
b"cn=admin,ou=groups,o=test",
b"cn=exec,ou=groups,o=test",
],
"givenName": [b"Natalie"],
"sn": [b"Smith"],
"email": [b"natalie@example.com"],
},
)
group_admins = (
"cn=admins,ou=groups,o=test",
{"cn": ["admins"], "member": [user_admin[0]]},
)
group_staff = (
"cn=staff,ou=groups,o=test",
{"cn": ["staff"], "member": [user_alice[0]]},
)
directory = dict(
[
top,
ou_users,
ou_groups,
user_admin,
user_alice,
user_natalie,
group_admins,
group_staff,
]
)
# ----------------
# LDAP Queries
# ----------------
call_initialize = ("initialize", tuple(["ldap://localhost/"]), {})
call_set_option = ("set_option", tuple([ldap.OPT_REFERRALS, 0]), {})
call_bind_admin = (
"simple_bind_s",
tuple(["uid=admin,ou=users,o=test", "admin_password"]),
{},
)
call_bind_alice = (
"simple_bind_s",
tuple(["uid=alice,ou=users,o=test", "alice_password"]),
{},
)
call_bind_natalie = (
"simple_bind_s",
tuple(["uid=natalie,ou=users,o=test", "natalie_password"]),
{},
)
call_search_alice = (
"search_s",
tuple(["ou=users,o=test", 2, "(uid=alice)", ["givenName", "sn", "email"]]),
{},
)
call_search_alice_memberof = (
"search_s",
tuple(
[
"ou=users,o=test",
2,
"(uid=alice)",
["givenName", "sn", "email", "memberOf"],
]
),
{},
)
call_search_natalie_memberof = (
"search_s",
tuple(
[
"ou=users,o=test",
2,
"(uid=natalie)",
["givenName", "sn", "email", "memberOf"],
]
),
{},
)
call_search_alice_filter = (
"search_s",
tuple(
[
"ou=users,o=test",
2,
"(&(memberOf=cn=staff,ou=groups,o=test)(uid=alice))",
["givenName", "sn", "email"],
]
),
{},
)
# ----------------
# Unit Tests
# ----------------
def test___search_ldap(self):
"""
LDAP: test `_search_ldap` method
"""
self.app.config["AUTH_LDAP_BIND_USER"] = "uid=admin,ou=users,o=test"
self.app.config["AUTH_LDAP_BIND_PASSWORD"] = "admin_password"
self.app.config["AUTH_LDAP_SEARCH"] = "ou=users,o=test"
self.appbuilder = AppBuilder(self.app, self.db.session)
sm = self.appbuilder.sm
# prepare `con` object
con = ldap.initialize("ldap://localhost/")
sm._ldap_bind_indirect(ldap, con)
# run `_search_ldap` method
user_dn, user_attributes = sm._search_ldap(ldap, con, "alice")
# validate - search returned expected data
self.assertEqual(user_dn, self.user_alice[0])
self.assertEqual(user_attributes["givenName"], self.user_alice[1]["givenName"])
self.assertEqual(user_attributes["sn"], self.user_alice[1]["sn"])
self.assertEqual(user_attributes["email"], self.user_alice[1]["email"])
# validate - expected LDAP methods were called
self.assertEqual(
self.ldapobj.methods_called(with_args=True),
[self.call_initialize, self.call_bind_admin, self.call_search_alice],
)
def test___search_ldap_filter(self):
"""
LDAP: test `_search_ldap` method (with AUTH_LDAP_SEARCH_FILTER)
"""
# MockLdap needs non-bytes for search filters, so we patch `memberOf`
# to a string, only for this test
with patch.dict(
self.directory[self.user_alice[0]],
{
"memberOf": [
i.decode() for i in self.directory[self.user_alice[0]]["memberOf"]
]
},
):
_mockldap = MockLdap(self.directory)
_mockldap.start()
_ldapobj = _mockldap["ldap://localhost/"]
self.app.config["AUTH_LDAP_BIND_USER"] = "uid=admin,ou=users,o=test"
self.app.config["AUTH_LDAP_BIND_PASSWORD"] = "admin_password"
self.app.config["AUTH_LDAP_SEARCH"] = "ou=users,o=test"
self.app.config[
"AUTH_LDAP_SEARCH_FILTER"
] = "(memberOf=cn=staff,ou=groups,o=test)"
self.appbuilder = AppBuilder(self.app, self.db.session)
sm = self.appbuilder.sm
# prepare `con` object
con = ldap.initialize("ldap://localhost/")
sm._ldap_bind_indirect(ldap, con)
# run `_search_ldap` method
|
ivanlyon/exercises | test/test_k_hardwoodspecies.py | Python | mit | 1,481 | 0.00135 | import io
import unittest
from unittest.mock import patch
from kattis import k_hardwoodspecies
###############################################################################
class SampleInput(unittest.TestCase):
    '''Problem statement sample inputs and outputs'''
    def test_sample_input(self):
        '''Run and assert problem statement sample input and output.'''
        # Sample stdin: one tree species per line. The literal starts with
        # a newline (after the opening quotes) that inputs[1:] strips.
        inputs = '''
Red Alder
Ash
Aspen
Basswood
Ash
Beech
Yellow Birch
Ash
Cherry
Cottonwood
Ash
Cypress
Red Elm
Gum
Hackberry
White Oak
Hickory
Pecan
Hard Maple
White Oak
Soft Maple
Red Oak
Red Oak
White Oak
Poplan
Sassafras
Sycamore
Black Walnut
Willow
'''
        # Expected stdout: species in sorted order with their population
        # percentage; the leading newline is stripped via outputs[1:].
        outputs = '''
Ash 13.793103
Aspen 3.448276
Basswood 3.448276
Beech 3.448276
Black Walnut 3.448276
Cherry 3.448276
Cottonwood 3.448276
Cypress 3.448276
Gum 3.448276
Hackberry 3.448276
Hard Maple 3.448276
Hickory 3.448276
Pecan 3.448276
Poplan 3.448276
Red Alder 3.448276
Red Elm 3.448276
Red Oak 6.896552
Sassafras 3.448276
Soft Maple 3.448276
Sycamore 3.448276
White Oak 10.344828
Willow 3.448276
Yellow Birch 3.448276
'''
        # Redirect stdin/stdout, run the solver, then verify the captured
        # output and that the solver consumed all of its input.
        with patch('sys.stdin', io.StringIO(inputs[1:])) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_hardwoodspecies.main()
        self.assertEqual(stdout.getvalue(), outputs[1:])
        self.assertEqual(stdin.read(), '')
###############################################################################
if __name__ == '__main__':
    unittest.main()  # allow running this test module directly
|
internship2016/sovolo | app/user/templatetags/user_tags.py | Python | mit | 902 | 0 | from django import template
# Shared registry that every template tag in this module attaches to.
register = template.Library()
@register.inclusion_tag('user/user_list.html', takes_context=True)
def user_list(context, users, title):
    """Context for the compact user-list partial.

    When an event is present in the rendering context it is exposed to the
    template under the ``object`` key.
    """
    payload = {'users': users, 'title': title}
    if 'event' in context:
        payload['object'] = context['event']
    return payload
@register.inclusion_tag('user/user_list_large.html', takes_context=True)
def user_list_large(context, users, title):
    """Context for the large user-list partial (context is unused)."""
    return {'users': users, 'title': title}
@register.inclusion_tag('user/template_skilllist.html', takes_context=True)
def skill_list(context, skills, title):
    """Context for the skill-list partial; exposes the requesting user."""
    current_user = context['request'].user
    return {'skills': skills, 'title': title, 'user': current_user}
@register.simple_tag
def query_transform(request, **kwargs):
    """Return the current query string with the given parameters overridden.

    Copies ``request.GET`` so the original QueryDict is never mutated; each
    keyword argument replaces (or adds) one parameter.
    """
    params = request.GET.copy()
    for name, value in kwargs.items():
        params[name] = value
    return params.urlencode()
|
Tan0/ironic | ironic/drivers/modules/ipmitool.py | Python | apache-2.0 | 44,249 | 0.000023 | # coding=utf-8
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 NTT DOCOMO, INC.
# Copyright 2014 International Business Machines Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
IPMI power manager driver.
Uses the 'ipmitool' command (http://ipmitool.sourceforge.net/) to remotely
manage hardware. This includes setting the boot device, getting a
serial-over-LAN console, and controlling the power state of the machine.
NOTE THAT CERTAIN DISTROS MAY INSTALL openipmi BY DEFAULT, INSTEAD OF ipmitool,
WHICH PROVIDES DIFFERENT COMMAND-LINE OPTIONS AND *IS NOT SUPPORTED* BY THIS
DRIVER.
"""
import contextlib
import os
import re
import subprocess
import tempfile
import time
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import excutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.common.i18n import _LI
from ironic.common.i18n import _LW
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.drivers import base
from ironic.drivers.modules import console_utils
CONF = cfg.CONF
CONF.import_opt('retry_timeout',
'ironic.drivers.modules.ipminative',
group='ipmi')
CONF.import_opt('min_command_interval',
'ironic.drivers.modules.ipminative',
group='ipmi')
LOG = logging.getLogger(__name__)
# Accepted values for the corresponding driver_info fields, validated
# when a node's IPMI credentials are parsed.
VALID_PRIV_LEVELS = ['ADMINISTRATOR', 'CALLBACK', 'OPERATOR', 'USER']
VALID_PROTO_VERSIONS = ('2.0', '1.5')
# driver_info keys this driver understands; the _() strings surface in
# driver property listings shown to operators.
REQUIRED_PROPERTIES = {
    'ipmi_address': _("IP address or hostname of the node. Required.")
}
OPTIONAL_PROPERTIES = {
    'ipmi_password': _("password. Optional."),
    'ipmi_priv_level': _("privilege level; default is ADMINISTRATOR. One of "
                         "%s. Optional.") % ', '.join(VALID_PRIV_LEVELS),
    'ipmi_username': _("username; default is NULL user. Optional."),
    'ipmi_bridging': _("bridging_type; default is \"no\". One of \"single\", "
                       "\"dual\", \"no\". Optional."),
    'ipmi_transit_channel': _("transit channel for bridged request. Required "
                              "only if ipmi_bridging is set to \"dual\"."),
    'ipmi_transit_address': _("transit address for bridged request. Required "
                              "only if ipmi_bridging is set to \"dual\"."),
    'ipmi_target_channel': _("destination channel for bridged request. "
                             "Required only if ipmi_bridging is set to "
                             "\"single\" or \"dual\"."),
    'ipmi_target_address': _("destination address for bridged request. "
                             "Required only if ipmi_bridging is set "
                             "to \"single\" or \"dual\"."),
    'ipmi_local_address': _("local IPMB address for bridged requests. "
                            "Used only if ipmi_bridging is set "
                            "to \"single\" or \"dual\". Optional."),
    'ipmi_protocol_version': _('the version of the IPMI protocol; default '
                               'is "2.0". One of "1.5", "2.0". Optional.'),
}
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
CONSOLE_PROPERTIES = {
    'ipmi_terminal_port': _("node's UDP port to connect to. Only required for "
                            "console access.")
}
# Mapping of driver_info suffix -> ipmitool command-line flag used when
# building bridged requests.
BRIDGING_OPTIONS = [('local_address', '-m'),
                    ('transit_channel', '-B'), ('transit_address', '-T'),
                    ('target_channel', '-b'), ('target_address', '-t')]
# Module-level caches: timestamp of the last IPMI command per BMC address,
# and lazily-probed ipmitool capability flags (None = not yet probed).
LAST_CMD_TIME = {}
TIMING_SUPPORT = None
SINGLE_BRIDGE_SUPPORT = None
DUAL_BRIDGE_SUPPORT = None
TMP_DIR_CHECKED = None
# Harmless probe commands used by _check_option_support() to detect which
# command-line options the installed ipmitool accepts.
ipmitool_command_options = {
    'timing': ['ipmitool', '-N', '0', '-R', '0', '-h'],
    'single_bridge': ['ipmitool', '-m', '0', '-b', '0', '-t', '0', '-h'],
    'dual_bridge': ['ipmitool', '-m', '0', '-b', '0', '-t', '0',
                    '-B', '0', '-T', '0', '-h']}
# Note(TheJulia): This string is hardcoded in ipmitool's lanplus driver
# and is substituted in return for the error code received from the IPMI
# controller.  As of 1.8.15, no internationalization support appears to
# be in ipmitool which means the string should always be returned in this
# form regardless of locale.
IPMITOOL_RETRYABLE_FAILURES = ['insufficient resources for session']
def _check_option_support(options):
    """Probe, once per option, whether the local ipmitool supports it.

    Updates the module-level capability flags (via _is_option_supported) so
    that any driver interface class in this module can consult them. It is
    intended to be called from the __init__ method of such classes only.

    :param options: list of ipmitool options to be checked
    :raises: OSError if ipmitool cannot be executed at all
    """
    for option in options:
        if _is_option_supported(option) is not None:
            # Already probed earlier; nothing to do.
            continue
        try:
            probe_cmd = ipmitool_command_options[option]
            # NOTE(cinerama): use subprocess.check_call to
            # check options & suppress ipmitool output to
            # avoid alarming people
            with open(os.devnull, 'wb') as devnull:
                subprocess.check_call(probe_cmd, stdout=devnull,
                                      stderr=devnull)
        except subprocess.CalledProcessError:
            LOG.info(_LI("Option %(opt)s is not supported by ipmitool"),
                     {'opt': option})
            _is_option_supported(option, False)
        else:
            LOG.info(_LI("Option %(opt)s is supported by ipmitool"),
                     {'opt': option})
            _is_option_supported(option, True)
def _is_option_supported(option, is_supported=None):
    """Read or record whether a particular ipmitool option is supported.

    :param option: specific ipmitool option ('single_bridge', 'dual_bridge'
        or 'timing')
    :param is_supported: Optional Boolean. When given and the corresponding
        module-level flag is still unset, the flag is assigned this value.
    :returns: True if the option is supported, False if it is not, or None
        when it has not been probed yet (or the option is unrecognised).
    """
    global SINGLE_BRIDGE_SUPPORT
    global DUAL_BRIDGE_SUPPORT
    global TIMING_SUPPORT

    if option == 'single_bridge':
        if SINGLE_BRIDGE_SUPPORT is None and is_supported is not None:
            SINGLE_BRIDGE_SUPPORT = is_supported
        return SINGLE_BRIDGE_SUPPORT
    if option == 'dual_bridge':
        if DUAL_BRIDGE_SUPPORT is None and is_supported is not None:
            DUAL_BRIDGE_SUPPORT = is_supported
        return DUAL_BRIDGE_SUPPORT
    if option == 'timing':
        if TIMING_SUPPORT is None and is_supported is not None:
            TIMING_SUPPORT = is_supported
        return TIMING_SUPPORT
def _console_pwfile_path(uuid):
    """Return the temp-file path storing the IPMI password for a console."""
    return os.path.join(CONF.tempdir, "%(uuid)s.pw" % {'uuid': uuid})
@contextlib.contextmanager
def _make_password_file(password):
"""Makes a temporary file that contains the password.
:param password: the password
:returns: the absolute pathname of the tem | porary file
:raises: PasswordFileFailedToCreate from creat |
marcharper/stationary | stationary/utils/math_helpers.py | Python | mit | 5,555 | 0.00072 | import numpy
from numpy import log
try:
from scipy.misc import logsumexp
except ImportError:
from numpy import logaddexp
logsumexp = logaddexp.reduce
def slice_dictionary(d, N, slice_index=0, slice_value=0):
    """
    Take a three dimensional slice from a four dimensional dictionary by
    pinning one coordinate (slice_index) to slice_value.
    """
    sliced = dict()
    for state in simplex_generator(N, 2):
        full_state = list(state)
        full_state.insert(slice_index, slice_value)
        sliced[state] = d[tuple(full_state)]
    return sliced
def squared_error(d1, d2):
    """
    Compute the Euclidean distance (square root of the summed squared
    error) between two equal-length vectors.
    """
    total = 0.
    for idx in range(len(d1)):
        diff = d1[idx] - d2[idx]
        total += diff * diff
    return numpy.sqrt(total)
def squared_error_dict(d1, d2):
    """
    Compute the Euclidean distance between two vectors stored as
    dictionaries keyed identically.
    """
    total = 0.
    for key in d1.keys():
        diff = d1[key] - d2[key]
        total += diff * diff
    return numpy.sqrt(total)
def multiply_vectors(a, b):
    """Element-wise product of two sequences (length of ``a`` governs)."""
    return [a[idx] * b[idx] for idx in range(len(a))]
def dot_product(a, b):
    """Inner product sum_i a_i * b_i of two sequences."""
    total = 0
    for idx in range(len(a)):
        total += a[idx] * b[idx]
    return total
def normalize(x):
    """Rescale the mutable sequence ``x`` in place so it sums to 1.

    Returns the same (mutated) object for convenience.
    """
    total = float(sum(x))
    for idx in range(len(x)):
        x[idx] /= total
    return x
def normalize_dictionary(x):
    """Rescale the dict's values in place so they sum to 1; returns x."""
    total = float(sum(x.values()))
    for key in x.keys():
        x[key] /= total
    return x
def inc_factorial(x, n):
    """Rising factorial x(x+1)...(x+n-1) as a float (1.0 when n == 0)."""
    product = 1.
    for offset in range(n):
        product *= x + offset
    return product
def factorial(i):
    """i! as a float; returns 1.0 for i < 2."""
    product = 1.
    for j in range(2, i + 1):
        product *= j
    return product
def log_inc_factorial(x, n):
    """Natural log of the rising factorial x(x+1)...(x+n-1).

    Bug fix: the accumulator previously started at 1.0 (copied from the
    multiplicative ``inc_factorial``), adding a spurious +1 to every
    result; the additive identity for a sum of logs is 0.
    """
    total = 0.
    for i in range(n):
        total += log(x + i)
    return total
def log_factorial(i):
    """Natural log of i!.

    Bug fix: the accumulator previously started at 1.0 (copied from the
    multiplicative ``factorial``), adding a spurious +1 to every result;
    the additive identity for a sum of logs is 0.
    """
    total = 0.
    for j in range(2, i + 1):
        total += log(j)
    return total
def simplex_generator(N, d=2):
    """
    Generates a discretization of the simplex.

    Parameters
    ----------
    N: int
        The number of subdivisions in each dimension
    d: int, 2
        The dimension of the simplex (the number of population types is d+1)

    Yields
    ------
    (d+1)-tuples of numbers summing to N. The total number of yielded
    tuples equals the simplicial polytopic number binom{N + d - 1}{d}
    (see https://en.wikipedia.org/wiki/Figurate_number).
    """
    if d == 1:
        for i in range(N + 1):
            yield (i, N - i)
    elif d > 1:
        # Fix the first coordinate, then recurse on the remainder.
        for first in range(N + 1):
            for tail in simplex_generator(N - first, d - 1):
                yield (first,) + tail
def one_step_generator(d):
    """
    Generates the step vectors (+1 in one coordinate, -1 in another) that
    construct all neighboring states in the dimension d simplex.
    """
    if d == 1:
        yield [1, -1]
        yield [-1, 1]
        return
    for plus in range(d + 1):
        for minus in range(d + 1):
            if minus == plus:
                continue
            step = [0] * (d + 1)
            step[plus], step[minus] = 1, -1
            yield step
def one_step_indicies_generator(d):
    """
    Generates the (plus, minus) index pairs that form all neighboring
    states, adding +1 at one index and -1 at another.
    """
    if d == 1:
        yield [0, 1]
        yield [1, 0]
        return
    for plus in range(d + 1):
        for minus in range(d + 1):
            if minus != plus:
                yield (plus, minus)
def kl_divergence(p, q):
    """
    Computes the KL-divergence (relative entropy) of two distributions.

    Parameters
    ----------
    p, q: lists
        The probability distributions to compute the KL-divergence for

    Returns
    -------
    float, the KL-divergence of p and q; NaN when q is zero somewhere
    that p is not (support mismatch).
    """
    total = 0.
    for i in range(len(p)):
        if p[i] == 0:
            # By convention 0 * log(0) contributes nothing.
            continue
        if q[i] == 0:
            return float('nan')
        try:
            total += p[i] * log(p[i])
        except (ValueError, ZeroDivisionError):
            continue
        try:
            total -= p[i] * log(q[i])
        except (ValueError, ZeroDivisionError):
            continue
    return total
def kl_divergence_dict(p, q):
    """
    Computes the KL-divergence of distributions given as dictionaries
    sharing the same keys.
    """
    p_values = []
    q_values = []
    for key in p.keys():
        p_values.append(p[key])
        q_values.append(q[key])
    return kl_divergence(p_values, q_values)
def q_divergence(q):
    """
    Returns the divergence function corresponding to the parameter value q.
    For q == 0 this function is one-half the squared Euclidean distance;
    for q == 1 it is the KL-divergence; other values interpolate the family.
    """
    if q == 0:
        def divergence(x, y):
            delta = x - y
            return 0.5 * numpy.dot(delta, delta)
        return divergence
    if q == 1:
        return kl_divergence
    if q == 2:
        def divergence(x, y):
            acc = 0.
            for i in range(len(x)):
                acc += log(x[i] / y[i]) + 1 - x[i] / y[i]
            return -acc
        return divergence
    q = float(q)

    def divergence(x, y):
        acc = 0.
        for i in range(len(x)):
            acc += (numpy.power(y[i], 2 - q) - numpy.power(x[i], 2 - q)) / (2 - q)
            acc -= numpy.power(y[i], 1 - q) * (y[i] - x[i])
        return -acc / (1 - q)
    return divergence
def shannon_entropy(p):
    """Shannon entropy -sum_i p_i log p_i of a distribution, in nats.

    NOTE(review): the ValueError guard assumes math.log semantics; with
    numpy's log, log(0) yields -inf with a warning instead — confirm the
    inputs never contain exact zeros.
    """
    total = 0.
    for i in range(len(p)):
        try:
            total += p[i] * log(p[i])
        except ValueError:
            continue
    return -1. * total
def binary_entropy(p):
    """Entropy of a Bernoulli(p) random variable, in nats."""
    complement = 1 - p
    return -(p * log(p) + complement * log(complement))
|
flavour/eden | modules/templates/historic/CRMT/controllers.py | Python | mit | 3,111 | 0.007715 | # -*- coding | : utf-8 -*-
from gluon import current
#from gluon.html import *
from gluon.storage import Storage
from s3 import S3CustomController
THEME = "historic.CRMT"
# =============================================================================
class index(S3CustomController):
    """ Custom Home Page """

    def __call__(self):
        """Build the template context for the CRMT home page."""
        output = {}

        db = current.db
        s3db = current.s3db

        # Latest Activities: total count, plus the three most recent joined
        # to their organisation group.
        atable = s3db.project_activity
        query = (atable.deleted == False)
        output["total_activities"] = db(query).count()

        ogtable = s3db.org_group
        ltable = s3db.project_activity_group
        query &= (atable.id == ltable.activity_id) & \
                 (ogtable.id == ltable.group_id)
        rows = db(query).select(atable.id,
                                atable.name,
                                atable.date,
                                ogtable.name,
                                limitby = (0, 3),
                                orderby = ~atable.date
                                )

        latest_activities = []
        # NOTE(review): this mutates the deployment-wide date format for the
        # rest of the request, not just this page — confirm that is intended.
        current.deployment_settings.L10n.date_format = "%d %b %y"
        drepresent = atable.date.represent
        for row in rows:
            date = row["project_activity.date"]
            nice_date = drepresent(date) if date else ""
            latest_activities.append(Storage(id = row["project_activity.id"],
                                             name = row["project_activity.name"],
                                             date = nice_date,
                                             date_iso = date or "",
                                             org_group = row["org_group.name"],
                                             ))
        output["latest_activities"] = latest_activities

        # Which Map should we link to in "Know your community"?
        auth = current.auth
        table = s3db.gis_config
        if auth.is_logged_in() and auth.user.org_group_id:
            # Coalition Map: the config owned by the user's org group
            ogtable = s3db.org_group
            og = db(ogtable.id == auth.user.org_group_id).select(ogtable.pe_id,
                                                                 limitby=(0, 1)
                                                                 ).first()
            query = (table.pe_id == og.pe_id)
        else:
            # Default Map
            query = (table.uuid == "SITE_DEFAULT")
        config = db(query).select(table.id,
                                  limitby=(0, 1)
                                  ).first()
        # first() returns None when no config matched; avoid the previous
        # bare `except:`, which also hid unrelated errors.
        output["config_id"] = config.id if config else None

        self._view(THEME, "index.html")
        return output
# END =========================================================================
|
diego0020/PySurfer | examples/plot_freesurfer_normalization.py | Python | bsd-3-clause | 1,238 | 0 | """
Plot Freesurfer Normalization
=============================
This example shows how PySurfer can be used t | o examine the quality of
Freesurfer's curvature-driven normalization to a common template.
We are going to plot the contour of the subject's curvature estimate after
transforming that map into the common space (this step is performed outside
of PySurfer using the Freesurfer program ``mri_surf2surf``).
With a perfect transformation, the contour | lines should follow the light/dark
gray boundary on the fsaverage surface. Large deviations may reflect problems
with the underlying data that you should investigate.
"""
import nibabel as nib
from surfer import Brain
print(__doc__)
# Plot binarized curvature contours on both hemispheres of fsaverage; with
# a perfect normalization the contours follow the light/dark gray boundary.
brain = Brain("fsaverage", "both", "inflated")
for hemi in ["lh", "rh"]:
    # This file was created with mri_surf2surf
    curv = nib.load("example_data/%s.curv.fsaverage.mgz" % hemi)
    # Binarize the curvature at 0
    curv_bin = (curv.get_data() > 0).squeeze()
    # Add the data as a contour overlay, but turn off the colorbar
    brain.add_contour_overlay(curv_bin, min=0, max=1.5, n_contours=2,
                              line_width=3, hemi=hemi)
    brain.contour_list[-1]["colorbar"].visible = False
brain.show_view("dorsal")
|
sebp/scikit-survival | tests/test_column.py | Python | gpl-3.0 | 14,897 | 0.002215 | from collections import OrderedDict
import numpy
from numpy.testing import assert_array_almost_equal, assert_array_equal
import pandas
import pandas.testing as tm
import pytest
from sksurv import column
@pytest.fixture()
def numeric_data():
    # 10x5 frame holding 0..49; every column is an arithmetic progression
    # (mean 22.5, sample std with ddof=1 of ~15.138), so each standardized
    # column is the same z-score vector, captured literally below.
    data = pandas.DataFrame(numpy.arange(50, dtype=float).reshape(10, 5))
    expected = numpy.array([[-1.486301, -1.486301, -1.486301, -1.486301, -1.486301],
                            [-1.156012, -1.156012, -1.156012, -1.156012, -1.156012],
                            [-0.825723, -0.825723, -0.825723, -0.825723, -0.825723],
                            [-0.495434, -0.495434, -0.495434, -0.495434, -0.495434],
                            [-0.165145, -0.165145, -0.165145, -0.165145, -0.165145],
                            [0.165145, 0.165145, 0.165145, 0.165145, 0.165145],
                            [0.495434, 0.495434, 0.495434, 0.495434, 0.495434],
                            [0.825723, 0.825723, 0.825723, 0.825723, 0.825723],
                            [1.156012, 1.156012, 1.156012, 1.156012, 1.156012],
                            [1.486301, 1.486301, 1.486301, 1.486301, 1.486301]])
    return data, expected
@pytest.fixture()
def non_numeric_data_frame():
    # Questionnaire-style frame: two object-dtype columns (using None for
    # missing values) and one categorical column (using NaN for missing).
    data = pandas.DataFrame({'q1': ['no', 'no', 'yes', 'yes', 'no', 'no', None, 'yes', 'no', None],
                             'q2': ['maybe', 'no', 'yes', 'maybe', 'yes', 'no', None, 'maybe', 'no',
                                    'yes'],
                             'q3': [1, 2, 1, 3, 1, 2, numpy.nan, numpy.nan, 3, 2]})
    data['q3'] = data['q3'].astype('category')
    return data
class TestColumn:
    """Tests for ``column.standardize`` across frame and array inputs."""

    @staticmethod
    def test_standardize_numeric(numeric_data):
        frame, want = numeric_data
        out = column.standardize(frame)
        assert isinstance(out, pandas.DataFrame)
        assert_array_almost_equal(want, out)

    @staticmethod
    def test_standardize_float_numpy_array(numeric_data):
        frame, want = numeric_data
        out = column.standardize(frame.values)
        assert isinstance(out, numpy.ndarray)
        assert_array_almost_equal(want, out)

    @staticmethod
    def test_standardize_int_numpy_array(numeric_data):
        frame, want = numeric_data
        out = column.standardize(frame.values.astype(int))
        assert isinstance(out, numpy.ndarray)
        assert_array_almost_equal(want, out)

    @staticmethod
    def test_standardize_not_inplace(numeric_data):
        frame, want = numeric_data
        values = frame.values
        snapshot = values.copy()
        out = column.standardize(values)
        assert_array_almost_equal(want, out)
        # the input array must be left untouched
        assert_array_almost_equal(snapshot, values)

    @staticmethod
    def test_standardize_non_numeric(non_numeric_data_frame):
        out = column.standardize(non_numeric_data_frame)
        # non-numeric columns pass through unchanged
        assert isinstance(out, pandas.DataFrame)
        tm.assert_frame_equal(non_numeric_data_frame, out)

    @staticmethod
    def test_standardize_non_numeric_numpy_array(non_numeric_data_frame):
        out = column.standardize(non_numeric_data_frame.values)
        assert isinstance(out, numpy.ndarray)
        # missing entries stay missing ...
        assert_array_equal(pandas.isnull(non_numeric_data_frame),
                           pandas.isnull(out))
        # ... and rows without missing values pass through unchanged
        complete_rows = [0, 1, 2, 3, 4, 5, 8, 9]
        assert_array_equal(non_numeric_data_frame.iloc[complete_rows, :].values,
                           out[complete_rows, :])

    @staticmethod
    def test_standardize_mixed(numeric_data, non_numeric_data_frame):
        frame, want = numeric_data
        mixed = pandas.concat((frame, non_numeric_data_frame), axis=1)
        out = column.standardize(mixed)
        assert isinstance(out, pandas.DataFrame)
        assert_array_almost_equal(want, out.iloc[:, :frame.shape[1]].values)
        tm.assert_frame_equal(non_numeric_data_frame,
                              out.iloc[:, frame.shape[1]:])

    @staticmethod
    def test_standardize_mixed_numpy_array(numeric_data, non_numeric_data_frame):
        frame, _ = numeric_data
        mixed = pandas.concat((frame, non_numeric_data_frame), axis=1)
        out = column.standardize(mixed.values)
        assert_array_equal(pandas.isnull(mixed), pandas.isnull(out))
        assert_array_almost_equal(frame, out[:, :frame.shape[1]])
        complete_rows = [0, 1, 2, 3, 4, 5, 8, 9]
        assert_array_equal(non_numeric_data_frame.iloc[complete_rows, :].values,
                           out[:, frame.shape[1]:][complete_rows, :])
class TestEncodeCategorical:
@staticmethod
def test_series_categorical():
input_series = pandas.Series(pandas.Categorical.from_codes([1, 1, 0, 2, 0, 1, 2, 1, 2, 0, 0, 1, 2, 2],
["small", "medium", "large"], ordered=False),
name="a_series")
expected_df = pandas.DataFrame.from_dict(OrderedDict(
[("a_series=medium", numpy.array([1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0], dtype=float)),
("a_series=large", numpy.array([0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1], dtype=float))
])
)
actual_df = column.encode_categorical(input_series)
tm.assert_frame_equal(actual_df, expected_df, check_exact=True)
@staticmethod
def test_series_numeric():
input_series = pandas.Series([0.5, 0.1, 10, 25, 3.8, 11, 2256, -1, -0.2, 3.14], name="a_series")
with pytest.raises(TypeError, match="series must be of categorical dtype, but was float"):
column.encode_categorical(input_series)
@staticmethod
def test_case1():
a = numpy.r_[
numpy.repeat(["large"], 10),
numpy.repeat(["small"], 5),
numpy.repeat(["tiny"], 13),
numpy.repeat(["medium"], 3)]
b = numpy.r_[
numpy.repeat(["yes"], 8),
numpy.repeat(["no"], 23)]
rnd = numpy.random.RandomState(0)
c = rnd.randn(len(a))
df = pandas.DataFrame.from_dict(
OrderedDict([
("a_category", a),
("a_binary", b),
("a_number", c.copy())])
)
actual_df = column.encode_categorical(df)
eb = numpy.r_[
numpy.repeat([1.], 8),
numpy.repeat([0.], 23)]
a_tiny = numpy.zeros(31, dtype=float)
a_tiny[15:28] = 1
a_small = numpy.zeros(31, dtype=float)
a_small[10:15] = 1
a_medium = numpy.zeros(31, dtype=float)
a_medium[-3:] = 1
expected_df = pandas.DataFrame.from_dict(
OrderedDict([
("a_category=medium", a_medium),
("a_category=small", a_small),
("a_category=tiny", a_tiny),
("a_binary=yes", eb),
("a_number", c.copy())])
)
assert actual_df.shape == expected_df.shape
tm.assert_frame_equal(actual_df, expected_df, check_exact=True)
@staticmethod
def test_duplicate_index():
a = numpy.r_[
numpy.repeat(["large"], 10),
numpy.repeat(["small"], 6),
numpy.repeat(["tiny"], 13),
numpy.repeat(["medium"], 3)]
rnd = numpy.random.RandomState(0)
c = rnd.randn(len(a))
index = numpy.ceil(numpy.arange(0, len(a) // 2, 0.5))
df = pandas.DataFrame.from_dict(OrderedDict([
("a_category", pandas.Series(a, index=index)),
("a_number", pandas.Series(c, index=index, copy=True))
]))
actual_df = column.encode_categorical(df)
expected_df = pandas.DataFrame(numpy.zeros((32, 3), dtype=float),
index=index,
columns=["a_ |
slundberg/shap | shap/explainers/other/_maple.py | Python | mit | 11,403 | 0.007717 | from .._explainer import Explainer
import numpy as np
from sklearn.model_selection import train_test_split
class Maple(Explainer):
    """ Simply wraps MAPLE into the common SHAP interface.
    Parameters
    ----------
    model : function
        User supplied function that takes a matrix of samples (# samples x # features) and
        computes the output of the model for those samples. The output can be a vector
        (# samples) or a matrix (# samples x # model outputs).
    data : numpy.array
        The background dataset.
    """
    def __init__(self, model, data):
        self.model = model
        # Duck-typed DataFrame check (avoids importing pandas just for isinstance).
        if str(type(data)).endswith("pandas.core.frame.DataFrame'>"):
            data = data.values
        self.data = data
        # Per-feature mean of the background data; used by multiply_by_input.
        self.data_mean = self.data.mean(0)
        # Probe the model output shape to decide vector vs. matrix output.
        out = self.model(data)
        if len(out.shape) == 1:
            self.out_dim = 1
            self.flat_out = True
        else:
            self.out_dim = out.shape[1]
            self.flat_out = False
        # 80/20 train/validation split used to fit the MAPLE surrogate.
        X_train, X_valid, y_train, y_valid = train_test_split(data, out, test_size=0.2, random_state=0)
        self.explainer = MAPLE(X_train, y_train, X_valid, y_valid)
    def attributions(self, X, multiply_by_input=False):
        """ Compute the MAPLE coef attributions.
        Parameters
        ----------
        multiply_by_input : bool
            If true, this multiplies the learned coefficients by the mean-centered input. This makes these
            values roughly comparable to SHAP values.
        """
        if str(type(X)).endswith("pandas.core.frame.DataFrame'>"):
            X = X.values
        out = [np.zeros(X.shape) for j in range(self.out_dim)]
        for i in range(X.shape[0]):
            # exp[0] is the intercept; only per-feature coefficients are kept.
            exp = self.explainer.explain(X[i])["coefs"]
            # NOTE(review): only out[0] is ever filled, so for multi-output
            # models out[1:] remain zero — confirm this is intended upstream.
            out[0][i,:] = exp[1:]
            if multiply_by_input:
                out[0][i,:] = out[0][i,:] * (X[i] - self.data_mean)
        return out[0] if self.flat_out else out
class TreeMaple(Explainer):
    """ Simply wraps tree MAPLE into the common SHAP interface.
    Parameters
    ----------
    model : sklearn estimator
        A fitted sklearn GradientBoostingRegressor or RandomForestRegressor;
        its trees are reused as MAPLE's forest ensemble.
    data : numpy.array
        The background dataset.
    """
    def __init__(self, model, data):
        self.model = model
        # Map the sklearn model class to the ensemble-type tag MAPLE understands.
        # MAPLE only recognizes "rf" and "gbrt" (see its feature-importance
        # branches); the previous "gbdt" tag matched neither branch, which left
        # all feature importance scores at zero for gradient boosted models.
        if str(type(model)).endswith("sklearn.ensemble.gradient_boosting.GradientBoostingRegressor'>"):
            fe_type = "gbrt"
        # elif str(type(model)).endswith("sklearn.tree.tree.DecisionTreeClassifier'>"):
        #     pass
        elif str(type(model)).endswith("sklearn.ensemble.forest.RandomForestRegressor'>"):
            fe_type = "rf"
        # elif str(type(model)).endswith("sklearn.ensemble.forest.RandomForestClassifier'>"):
        #     pass
        # elif str(type(model)).endswith("xgboost.sklearn.XGBRegressor'>"):
        #     pass
        # elif str(type(model)).endswith("xgboost.sklearn.XGBClassifier'>"):
        #     pass
        else:
            raise Exception("The passed model is not yet supported by TreeMapleExplainer: " + str(type(model)))
        # Duck-typed DataFrame check (avoids importing pandas just for isinstance).
        if str(type(data)).endswith("pandas.core.frame.DataFrame'>"):
            data = data.values
        self.data = data
        # Per-feature mean of the background data; used by multiply_by_input.
        self.data_mean = self.data.mean(0)
        # Probe the model output shape with a single sample.
        out = self.model.predict(data[0:1])
        if len(out.shape) == 1:
            self.out_dim = 1
            self.flat_out = True
        else:
            self.out_dim = self.model.predict(data[0:1]).shape[1]
            self.flat_out = False
        #_, X_valid, _, y_valid = train_test_split(data, self.model.predict(data), test_size=0.2, random_state=0)
        preds = self.model.predict(data)
        self.explainer = MAPLE(data, preds, data, preds, fe=self.model, fe_type=fe_type)
    def attributions(self, X, multiply_by_input=False):
        """ Compute the MAPLE coef attributions.
        Parameters
        ----------
        multiply_by_input : bool
            If true, this multiplies the learned coefficients by the mean-centered input. This makes these
            values roughly comparable to SHAP values.
        """
        if str(type(X)).endswith("pandas.core.frame.DataFrame'>"):
            X = X.values
        out = [np.zeros(X.shape) for j in range(self.out_dim)]
        for i in range(X.shape[0]):
            # exp[0] is the intercept; only per-feature coefficients are kept.
            exp = self.explainer.explain(X[i])["coefs"]
            out[0][i,:] = exp[1:]
            if multiply_by_input:
                out[0][i,:] = out[0][i,:] * (X[i] - self.data_mean)
        return out[0] if self.flat_out else out
#################################################
# The code below was authored by Gregory Plumb and is
# from: https://github.com/GDPlumb/MAPLE/blob/master/Code/MAPLE.py
# It has by copied here to allow for benchmark comparisions. Please see
# the original repo for the latest version, supporting material, and citations.
#################################################
# Notes:
# - Assumes any required data normalization has already been done
# - Can pass Y (desired response) instead of MR (model fit to Y) to make fitting MAPLE to datasets easy
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.lin | ear_model import Ridge
from sklearn.metrics import mean_squared_error
import numpy as np
class MAPLE:
def | __init__(self, X_train, MR_train, X_val, MR_val, fe_type = "rf", fe=None, n_estimators = 200, max_features = 0.5, min_samples_leaf = 10, regularization = 0.001):
# Features and the target model response
self.X_train = X_train
self.MR_train = MR_train
self.X_val = X_val
self.MR_val = MR_val
# Forest Ensemble Parameters
self.n_estimators = n_estimators
self.max_features = max_features
self.min_samples_leaf = min_samples_leaf
# Local Linear Model Parameters
self.regularization = regularization
# Data parameters
num_features = X_train.shape[1]
self.num_features = num_features
num_train = X_train.shape[0]
self.num_train = num_train
num_val = X_val.shape[0]
# Fit a Forest Ensemble to the model response
if fe is None:
if fe_type == "rf":
fe = RandomForestRegressor(n_estimators = n_estimators, min_samples_leaf = min_samples_leaf, max_features = max_features)
elif fe_type == "gbrt":
fe = GradientBoostingRegressor(n_estimators = n_estimators, min_samples_leaf = min_samples_leaf, max_features = max_features, max_depth = None)
else:
print("Unknown FE type ", fe)
import sys
sys.exit(0)
fe.fit(X_train, MR_train)
else:
self.n_estimators = n_estimators = len(fe.estimators_)
self.fe = fe
train_leaf_ids = fe.apply(X_train)
self.train_leaf_ids = train_leaf_ids
val_leaf_ids_list = fe.apply(X_val)
# Compute the feature importances: Non-normalized @ Root
scores = np.zeros(num_features)
if fe_type == "rf":
for i in range(n_estimators):
splits = fe[i].tree_.feature #-2 indicates leaf, index 0 is root
if splits[0] != -2:
scores[splits[0]] += fe[i].tree_.impurity[0] #impurity reduction not normalized per tree
elif fe_type == "gbrt":
for i in range(n_estimators):
splits = fe[i, 0].tree_.feature #-2 indicates leaf, index 0 is root
if splits[0] != -2:
scores[splits[0]] += fe[i, 0].tree_.impurity[0] #impurity reduction not normalized per tree
self.feature_scores = scores
mostImpFeats = np.argsort(-scores)
# Find the number of features to use for MAPLE
retain_best = 0
rmse_best = np.inf
for retain in range(1, num_features + 1):
# Drop less important features for local regression
X_train_p = np.delete(X_train, mostImpFeats[retain:], |
redhat-cip/python-dciclient | dciclient/v1/shell_commands/component.py | Python | apache-2.0 | 2,918 | 0.001028 | # -*- encoding: utf-8 -*-
#
# Copyright 2015-2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dciclient.v1.utils import active_string
from dciclient.v1.utils import validate_json
from dciclient.v1.api import component
from dciclient.v1.api import topic
def list(context, args):
    """List the components of a topic, honoring paging/filter CLI options."""
    option_names = ("id", "sort", "limit", "offset", "where")
    params = dict((name, getattr(args, name)) for name in option_names)
    return topic.list_components(context, **params)
def create(context, args):
    """Create a component from CLI args.

    The JSON ``data`` payload is validated and ``state`` is normalized to the
    API's active/inactive string form before the request is issued.
    """
    field_names = (
        "name",
        "type",
        "canonical_project_name",
        "title",
        "message",
        "url",
        "team_id",
        "topic_id",
        "state",
        "data",
        "tags",
        "released_at",
    )
    params = {name: getattr(args, name) for name in field_names}
    params["data"] = validate_json(context, "data", params["data"])
    params["state"] = active_string(params["state"])
    return component.create(context, **params)
def delete(context, args):
    """Delete component ``args.id``; ``args.etag`` guards concurrent edits."""
    component_id = args.id
    etag = args.etag
    return component.delete(context, component_id, etag)
def show(context, args):
    """Fetch and return a single component identified by ``args.id``."""
    component_id = args.id
    return component.get(context, component_id)
def file_upload(context, args):
    """Upload the local file at ``args.path`` as an attachment of component ``args.id``."""
    upload_kwargs = {"id": args.id, "file_path": args.path}
    return component.file_upload(context, **upload_kwargs)
def file_show(context, args):
    """Retrieve the metadata of one file attached to a component."""
    show_kwargs = {"id": args.id, "file_id": args.file_id}
    return component.file_get(context, **show_kwargs)
def file_download(context, args):
    """Download one component file to the local ``target`` path.

    (Reconstructed: the source line was garbled by stray field-separator
    tokens inside ``getattr(args, k)``.)
    """
    params = {k: getattr(args, k) for k in ["id", "file_id", "target"]}
    return component.file_download(context, **params)
def file_list(context, args):
    """List files attached to a component, honoring paging/filter options.

    (Reconstructed: the ``component.file_list`` call was garbled by a stray
    field-separator token in the source.)
    """
    params = {k: getattr(args, k) for k in ["id", "sort", "limit", "offset", "where"]}
    return component.file_list(context, **params)
def file_delete(context, args):
    """Delete one file from a component.

    Returns the API response, for consistency with every other command in
    this module (the original silently dropped it).
    """
    return component.file_delete(context, id=args.id, file_id=args.file_id, etag=args.etag)
def update(context, args):
    """Update a component in place: fetch its current etag, validate the JSON
    ``data`` payload, normalize ``state``, then issue the update request."""
    params = {
        k: getattr(args, k)
        for k in [
            "name",
            "type",
            "canonical_project_name",
            "title",
            "message",
            "url",
            "state",
            "data",
            "tags",
        ]
    }
    # The API requires the current etag for optimistic concurrency control.
    component_info = component.get(context, args.id)
    params["etag"] = component_info.json()["component"]["etag"]
    params["data"] = validate_json(context, "data", params["data"])
    params["state"] = active_string(params["state"])
    return component.update(context, id=args.id, **params)
|
overtherain/scriptfile | software/googleAppEngine/lib/PyAMF/doc/tutorials/examples/actionscript/ohloh/python/client.py | Python | mit | 1,319 | 0.000758 | #!/usr/bin/python
#
# Copyright (c) The PyAMF Project.
# See LICENSE for details.
"""
This is an example of using the Ohloh API from a Python client.
Detailed information can be found at the Ohloh website:
http://www.ohloh.net/api
This example uses the ElementTree library for XML parsing
(included in Python 2.5 and newer):
http://effbot.org/zone/element-index.htm
This example retrieves basic Ohloh account information
and outputs it as simple name: value pairs.
Pass your Ohloh API key as the first parameter to this script.
Ohloh API keys are free. If you do not have one, you can obtain
one at the Ohloh website:
http://www.ohloh.net/api_keys/new
Pass the email address of the account as the second parameter
to this script.
"""
import sys
import ohloh
if len(sys.argv) == 3:
ap | i_key = sys.argv[1]
email = sys.argv[2]
else:
print "Usage: client.py <api-key> <email-address>"
sys.exit()
elem = ohloh.getAccount(email, api_key)
# Output all the immediate child properties of an Account
for node in elem.find("result/account"):
if node.tag == "kudo_score":
print "%s:" % node.tag
for score in elem.find("result/account/kudo_score"):
print "\t%s:\t%s" % (score.tag, score.text)
else:
| print "%s:\t%s" % (node.tag, node.text)
|
estaban/pyload | module/plugins/hooks/UnSkipOnFail.py | Python | gpl-3.0 | 4,031 | 0.000248 | # -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
from os.path import basename
from module.utils import fs_encode
from module.plugins.Hook import Hook
from module.PyFile import PyFile
class UnSkipOnFail(Hook):
    """When a download fails, find skipped duplicate links that would have
    produced the same file and put them back into the queue."""
    __name__ = "UnSkipOnFail"
    __version__ = "0.01"
    __type__ = "hook"
    __config__ = [("activated", "bool", "Activated", True)]
    __description__ = """When a download fails, restart skipped duplicates"""
    __author_name__ = "hagg"
    __author_mail__ = None
    def downloadFailed(self, pyfile):
        """Hook callback invoked on a failed download: re-queue every
        skipped duplicate of `pyfile` so one of them can be retried."""
        pyfile_name = basename(pyfile.name)
        pid = pyfile.package().id
        msg = 'look for skipped duplicates for %s (pid:%s)...'
        self.logInfo(msg % (pyfile_name, pid))
        dups = self.findDuplicates(pyfile)
        for link in dups:
            # check if link is "skipped"(=4)
            if link.status == 4:
                lpid = link.packageID
                self.logInfo('restart "%s" (pid:%s)...' % (pyfile_name, lpid))
                self.setLinkStatus(link, "queued")
    def findDuplicates(self, pyfile):
        """ Search all packages for duplicate links to "pyfile".
        Duplicates are links that would overwrite "pyfile".
        To test for duplicity, the package folders and link names
        of two links are compared (basename(link.name)).
        So this method returns a list of all links with equal
        package-folders and filenames as "pyfile", except
        the data for "pyfile" itself.
        It does NOT check the link's status.
        """
        dups = []
        pyfile_name = fs_encode(basename(pyfile.name))
        # get packages (w/o files, as most file data is useless here)
        queue = self.core.api.getQueue()
        for package in queue:
            # check if package-folder equals pyfile's package folder
            if fs_encode(package.folder) == fs_encode(pyfile.package().folder):
                # now get packaged data w/ files/links
                pdata = self.core.api.getPackageData(package.pid)
                if pdata.links:
                    for link in pdata.links:
                        link_name = fs_encode(basename(link.name))
                        # check if link name collides with pdata's name
                        if link_name == pyfile_name:
                            # at last check if it is not pyfile itself
                            if link.fid != pyfile.id:
                                dups.append(link)
        return dups
    def setLinkStatus(self, link, new_status):
        """ Change status of "link" to "new_status".
        "link" has to be a valid FileData object,
        "new_status" has to be a valid status name
        (i.e. "queued" for this Plugin)
        It creates a temporary PyFile object using
        "link" data, changes its status, and tells
        the core.files-manager to save its data.
        """
        pyfile = PyFile(self.core.files,
                        link.fid,
                        link.url,
                        link.name,
                        link.size,
                        link.status,
                        link.error,
                        link.plugin,
                        link.packageID,
                        link.order)
        pyfile.setStatus(new_status)
        self.core.files.save()
        pyfile.release()
|
MSEMJEJME/Get-Dumped | renpy/text/text.py | Python | gpl-2.0 | 43,754 | 0.010011 | # Copyright 2004-2012 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Sof | tware,
# and to | permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import math
import renpy.display
from renpy.text.textsupport import \
TAG, TEXT, PARAGRAPH, DISPLAYABLE
import renpy.text.textsupport as textsupport
import renpy.text.texwrap as texwrap
import renpy.text.font as font
import renpy.text.extras as extras
try:
from _renpybidi import log2vis, WRTL, RTL, ON
except:
pass
class Blit(object):
    """
    A single blit command: render a texture rectangle onto a render.
    Carries the rectangle's position (x, y), size (w, h) and an alpha
    multiplier applied when blitting.
    """
    def __init__(self, x, y, w, h, alpha=1.0):
        self.x, self.y = x, y
        self.w, self.h = w, h
        self.alpha = alpha
    def __repr__(self):
        return "<Blit (%s, %s, %s, %s) %s>" % (self.x, self.y, self.w, self.h, self.alpha)
def outline_blits(blits, outline):
    """
    Given a list of blits, adjusts it for the given outline size. That means
    adding borders on the left and right of each line of blits. Returns a second
    list of blit objects.
    We assume that there are a discrete set of vertical areas that divide the
    original blits, and that no blit covers two vertical areas. So something
    like:
     _____________________________________
    |_____________________________________|
    |___________|_________________|_______|
    |_____________________|_______________|
    is fine, but:
     _____________________________________
    |                 |___________________|
    |_________________|___________________|
    is forbidden. That's an invariant that the blit_<method> functions are
    required to enforce.
    NOTE: this sorts the caller's `blits` list in place.
    """
    # Sort the blits.
    blits.sort(key=lambda b : (b.y, b.x))
    # The y coordinate that everything in the current line shares. This can
    # be adjusted in the output blits.
    line_y = 0
    # The y coordinate of the top of the current line.
    top_y = 0
    # The y coordinate of the bottom of the current line.
    bottom_y = 0
    # The maximum x coordinate of the previous blit on this line.
    max_x = 0
    rv = [ ]
    for b in blits:
        # Grow the rectangle by `outline` on each side (left/right, top/bottom).
        x0 = b.x
        x1 = b.x + b.w + outline * 2
        y0 = b.y
        y1 = b.y + b.h + outline * 2
        # Prevents some visual artifacting, where the two lines can overlap.
        y1 -= 1
        if line_y != y0:
            # Starting a new line: its top is the previous line's bottom, so
            # consecutive lines tile vertically without gaps.
            line_y = y0
            top_y = bottom_y
            max_x = 0
        y0 = top_y
        if y1 > bottom_y:
            bottom_y = y1
        # Clamp the left edge so this blit does not overlap the previous
        # blit's right border on the same line.
        if max_x > x0:
            x0 = max_x
        max_x = x1
        rv.append(Blit(x0, y0, x1 - x0, y1 - y0, b.alpha))
    return rv
class DrawInfo(object):
    """
    This object is supplied as a parameter to the draw method of the various
    segments. It has the following fields:
    `surface`
        The surface to draw to.
    `override_color`
        If not None, a color that's used for this outline/shadow.
    `outline`
        The amount to outline the text by.
    `displayable_blits`
        If not none, this is a list of (displayable, xo, yo) tuples. The draw
        method adds displayable blits to this list when this is not None.
    """
    # No implementation, this is set up in the layout object. This class is
    # intentionally just a named bag of attributes (no __init__ or methods).
class TextSegment(object):
    """
    This represents a segment of text that has a single set of properties
    applied to it.
    """
    def __init__(self, source=None):
        """
        Creates a new segment of text. If `source` is given, this starts off
        a copy of that source segment. Otherwise, it's up to the code that
        creates it to initialize it with defaults.
        """
        if source is not None:
            # Copy every style-related field from the source segment.
            self.antialias = source.antialias
            self.font = source.font
            self.size = source.size
            self.bold = source.bold
            self.italic = source.italic
            self.underline = source.underline
            self.strikethrough = source.strikethrough
            self.color = source.color
            self.black_color = source.black_color
            self.hyperlink = source.hyperlink
            self.kerning = source.kerning
            self.cps = source.cps
            self.ruby_top = source.ruby_top
            self.ruby_bottom = source.ruby_bottom
        else:
            # Only these defaults are set here; the remaining fields are
            # expected to be filled in (e.g. via take_style) before use.
            self.hyperlink = 0
            self.cps = 0
            self.ruby_top = False
            self.ruby_bottom = False
    def __repr__(self):
        return "<TextSegment font={font}, size={size}, bold={bold}, italic={italic}, underline={underline}, color={color}, black_color={black_color}, hyperlink={hyperlink}>".format(**self.__dict__)
    def take_style(self, style):
        """
        Takes the style of this text segment from the named style object.
        """
        self.antialias = style.antialias
        self.font = style.font
        self.size = style.size
        self.bold = style.bold
        self.italic = style.italic
        self.underline = style.underline
        self.strikethrough = style.strikethrough
        self.color = style.color
        self.black_color = style.black_color
        self.hyperlink = None
        self.kerning = style.kerning
        # NOTE(review): when style.slow_cps is a number (not True), self.cps
        # keeps its prior value before being scaled below — it looks like an
        # assignment from style.slow_cps may be missing here; confirm against
        # the upstream version of this file.
        if style.slow_cps is True:
            self.cps = renpy.game.preferences.text_cps
        self.cps = self.cps * style.slow_cps_multiplier
    # From here down is the public glyph API.
    def glyphs(self, s):
        """
        Return the list of glyphs corresponding to unicode string s.
        """
        fo = font.get_font(self.font, self.size, self.bold, self.italic, 0, self.antialias)
        rv = fo.glyphs(s)
        # Apply kerning to the glyphs.
        if self.kerning:
            textsupport.kerning(rv, self.kerning)
        # Tag every glyph with this segment's hyperlink id, if any.
        if self.hyperlink:
            for g in rv:
                g.hyperlink = self.hyperlink
        # Mark ruby (furigana) glyphs; bottom takes precedence over top.
        if self.ruby_bottom:
            textsupport.mark_ruby_bottom(rv)
        elif self.ruby_top:
            textsupport.mark_ruby_top(rv)
        return rv
    def draw(self, glyphs, di):
        """
        Draws the glyphs to surf.
        """
        # An override color (outline/shadow pass) replaces both colors.
        if di.override_color:
            color = di.override_color
            black_color = None
        else:
            color = self.color
            black_color = self.black_color
        fo = font.get_font(self.font, self.size, self.bold, self.italic, di.outline, self.antialias)
        fo.draw(di.surface, 0, 0, color, glyphs, self.underline, self.strikethrough, black_color)
    def assign_times(self, gt, glyphs):
        """
        Assigns times to the glyphs. `gt` is the starting time of the first
        glyph, and it returns the starting time of the first glyph in the next
        segment.
        """
        return textsupport.assign_times(gt, self.cps, glyphs)
class SpaceSegment(object):
"""
A segment that's used to render ho |
AsimmHirani/ISpyPi | tensorflow/contrib/tensorflow-master/tensorflow/contrib/linalg/python/kernel_tests/linear_operator_udvh_update_test.py | Python | apache-2.0 | 12,056 | 0.007963 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib import linalg as linalg_lib
from tensorflow.contrib.linalg.python.ops import linear_operator_test_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
linalg = linalg_lib
random_seed.set_random_seed(23)
rng = np.random.RandomState(0)
class BaseLinearOperatorUDVHUpdatetest(object):
  """Base test for this type of operator.

  Builds an operator A = L + U D V^H (or symmetric variants) together with the
  dense matrix it should equal. Two garbled source lines (a comment and the
  `diag_shape` assignment) were reconstructed from context.
  """
  # Subclasses should set these attributes to either True or False.
  # If True, A = L + UDV^H
  # If False, A = L + UV^H or A = L + UU^H, depending on _use_v.
  _use_diag_perturbation = None
  # If True, diag is > 0, which means D is symmetric positive definite.
  _is_diag_positive = None
  # If True, A = L + UDV^H
  # If False, A = L + UDU^H or A = L + UU^H, depending on _use_diag_perturbation
  _use_v = None
  @property
  def _dtypes_to_test(self):
    # TODO(langmore) Test complex types once cholesky works with them.
    # See comment in LinearOperatorUDVHUpdate.__init__.
    return [dtypes.float32, dtypes.float64]
  @property
  def _shapes_to_test(self):
    # Add the (2, 10, 10) shape at the end to get something slightly larger than
    # the other tests. Doing this because this operator makes use of inversion
    # and determinant lemmas that are known to have stability issues.
    return [(0, 0), (1, 1), (1, 3, 3), (3, 4, 4), (2, 1, 4, 4), (2, 10, 10)]
  def _operator_and_mat_and_feed_dict(self, shape, dtype, use_placeholder):
    # Recall A = L + UDV^H
    shape = list(shape)
    diag_shape = shape[:-1]
    k = shape[-2] // 2 + 1
    u_perturbation_shape = shape[:-1] + [k]
    diag_perturbation_shape = shape[:-2] + [k]
    # base_operator L will be a symmetric positive definite diagonal linear
    # operator, with condition number as high as 1e4.
    base_diag = linear_operator_test_util.random_uniform(
        diag_shape, minval=1e-4, maxval=1., dtype=dtype)
    base_diag_ph = array_ops.placeholder(dtype=dtype)
    # U
    u = linear_operator_test_util.random_normal_correlated_columns(
        u_perturbation_shape, dtype=dtype)
    u_ph = array_ops.placeholder(dtype=dtype)
    # V
    v = linear_operator_test_util.random_normal_correlated_columns(
        u_perturbation_shape, dtype=dtype)
    v_ph = array_ops.placeholder(dtype=dtype)
    # D
    if self._is_diag_positive:
      diag_perturbation = linear_operator_test_util.random_uniform(
          diag_perturbation_shape, minval=1e-4, maxval=1., dtype=dtype)
    else:
      diag_perturbation = linear_operator_test_util.random_normal(
          diag_perturbation_shape, stddev=1e-4, dtype=dtype)
    diag_perturbation_ph = array_ops.placeholder(dtype=dtype)
    if use_placeholder:
      # Evaluate here because (i) you cannot feed a tensor, and (ii)
      # values are random and we want the same value used for both mat and
      # feed_dict.
      base_diag = base_diag.eval()
      u = u.eval()
      v = v.eval()
      diag_perturbation = diag_perturbation.eval()
      # In all cases, set base_operator to be positive definite.
      base_operator = linalg.LinearOperatorDiag(
          base_diag_ph, is_positive_definite=True)
      operator = linalg.LinearOperatorUDVHUpdate(
          base_operator,
          u=u_ph,
          v=v_ph if self._use_v else None,
          diag=diag_perturbation_ph if self._use_diag_perturbation else None,
          is_diag_positive=self._is_diag_positive)
      feed_dict = {
          base_diag_ph: base_diag,
          u_ph: u,
          v_ph: v,
          diag_perturbation_ph: diag_perturbation}
    else:
      base_operator = linalg.LinearOperatorDiag(
          base_diag, is_positive_definite=True)
      operator = linalg.LinearOperatorUDVHUpdate(
          base_operator,
          u,
          v=v if self._use_v else None,
          diag=diag_perturbation if self._use_diag_perturbation else None,
          is_diag_positive=self._is_diag_positive)
      feed_dict = None
    # The matrix representing L
    base_diag_mat = array_ops.matrix_diag(base_diag)
    # The matrix representing D
    diag_perturbation_mat = array_ops.matrix_diag(diag_perturbation)
    # Set up mat as some variant of A = L + UDV^H
    if self._use_v and self._use_diag_perturbation:
      # In this case, we have L + UDV^H and it isn't symmetric.
      expect_use_cholesky = False
      mat = base_diag_mat + math_ops.matmul(
          u, math_ops.matmul(diag_perturbation_mat, v, adjoint_b=True))
    elif self._use_v:
      # In this case, we have L + UDV^H and it isn't symmetric.
      expect_use_cholesky = False
      mat = base_diag_mat + math_ops.matmul(u, v, adjoint_b=True)
    elif self._use_diag_perturbation:
      # In this case, we have L + UDU^H, which is PD if D > 0, since L > 0.
      expect_use_cholesky = self._is_diag_positive
      mat = base_diag_mat + math_ops.matmul(
          u, math_ops.matmul(diag_perturbation_mat, u, adjoint_b=True))
    else:
      # In this case, we have L + UU^H, which is PD since L > 0.
      expect_use_cholesky = True
      mat = base_diag_mat + math_ops.matmul(u, u, adjoint_b=True)
    if expect_use_cholesky:
      self.assertTrue(operator._use_cholesky)
    else:
      self.assertFalse(operator._use_cholesky)
    return operator, mat, feed_dict
class LinearOperatorUDVHUpdatetestWithDiagUseCholesky(
    BaseLinearOperatorUDVHUpdatetest,
    linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
  """A = L + UDU^H, D > 0, L > 0 ==> A > 0 and we can use a Cholesky."""
  # Symmetric update with positive D: the Cholesky path is expected.
  _use_diag_perturbation = True
  _is_diag_positive = True
  _use_v = False
  def setUp(self):
    # Decrease tolerance since we are testing with condition numbers as high as
    # 1e4.
    self._atol[dtypes.float32] = 1e-5
    self._rtol[dtypes.float32] = 1e-5
    self._atol[dtypes.float64] = 1e-10
    self._rtol[dtypes.float64] = 1e-10
class LinearOperatorUDVHUpdatetestWithDiagCannotUseCholesky(
    BaseLinearOperatorUDVHUpdatetest,
    linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
  """A = L + UDU^H, D !> 0, L > 0 ==> A !> 0 and we cannot use a Cholesky."""
  # Symmetric update but D may be negative: no Cholesky, so tolerances are
  # looser than in the Cholesky-capable sibling classes.
  _use_diag_perturbation = True
  _is_diag_positive = False
  _use_v = False
  def setUp(self):
    # Decrease tolerance since we are testing with condition numbers as high as
    # 1e4. This class does not use Cholesky, and thus needs even looser
    # tolerance.
    self._atol[dtypes.float32] = 1e-4
    self._rtol[dtypes.float32] = 1e-4
    self._atol[dtypes.float64] = 1e-9
    self._rtol[dtypes.float64] = 1e-9
class LinearOperatorUDVHUpdatetestNoDiagUseCholesky(
    BaseLinearOperatorUDVHUpdatetest,
    linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
  """A = L + UU^H, L > 0 ==> A > 0 and we can use a Cholesky."""
  # No diagonal perturbation at all: A = L + UU^H is always PD here.
  _use_diag_perturbation = False
  _is_diag_positive = None
  _use_v = False
  def setUp(self):
    # Decrease tolerance since we are testing with condition numbers as high as
    # 1e4.
    self._atol[dtypes.float32] = 1e-5
    self._rtol[dtypes.float32] = 1e-5
    self._atol[dtypes.float64] = 1e-10
    self._rtol[dtypes.float64] = 1e-10
class LinearOperatorUDVHUpdatetestNoDiagCannotUseCholesky(
BaseLinearOperatorUDVHUpdatetest,
linear_operator_test_util.SquareLinearOperatorD |
tjm-1990/blueking | conf/default.py | Python | gpl-3.0 | 11,273 | 0.000861 | # -*- coding: utf-8 -*-
"""
Django settings for app-framework project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
import sys
# Import global settings to make it easier to extend settings.
from django.conf.global_settings import *
# ==============================================================================
# 应用基本信息配置 (请按照说明修改)
# === | ===========================================================================
# 在蓝鲸智云开发者中心 -> 点击应用ID -> 基本信息 中获取 APP_ID 和 APP_TOKEN 的值
APP_ID = 'hello-world'
APP_TOKEN = 'c52de43b-ef43-49b2-8268-b53c5 | 271750a'
# 蓝鲸智云开发者中心的域名,形如:http://paas.bking.com
BK_PAAS_HOST = 'http://paas.bking.com'
# 是否启用celery任务
IS_USE_CELERY = True
# 本地开发的 celery 的消息队列(RabbitMQ)信息
BROKER_URL_DEV = 'amqp://guest:guest@127.0.0.1:5672/'
# TOCHANGE 调用celery任务的文件路径, List of modules to import when celery starts.
CELERY_IMPORTS = (
'home_application.celery_tasks',
)
# ==============================================================================
# 应用运行环境配置信息
# ==============================================================================
ENVIRONMENT = os.environ.get('BK_ENV', 'development')
# 应用基本信息从环境变量中获取,未设置环境变量(如:本地开发)时,则用用户在文件开头的填写的值
APP_ID = os.environ.get('APP_ID', APP_ID)
APP_TOKEN = os.environ.get('APP_TOKEN', APP_TOKEN)
BK_PAAS_HOST = os.environ.get('BK_PAAS_HOST', BK_PAAS_HOST)
# 应用访问路径
SITE_URL = '/'
# 运行模式, DEVELOP(开发模式), TEST(测试模式), PRODUCT(正式模式)
RUN_MODE = 'DEVELOP'
if ENVIRONMENT.endswith('production'):
RUN_MODE = 'PRODUCT'
DEBUG = False
SITE_URL = '/o/%s/' % APP_ID
elif ENVIRONMENT.endswith('testing'):
RUN_MODE = 'TEST'
DEBUG = False
SITE_URL = '/t/%s/' % APP_ID
else:
RUN_MODE = 'DEVELOP'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
try:
import pymysql
pymysql.install_as_MySQLdb()
except:
pass
# ===============================================================================
# 应用基本信息
# ===============================================================================
# 应用密钥
SECRET_KEY = 'MQtd_0cw&AiY5jT&&#w7%9sCK=HW$O_e%ch4xDd*AaP(xU0s3X'
# CSRF的COOKIE域,默认使用当前域
# CSRF_COOKIE_DOMAIN =''
CSRF_COOKIE_PATH = SITE_URL
ALLOWED_HOSTS = ['*']
# ==============================================================================
# Middleware and apps
# ==============================================================================
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'account.middlewares.LoginMiddleware', # 登录鉴权中间件
'common.middlewares.CheckXssMiddleware', # Xss攻击处理中间件
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# OTHER 3rd Party App
'app_control',
'account',
'home_application',
)
# ==============================================================================
# Django 项目配置
# ==============================================================================
TIME_ZONE = 'Asia/Shanghai'
LANGUAGE_CODE = 'zh-CN'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
# 项目路径
PROJECT_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_ROOT, PROJECT_MODULE_NAME = os.path.split(PROJECT_PATH)
BASE_DIR = os.path.dirname(os.path.dirname(PROJECT_PATH))
PYTHON_BIN = os.path.dirname(sys.executable)
# ===============================================================================
# 静态资源设置
# ===============================================================================
# 静态资源文件(js,css等)在应用上线更新后, 由于浏览器有缓存, 可能会造成没更新的情况.
# 所以在引用静态资源的地方,都需要加上这个版本号,如:<script src="/a.js?v=${STATIC_VERSION}"></script>;
# 如果静态资源修改了以后,上线前修改这个版本号即可
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
STATIC_VERSION = 0.1
# 应用本地静态资源目录
STATIC_URL = '%sstatic/' % SITE_URL
ROOT_URLCONF = 'urls'
# ==============================================================================
# Templates
# ==============================================================================
# mako template dir
MAKO_TEMPLATE_DIR = os.path.join(PROJECT_ROOT, 'templates')
MAKO_TEMPLATE_MODULE_DIR = os.path.join(BASE_DIR, 'templates_module', APP_ID)
if RUN_MODE not in ['DEVELOP']:
MAKO_TEMPLATE_MODULE_DIR = os.path.join(PROJECT_ROOT, 'templates_module', APP_ID)
# Django TEMPLATES配置
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_ROOT, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# the context to the templates
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.request',
'django.template.context_processors.csrf',
'common.context_processors.mysetting', # 自定义模版context,可在页面中使用STATIC_URL等变量
'django.template.context_processors.i18n',
],
},
},
]
# ==============================================================================
# session and cache
# ==============================================================================
SESSION_EXPIRE_AT_BROWSER_CLOSE = True # 默认为false,为true时SESSION_COOKIE_AGE无效
SESSION_COOKIE_PATH = SITE_URL # NOTE 不要改动,否则,可能会改成和其他app的一样,这样会影响登录
# ===============================================================================
# Authentication
# ===============================================================================
AUTH_USER_MODEL = 'account.BkUser'
AUTHENTICATION_BACKENDS = ('account.backends.BkBackend', 'django.contrib.auth.backends.ModelBackend')
LOGIN_URL = "%s/login/?app_id=%s" % (BK_PAAS_HOST, APP_ID)
LOGOUT_URL = '%saccount/logout/' % SITE_URL
LOGIN_REDIRECT_URL = SITE_URL
REDIRECT_FIELD_NAME = "c_url"
# 验证登录的cookie名
BK_COOKIE_NAME = 'bk_token'
# 数据库初始化 管理员列表
ADMIN_USERNAME_LIST = ['admin']
# ===============================================================================
# CELERY 配置
# ===============================================================================
if IS_USE_CELERY:
try:
import djcelery
INSTALLED_APPS += (
'djcelery', # djcelery
)
djcelery.setup_loader()
CELERY_ENABLE_UTC = False
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
if "celery" in sys.argv:
DEBUG = False
# celery 的消息队列(RabbitMQ)信息
BROKER_URL = os.environ.get('BK_BROKER_URL', BROKER_URL_DEV)
if RUN_MODE == 'DEVELOP':
from celery.signals import worker_process_init
@worker_process_init.connect
def configure_workers(*args, **kwargs):
import django
django.setup()
except:
pass
# ==============================================================================
# logging
# ==============================================================================
# 应用日志配置
BK_LOG_DIR = os.environ.get('BK_LOG_DIR', '/data/paas/apps/logs/')
LOGGING_DIR = os.path.join(BASE_DIR, 'logs', APP_ID)
LOG_CLASS = 'logging.handlers.RotatingFileHandler'
if RUN_MODE == 'DEVELOP':
LOG_LEVEL = 'DEBUG'
elif RUN_MODE == 'TEST':
LOGGING_DIR = os.path.join(BK_LOG_DIR, APP_ID)
LOG_LEVEL = 'INFO'
elif RUN_MODE == 'PRODUCT':
LOGGING_DIR = os.path.join(BK_LOG_DIR, APP_ID)
LOG_LEVEL = 'ERROR'
# 自动建立日志目录
if not os.path.exists(LOGGING_DIR):
try:
os.makedirs(LOGGING_DIR)
except:
pass
LOGGING = {
'v |
carljm/django-adminfiles | adminfiles/models.py | Python | bsd-3-clause | 3,651 | 0.003013 | import os
import mimetypes
from django.conf import settings as django_settings
from django.db import models
from django.template.defaultfilters import slugify
from django.core.files.images import get_image_dimensions
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from adminfiles import settings
if 'tagging' in django_settings.INSTALLED_APPS:
from tagging.fields import TagField
else:
TagField = None
class FileUpload(models.Model):
    """An uploaded file plus the metadata needed to reference it in content."""
    upload_date = models.DateTimeField(_('upload date'), auto_now_add=True)
    upload = models.FileField(_('file'), upload_to=settings.ADMINFILES_UPLOAD_TO)
    title = models.CharField(_('title'), max_length=100)
    slug = models.SlugField(_('slug'), max_length=100, unique=True)
    description = models.CharField(_('description'), blank=True, max_length=200)
    # Denormalized MIME type halves ("content/sub"), filled in by save().
    content_type = models.CharField(editable=False, max_length=100)
    sub_type = models.CharField(editable=False, max_length=100)
    if TagField:
        tags = TagField(_('tags'))

    class Meta:
        ordering = ['upload_date', 'title']
        verbose_name = _('file upload')
        verbose_name_plural = _('file uploads')

    def __unicode__(self):
        return self.title

    def mime_type(self):
        """Return the full MIME type, e.g. 'image/png'."""
        return '%s/%s' % (self.content_type, self.sub_type)
    mime_type.short_description = _('mime type')

    def type_slug(self):
        """Return a slugified form of the MIME sub-type (e.g. 'jpeg')."""
        return slugify(self.sub_type)

    def is_image(self):
        return self.content_type == 'image'

    def _get_dimensions(self):
        # Cache (width, height) per instance; non-images get (None, None).
        try:
            return self._dimensions_cache
        except AttributeError:
            if self.is_image():
                self._dimensions_cache = get_image_dimensions(self.upload.path)
            else:
                self._dimensions_cache = (None, None)
            return self._dimensions_cache

    def width(self):
        return self._get_dimensions()[0]

    def height(self):
        return self._get_dimensions()[1]

    def save(self, *args, **kwargs):
        """Populate content_type/sub_type from the file name, then save."""
        try:
            uri = self.upload.path
        except NotImplementedError:
            # Remote storage backends may not expose a local filesystem path.
            uri = self.upload.url
        (mime_type, encoding) = mimetypes.guess_type(uri)
        try:
            [self.content_type, self.sub_type] = mime_type.split('/')
        except (AttributeError, ValueError):
            # guess_type() returned None (unknown type) or a value without a
            # single '/'; fall back to text/plain.
            self.content_type = 'text'
            self.sub_type = 'plain'
        # Forward args so force_insert/force_update/using are not dropped.
        super(FileUpload, self).save(*args, **kwargs)

    def insert_links(self):
        """Yield {'desc', 'ref'} dicts for the configured insert links.

        The most specific matching key wins: full MIME type, then the major
        content type, then the catch-all '' entry.
        """
        links = []
        for key in [self.mime_type(), self.content_type, '']:
            if key in settings.ADMINFILES_INSERT_LINKS:
                links = settings.ADMINFILES_INSERT_LINKS[key]
                break
        for link in links:
            ref = self.slug
            opts = ':'.join(['%s=%s' % (k, v) for k, v in link[1].items()])
            if opts:
                ref += ':' + opts
            yield {'desc': link[0],
                   'ref': ref}

    def mime_image(self):
        """Return a stdicon.com icon URL for this MIME type, or None."""
        if not settings.ADMINFILES_STDICON_SET:
            return None
        return ('http://www.stdicon.com/%s/%s?size=64'
                % (settings.ADMINFILES_STDICON_SET, self.mime_type()))
class FileUploadReference(models.Model):
    """
    Tracks which ``FileUpload``s are referenced by which content models.
    """
    # The uploaded file being referenced.
    upload = models.ForeignKey(FileUpload)
    # Generic foreign key to whatever content object embeds the upload.
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    class Meta:
        # Record each (upload, referencing object) pair at most once.
        unique_together = ('upload', 'content_type', 'object_id')
|
mayfieldrobotics/ros-webrtc | test/integration/ros_coverage.py | Python | bsd-3-clause | 652 | 0.001534 | import contextlib
import os
import shutil
import sys
import rostest
@contextlib.contextmanager
def ros_coverage():
    """Optionally wrap a rostest run with coverage collection.

    Coverage is enabled only when ``--cov`` appears on the command line; the
    flag is consumed so rostest itself never sees it.  Works around
    https://github.com/ros/ros_comm/issues/558.
    """
    coverage_mode = '--cov' in sys.argv
    if coverage_mode:
        sys.argv.remove('--cov')
        # rostest expects a .coveragerc in the working directory; seed it
        # from the copy that ships next to this module.
        src = os.path.join(os.path.dirname(os.path.abspath(__file__)), '.coveragerc')
        dst = os.path.join(os.getcwd(), '.coveragerc')
        if not os.path.exists(dst):
            shutil.copyfile(src, dst)
        rostest._start_coverage(['ros_webrtc'])
    try:
        yield
    finally:
        if coverage_mode:
            rostest._stop_coverage(['ros_webrtc'])
|
mysidewalk/training-django | 01-middling-python/01-slicing/solutions/02-gradebook-solution.py | Python | mit | 2,924 | 0.00342 | class GradeBook(object):
""" Stores and retrieves grades for students. Slicing allows grades to be retrieved by a range
of dates specified as a zero based integer for the day of the year
"""
def __init__(self, name='Unknown Class Gradebook', grades=[]):
""" Set a name for the grade book and the list of grades
"""
self.name = name
self.grades = grades
def __getitem__(self, key):
""" Filter grades by slicing using the start and stop properties or get one by index
"""
if hasattr(key, 'start') and hasattr(key, 'stop'):
# Define an empty list that we can add our results to
grades_between_dates = []
# Check each grade that is a member of this Gradebook
for grade in self.grades:
# Mimic default slice behavior of >= start and < stop
if grade['date'] >= key.start and grade['date'] < key.stop:
# If it falls in the correct range, add it to the list to be returned
grades_between_dates.append(grade)
| else:
self.grades[key] = self.grades[key]
return grades_between_dates
def add_grades(self, new_grades):
""" Add the list of new grades to the | existing list of grades
"""
self.grades = self.grades + new_grades
# Define the list of grades to be used
raw_grades = [
    {'student_name': 'Billy', 'date': 14, 'grade': 87},
    {'student_name': 'Melissa', 'date': 14, 'grade': 90},
    {'student_name': 'Sarah', 'date': 14, 'grade': 83},
    {'student_name': 'Billy', 'date': 11, 'grade': 84},
    {'student_name': 'Melissa', 'date': 11, 'grade': 92},
    {'student_name': 'Sarah', 'date': 11, 'grade': 77},
    {'student_name': 'Billy', 'date': 23, 'grade': 89},
    {'student_name': 'Melissa', 'date': 23, 'grade': 95},
    {'student_name': 'Sarah', 'date': 23, 'grade': 80},
    {'student_name': 'Billy', 'date': 5, 'grade': 94},
    {'student_name': 'Melissa', 'date': 5, 'grade': 90},
    {'student_name': 'Sarah', 'date': 5, 'grade': 85},
]

# Create an instance of GradeBook for our math class using the grades defined above
math_class = GradeBook(name='Math GradeBook', grades=raw_grades)

# Request grades whose "date" falls between 0 and 12, including 0, excluding 12
print('Request the grades from the first to the twelfth of the year:')
print(math_class[0:12])

# We can add to the grades using the method we created "add_grades"
more_grades = [
    {'student_name': 'Billy', 'date': 30, 'grade': 91},
    {'student_name': 'Melissa', 'date': 30, 'grade': 86},
    {'student_name': 'Sarah', 'date': 30, 'grade': 85},
]

# Add the new grades from the list above
math_class.add_grades(more_grades)

# Check that the slice includes them
print('Request the new grades by selecting grades from the twenty-first to the thirtieth')
print(math_class[20:31])
lob/lob-python | lob/__init__.py | Python | mit | 410 | 0 | # Resources
from lob.resource import (
Address,
BankAccount,
BillingGroup,
BulkUSVerification,
BulkIntlVerification,
Ca | rd,
CardOrder,
Check,
Letter,
Postcard,
SelfMailer,
USAutocompletion,
USReverseGeocodeLookup,
USVerification,
USZipLookup,
IntlVerification
)
from lob.version import VERSION
api_key = None
api_ | base = 'https://api.lob.com/v1'
|
idjaw/keystone | keystone/tests/unit/test_v3.py | Python | apache-2.0 | 51,696 | 0 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from testtools import matchers
from keystone import auth
from keystone.common import authorization
from keystone.common import cache
from keystone import exception
from keystone import middleware
from keystone.policy.backends import rules
from keystone.tests import unit
from keystone.tests.unit import rest
CONF = cfg.CONF
DEFAULT_DOMAIN_ID = 'default'
TIME_FORMAT = unit.TIME_FORMAT
class AuthTestMixin(object):
    """Helpers for constructing Identity v3 authentication request bodies."""

    def build_auth_scope(self, project_id=None, project_name=None,
                         project_domain_id=None, project_domain_name=None,
                         domain_id=None, domain_name=None, trust_id=None,
                         unscoped=None):
        """Return the ``scope`` portion of a v3 auth request.

        An id always takes precedence over the corresponding name; the
        project's domain is only emitted when a project is present.
        """
        scope = {}
        if unscoped:
            scope['unscoped'] = {}
        if project_id or project_name:
            project = {'id': project_id} if project_id else {'name': project_name}
            if project_domain_id or project_domain_name:
                if project_domain_id:
                    project['domain'] = {'id': project_domain_id}
                else:
                    project['domain'] = {'name': project_domain_name}
            scope['project'] = project
        if domain_id or domain_name:
            scope['domain'] = ({'id': domain_id} if domain_id
                               else {'name': domain_name})
        if trust_id:
            scope['OS-TRUST:trust'] = {'id': trust_id}
        return scope

    def build_password_auth(self, user_id=None, username=None,
                            user_domain_id=None, user_domain_name=None,
                            password=None):
        """Return the ``password`` method payload for a v3 auth request."""
        user = {'id': user_id} if user_id else {'name': username}
        if user_domain_id or user_domain_name:
            user['domain'] = ({'id': user_domain_id} if user_domain_id
                              else {'name': user_domain_name})
        user['password'] = password
        return {'user': user}

    def build_token_auth(self, token):
        """Return the ``token`` method payload for a v3 auth request."""
        return {'id': token}

    def build_authentication_request(self, token=None, user_id=None,
                                     username=None, user_domain_id=None,
                                     user_domain_name=None, password=None,
                                     kerberos=False, **kwargs):
        """Build the full ``auth`` dictionary for a v3 request.

        Identity methods (kerberos, token, password) are added in that fixed
        order based on the supplied arguments; remaining keyword arguments
        are forwarded to :meth:`build_auth_scope`.
        """
        identity = {'methods': []}
        if kerberos:
            identity['methods'].append('kerberos')
            identity['kerberos'] = {}
        if token:
            identity['methods'].append('token')
            identity['token'] = self.build_token_auth(token)
        if user_id or username:
            identity['methods'].append('password')
            identity['password'] = self.build_password_auth(
                user_id, username, user_domain_id, user_domain_name, password)
        auth_data = {'identity': identity}
        if kwargs:
            auth_data['scope'] = self.build_auth_scope(**kwargs)
        return {'auth': auth_data}
class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase,
AuthTestMixin):
def config_files(self):
config_files = super(RestfulTestCase, self).config_files()
config_files.append(unit.dirs.tests_conf('backend_sql.conf'))
return config_files
def get_extensions(self):
extensions = set(['revoke'])
if hasattr(self, 'EXTENSION_NAME'):
extensions.add(self.EXTENSION_NAME)
return extensions
def generate_paste_config(self):
new_paste_file = None
try:
new_paste_file = unit.generate_paste_config(self.EXTENSION_TO_ADD)
except AttributeError:
# no need to report this error here, as most tests will not have
# EXTENSION_TO_ADD defined.
pass
finally:
return new_paste_file
| def remove_generated_paste_config(self):
try:
unit.remov | e_generated_paste_config(self.EXTENSION_TO_ADD)
except AttributeError:
pass
def setUp(self, app_conf='keystone'):
"""Setup for v3 Restful Test Cases.
"""
new_paste_file = self.generate_paste_config()
self.addCleanup(self.remove_generated_paste_config)
if new_paste_file:
app_conf = 'config:%s' % (new_paste_file)
super(RestfulTestCase, self).setUp(app_conf=app_conf)
self.empty_context = {'environment': {}}
# Initialize the policy engine and allow us to write to a temp
# file in each test to create the policies
rules.reset()
# drop the policy rules
self.addCleanup(rules.reset)
def load_backends(self):
# ensure the cache region instance is setup
cache.configure_cache_region(cache.REGION)
super(RestfulTestCase, self).load_backends()
def load_fixtures(self, fixtures):
self.load_sample_data()
def _populate_default_domain(self):
if CONF.database.connection == unit.IN_MEM_DB_CONN_STRING:
# NOTE(morganfainberg): If an in-memory db is being used, be sure
# to populate the default domain, this is typically done by
# a migration, but the in-mem db uses model definitions to create
# the schema (no migrations are run).
try:
self.resource_api.get_domain(DEFAULT_DOMAIN_ID)
except exception.DomainNotFound:
domain = {'description': (u'Owns users and tenants (i.e. '
u'projects) available on Identity '
u'API v2.'),
'enabled': True,
'id': DEFAULT_DOMAIN_ID,
'name': u'Default'}
self.resource_api.create_domain(DEFAULT_DOMAIN_ID, domain)
def load_sample_data(self):
self._populate_default_domain()
self.domain_id = uuid.uuid4().hex
self.domain = self.new_domain_ref()
self.domain['id'] = self.domain_id
self.resource_api.create_domain(self.domain_id, self.domain)
self.project_id = uuid.uuid4().hex
self.project = self.new_project_ref(
domain_id=self.domain_id)
self.project['id'] = self.project_id
self.resource_api.create_project(self.project_id, self.project)
self.user = self.new_user_ref(domain_id=self.domain_id)
password = self.user['password']
self.user = self.identity_api.create_user(self.user)
self.user['password'] = password
self.user_id = self.user['id']
self.default_domain_project_id = |
IdiosyncraticDragon/Reading-Notes | Python Parallel Programming Cookbook_Code/Chapter 3/kill_a_process.py | Python | apache-2.0 | 588 | 0.020408 | #kill a Process Chapter 3: Process Based Parallelism
import multiprocessing
import time
def foo():
    """Worker body: announce start, simulate brief work, announce finish."""
    start_msg, end_msg = 'Starting function', 'Finished function'
    print(start_msg)
    time.sleep(0.1)
    print(end_msg)
if __name__ == '__main__':
    # Demonstrate the lifecycle of a process that is killed with terminate().
    p = multiprocessing.Process(target=foo)
    print('Process before execution:', p, p.is_alive())
    p.start()
    print('Process running:', p, p.is_alive())
    p.terminate()
    print('Process terminated:', p, p.is_alive())
    # join() is still required so the parent reaps the child's exit status.
    p.join()
    print('Process joined:', p, p.is_alive())
    # A negative exit code -N means the child died from signal N
    # (SIGTERM, delivered by terminate()).
    print('Process exit code:', p.exitcode)
|
spulec/moto | tests/test_core/test_mock_regions.py | Python | apache-2.0 | 2,220 | 0.000901 | import boto3
import mock
import os
import pytest
from moto import mock_dynamodb2, mock_sns, settings
from unittest import SkipTest
@mock_sns
def test_use_invalid_region():
    """An explicitly passed unknown region raises KeyError in moto."""
    if settings.TEST_SERVER_MODE:
        # In server mode the failure surfaces differently, so skip.
        raise SkipTest("ServerMode will throw different errors")
    client = boto3.client("sns", region_name="any-region")
    with pytest.raises(KeyError) as exc:
        client.list_platform_applications()
    # `.should` is provided by the `sure` library, which patches builtins.
    str(exc.value).should.contain("any-region")
@mock_sns
@mock.patch.dict(os.environ, {"AWS_DEFAULT_REGION": "us-east-2"})
def test_use_region_from_env():
    """Boto3 falls back to AWS_DEFAULT_REGION when no region is passed."""
    client = boto3.client("sns")
    # A known region resolves to an (empty) mocked backend.
    client.list_platform_applications()["PlatformApplications"].should.equal([])
@mock_sns
@mock.patch.dict(os.environ, {"AWS_DEFAULT_REGION": "any-region"})
def test_use_unknown_region_from_env():
    """An unknown region taken from the environment raises KeyError too."""
    if settings.TEST_SERVER_MODE:
        # Server mode runs in a separate process with its own environment.
        raise SkipTest("Cannot set environment variables in ServerMode")
    client = boto3.client("sns")
    with pytest.raises(KeyError) as exc:
        client.list_platform_applications()
    str(exc.value).should.contain("any-region")
@mock_sns
@mock.patch.dict(os.environ, {"AWS_DEFAULT_REGION": "any-region"})
@mock.patch.dict(os.environ, {"MOTO_ALLOW_NONEXISTENT_REGION": "trUe"})
def test_use_unknown_region_from_env_but_allow_it():
    """MOTO_ALLOW_NONEXISTENT_REGION (case-insensitive) permits any region."""
    if settings.TEST_SERVER_MODE:
        raise SkipTest("Cannot set environment variables in ServerMode")
    client = boto3.client("sns")
    client.list_platform_applications()["PlatformApplications"].should.equal([])
@mock_dynamodb2
@mock.patch.dict(os.environ, {"MOTO_ALLOW_NONEXISTENT_REGION": "trUe"})
def test_use_unknown_region_from_env_but_allow_it__dynamo():
    """The nonexistent-region escape hatch also works for DynamoDB."""
    if settings.TEST_SERVER_MODE:
        raise SkipTest("Cannot set environment variables in ServerMode")
    dynamo_db = boto3.resource("dynamodb", region_name="test")
    dynamo_db.create_table(
        TableName="test_table",
        KeySchema=[{"AttributeName": "key", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "key", "AttributeType": "S"}],
        BillingMode="PAY_PER_REQUEST",
    )
    tables = list(dynamo_db.tables.all())
    tables.should.have.length_of(1)
    [table.name for table in tables].should.equal(["test_table"])
|
tensorflow/probability | tensorflow_probability/python/distributions/finite_discrete.py | Python | apache-2.0 | 14,488 | 0.004694 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""FiniteDiscrete distribution class."""
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.bijectors import invert as invert_bijector
from tensorflow_probability.python.bijectors import ordered as ordered_bijector
from tensorflow_probability.python.bijectors import softmax_centered as softmax_centered_bijector
from tensorflow_probability.python.bijectors import softplus as softplus_bijector
from tensorflow_probability.python.distributions import categorical
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import distribution_util as dist_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import parameter_properties
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
__all__ = [
'FiniteDiscrete',
]
class FiniteDiscrete(distribution.AutoCompositeTensorDistribution):
"""The finite discrete distribution.
The FiniteDiscrete distribution is parameterized by either probabilities or
log-probabilities of a set of `K` possible outcomes, which is defined by
a strictly ascending list of `K` values.
Note: log_prob, prob, cdf, mode, and entropy are differentiable with respect
to `logits` or `probs` but not with respect to `outcomes`.
#### Mathematical Details
The probability mass function (pmf) is,
```none
pmf(x; pi, qi) = prod_j pi_j**[x == qi_j]
```
#### Examples
```python
# Initialize a discrete distribution with 4 possible outcomes and the 2nd
# outcome being most likely.
dist = FiniteDiscrete([1., 2., 4., 8.], probs=[0.1, 0.4, 0.3, 0.2])
dist.prob(2.)
# ==> 0.4
# Using logits to initialize a discrete distribution with 4 possible outcomes
# and the 2nd outcome being most likely.
dist = FiniteDiscrete([1., 2., 4., 8.], logits=np.log([0.1, 0.4, 0.3, 0.2]))
dist.prob(2.)
# ==> 0.4
```
"""
def __init__(self,
outcomes,
logits=None,
probs=None,
rtol=None,
atol=None,
validate_args=False,
allow_nan_stats=True,
name='FiniteDiscrete'):
"""Construct a finite discrete contribution.
Args:
outcomes: A 1-D floating or integer `Tensor`, representing a list of
possible outcomes in strictly ascending order.
logits: A floating N-D `Tensor`, `N >= 1`, representing the log
probabilities of a set of FiniteDiscrete distributions. The first `N -
1` dimensions index into a batch of independent distributions and the
last dimension represents a vector of logits for each discrete value.
Only one of `logits` or `probs` should be passed in.
probs: A floating N-D `Tensor`, `N >= 1`, representing the probabilities
of a set of FiniteDiscrete distributions. The first `N - 1` dimensions
index into a batch of independent distributions and the last dimension
represents a vector of probabilities for each discrete value. Only one
of `logits` or `probs` should be passed in.
rtol: `Tensor` with same `dtype` as `outcomes`. The relative tolerance for
floating number comparison. Only effective when `outcomes` is a floating
`Tensor`. Default is `10 * eps`.
atol: `Tensor` with same `dtype` as `outcomes`. The absolute tolerance for
floating number comparison. Only effective when `outcomes` is a floating
`Tensor`. Default is `10 * eps`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may render incorrect outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value '`NaN`' to indicate the
result is undefined. When `False`, an exception is raised if one or more
of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
parameters = dict(locals())
with tf.name_scope(name) as name:
outcomes_dtype = dtype_util.common_dtype(
[outcomes], dtype_hint=tf.float32)
self._outcomes = tensor_util.convert_nonref_to_tensor(
outcomes, dtype_hint=outcomes_dtype, name='outcomes')
if dtype_util.is_floating(self._outcomes.dtype):
eps = np.finfo(dtype_util.as_numpy_dtype(outcomes_dtype)).eps
self._rtol = 10 * eps if rtol is None else rtol
self._atol = 10 * eps if atol is None else atol
else:
self._rtol = None
self._atol = None
self._categorical = categorical.Categorical(
logits=logits,
probs=probs,
dtype=tf.int32,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats)
super(FiniteDiscrete, self).__init__(
dtype=self._outcomes.dtype,
reparameterization_type=reparameterization.NOT_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
name=name)
@classmethod
def _parameter_properties(cls, dtype, num_classes=None):
# pylint: disable=g-long-lambda
return dict(
outcomes=parameter_properties.ParameterProperties(
event_ndims=None,
shape_fn=lambda sample_shape: [num_classes],
default_constraining_bijector_fn=invert_bijector.Invert(
ordered_bijector.Ordered())),
logits=parameter_properties.ParameterProperties(
event_ndims=1,
shape_fn=lambda sample_shape: ps.concat(
[sample_shape, [num_classes]], axis=0)),
probs=parameter_properties.ParameterProperties(
event_ndims=1,
shape_fn=lambda sample_shape: ps.concat(
[sample_shape, [num_classes]], axis=0),
default_constraining_bijector_fn=softmax_centered_bijector
.SoftmaxCentered,
is_preferred=False),
rtol=parameter_properties.ParameterProperties(
event_ndims=None, # TODO(b/187469130): standardize batch semantics.
default_constraining_bijector_fn=(
lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype))),
is_preferred=False),
atol=parameter_properties.ParameterProperties(
event_ndims=None, # TODO(b/187469130): standardize batch semantics.
default_constraining_bijector_fn=(
lambda: softplus_bijector.Softplus(low=dtype_util. | eps(dtype))),
is_preferred=False))
# pylint: enable=g-long-lambda
@property
def outcomes(self):
return self._outcomes
@property
def logits(self):
"""Input argument `logits`."""
return self._c | ategorical.logits
@property
def probs(self):
"""Input argument `probs`."""
return self._categorical.probs
def _event_shape_tensor(self):
return tf.constant([], dtype=tf.int32)
def _event_shape(self):
return tf.TensorShape([])
def _cdf(self, x):
x = tf.convert_to_tensor(x, name='x')
fl |
superphy/backend | app/modules/loggingFunctions.py | Python | apache-2.0 | 1,056 | 0.000947 | #!/usr/bin/env python
"""
Set up the logging
"""
import logging
import tempfile
import os
def initialize_logging():
    """
    Set up DEBUG logging to a file and INFO logging to stderr.

    :return: The log filename
    """
    log_file = os.path.join(tempfile.gettempdir(), 'spfy.log')
    formatter = logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
    # DEBUG and above goes to the log file (truncated on each run).
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                        datefmt='%m-%d %H:%M',
                        filename=log_file,
                        filemode='w')
    # Define a handler which writes INFO messages or higher to sys.stderr.
    # NOTE(review): calling this more than once adds duplicate console
    # handlers to the root logger — callers should invoke it only once.
    console = logging.StreamHandler()
    console.setFormatter(formatter)
    console.setLevel(logging.INFO)
    # Add the handler to the root logger.
    logging.getLogger('').addHandler(console)
    return log_file
|
google/timesketch | timesketch/lib/aggregators/term.py | Python | apache-2.0 | 7,953 | 0.000251 | # Copyright 2019 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Term aggregations."""
from __future__ import unicode_literals
from timesketch.lib.aggregators import manager
from timesketch.lib.aggregators import interface
def get_spec(field, limit=10, query='', query_dsl=''):
    """Build an aggregation spec for a terms bucket over filtered events.

    The spec summarizes the values of ``field`` for all events that match
    the supplied filter (either a query string or a raw query DSL).

    Args:
        field (str): event attribute to aggregate on.
        limit (int): number of buckets to return, defaults to 10.
        query (str): query string applied to all documents before
            aggregating.
        query_dsl (str): query DSL applied to all documents before
            aggregating (optional). One of ``query`` / ``query_dsl`` must
            be provided.

    Raises:
        ValueError: if neither query nor query_dsl is provided.

    Returns:
        dict usable as an aggregation spec.
    """
    if query:
        # Wrap the plain query string in a bool/must clause.
        filter_clause = {
            'bool': {'must': [{'query_string': {'query': query}}]}}
    elif query_dsl:
        filter_clause = query_dsl
    else:
        raise ValueError('Neither query nor query DSL provided.')

    terms_agg = {'terms': {'field': field, 'size': limit}}
    return {'query': filter_clause, 'aggs': {'aggregation': terms_agg}}
class FilteredTermsAggregation(interface.BaseAggregator):
    """Terms aggregation applied on top of a filter query."""

    NAME = 'query_bucket'
    DISPLAY_NAME = 'Filtered Terms Aggregation'
    DESCRIPTION = 'Aggregating values of a field after applying a filter'

    SUPPORTED_CHARTS = frozenset(
        ['barchart', 'circlechart', 'hbarchart', 'linechart', 'table'])

    FORM_FIELDS = [
        {
            'type': 'ts-dynamic-form-select-input',
            'name': 'supported_charts',
            'label': 'Chart type to render',
            'options': list(SUPPORTED_CHARTS),
            'display': True
        },
        {
            'name': 'query_string',
            'type': 'ts-dynamic-form-text-input',
            'label': 'The filter query to narrow down the result set',
            'placeholder': 'Query',
            'default_value': '',
            'display': True
        },
        {
            'name': 'query_dsl',
            'type': 'ts-dynamic-form-text-input',
            'label': 'The filter query DSL to narrow down the result',
            'placeholder': 'Query DSL',
            'default_value': '',
            'display': False
        },
        {
            'name': 'field',
            'type': 'ts-dynamic-form-text-input',
            'label': 'What field to aggregate.',
            'display': True
        },
        {
            'type': 'ts-dynamic-form-datetime-input',
            'name': 'start_time',
            'label': (
                'ISO formatted timestamp for the start time '
                'of the aggregated data'),
            'placeholder': 'Enter a start date for the aggregation',
            'default_value': '',
            'display': True
        },
        {
            'type': 'ts-dynamic-form-datetime-input',
            'name': 'end_time',
            'label': 'ISO formatted end time for the aggregation',
            'placeholder': 'Enter an end date for the aggregation',
            'default_value': '',
            'display': True
        },
        {
            'type': 'ts-dynamic-form-text-input',
            'name': 'limit',
            'label': 'Number of results to return',
            'placeholder': 'Enter number of results to return',
            'default_value': '10',
            'display': True
        }
    ]

    @property
    def chart_title(self):
        """Returns a title for the chart."""
        if not self.field:
            return 'Top results for an unknown field after filtering'
        return 'Top filtered results for "{0:s}"'.format(self.field)

    # pylint: disable=arguments-differ
    def run(
            self, field, query_string='', query_dsl='',
            supported_charts='table', start_time='', end_time='', limit=10):
        """Run the aggregation.

        Args:
            field (str): event attribute to aggregate on.
            query_string (str): query string applied to all documents prior
                to aggregating the results.
            query_dsl (str): query DSL applied to all documents prior to
                aggregating the results. Either a query string or a query
                DSL has to be present.
            supported_charts: chart type to render, defaults to table.
            start_time: optional ISO formatted date string limiting the
                time range of the aggregation.
            end_time: optional ISO formatted date string limiting the
                time range of the aggregation.
            limit (int): number of buckets to return, defaults to 10.

        Returns:
            Instance of interface.AggregationResult with aggregation result.

        Raises:
            ValueError: if neither query_string nor query_dsl is provided.
        """
        if not (query_string or query_dsl):
            raise ValueError('Both query_string and query_dsl are missing')

        self.field = field

        spec = get_spec(
            field=self.format_field_by_type(field), limit=limit,
            query=query_string, query_dsl=query_dsl)
        spec = self._add_query_to_aggregation_spec(
            spec, start_time=start_time, end_time=end_time)

        # Encoding information for Vega-Lite.
        encoding = {
            'x': {
                'field': field,
                'type': 'nominal',
                'sort': {
                    'op': 'sum',
                    'field': 'count',
                    'order': 'descending'
                }
            },
            'y': {'field': 'count', 'type': 'quantitative'},
            'tooltip': [
                {'field': field, 'type': 'nominal'},
                {'field': 'count', 'type': 'quantitative'}],
        }

        response = self.opensearch_aggregation(spec)
        buckets = response.get('aggregations', {}).get(
            'aggregation', {}).get('buckets', [])
        values = [
            {field: bucket.get('key', 'N/A'),
             'count': bucket.get('doc_count', 0)}
            for bucket in buckets]

        if query_string:
            extra_query_url = 'AND {0:s}'.format(query_string)
        else:
            extra_query_url = ''

        return interface.AggregationResult(
            encoding=encoding, values=values, chart_type=supported_charts,
            sketch_url=self._sketch_url, field=field,
            extra_query_url=extra_query_url)


manager.AggregatorManager.register_aggregator(FilteredTermsAggregation)
| |
fbradyirl/home-assistant | homeassistant/components/streamlabswater/sensor.py | Python | apache-2.0 | 3,962 | 0.000252 | """Support for Streamlabs Water Monitor Usage."""
from datetime import timedelta
from homeassistant.components.streamlabswater import DOMAIN as STREAMLABSWATER_DOMAIN
from homeassistant.const import VOLUME_GALLONS
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
DEPENDENCIES = ["streamlabswater"]
WATER_ICON = "mdi:water"
MIN_TIME_BETWEEN_USAGE_UPDATES = timedelta(seconds=60)
NAME_DAILY_USAGE = "Daily Water"
NAME_MONTHLY_USAGE = "Monthly Water"
NAME_YEARLY_USAGE = "Yearly Water"
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the daily/monthly/yearly water usage sensors."""
    domain_data = hass.data[STREAMLABSWATER_DOMAIN]
    usage_data = StreamlabsUsageData(
        domain_data["location_id"], domain_data["client"])
    # Prime the cache once so the sensors have a value immediately.
    usage_data.update()
    location_name = domain_data["location_name"]
    add_devices([
        StreamLabsDailyUsage(location_name, usage_data),
        StreamLabsMonthlyUsage(location_name, usage_data),
        StreamLabsYearlyUsage(location_name, usage_data),
    ])
class StreamlabsUsageData:
    """Fetch and cache water usage totals for one location."""

    def __init__(self, location_id, client):
        """Initialize the usage data."""
        self._location_id = location_id
        self._client = client
        # Cached totals; populated by update().
        self._today = None
        self._this_month = None
        self._this_year = None

    @Throttle(MIN_TIME_BETWEEN_USAGE_UPDATES)
    def update(self):
        """Query the API and refresh the cached usage totals."""
        summary = self._client.get_water_usage_summary(self._location_id)
        self._today = round(summary["today"], 1)
        self._this_month = round(summary["thisMonth"], 1)
        self._this_year = round(summary["thisYear"], 1)

    def get_daily_usage(self):
        """Return the day's usage."""
        return self._today

    def get_monthly_usage(self):
        """Return the month's usage."""
        return self._this_month

    def get_yearly_usage(self):
        """Return the year's usage."""
        return self._this_year
class StreamLabsDailyUsage(Entity):
    """Sensor reporting the daily water usage."""

    def __init__(self, location_name, streamlabs_usage_data):
        """Initialize the daily water usage device."""
        self._location_name = location_name
        self._streamlabs_usage_data = streamlabs_usage_data
        self._state = None

    @property
    def name(self):
        """Return the name for daily usage."""
        return f"{self._location_name} {NAME_DAILY_USAGE}"

    @property
    def icon(self):
        """Return the daily usage icon."""
        return WATER_ICON

    @property
    def state(self):
        """Return the current daily usage."""
        return self._streamlabs_usage_data.get_daily_usage()

    @property
    def unit_of_measurement(self):
        """Return gallons as the unit measurement for water."""
        return VOLUME_GALLONS

    def update(self):
        """Retrieve the latest daily usage."""
        self._streamlabs_usage_data.update()
class StreamLabsMonthlyUsage(StreamLabsDailyUsage):
    """Sensor reporting the monthly water usage."""

    @property
    def name(self):
        """Return the name for monthly usage."""
        return f"{self._location_name} {NAME_MONTHLY_USAGE}"

    @property
    def state(self):
        """Return the current monthly usage."""
        return self._streamlabs_usage_data.get_monthly_usage()
class StreamLabsYearlyUsage(StreamLabsDailyUsage):
    """Sensor reporting the yearly water usage."""

    @property
    def name(self):
        """Return the name for yearly usage."""
        return f"{self._location_name} {NAME_YEARLY_USAGE}"

    @property
    def state(self):
        """Return the current yearly usage."""
        return self._streamlabs_usage_data.get_yearly_usage()
|
PressLabs/cobalt | src/utils/service.py | Python | apache-2.0 | 941 | 0.001063 | # Copyright 2016 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed un | der the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
class Service(ABC):
    """Abstract base class describing the lifecycle of a service."""

    @abstractmethod
    def start(self):
        """Start the service; concrete services must implement this."""

    @abstractmethod
    def stop(self):
        """Gracefully stop the service; concrete services must implement this."""
|
DayGitH/Python-Challenges | DailyProgrammer/DP20160622B.py | Python | mit | 1,931 | 0.000518 | """
[2016-06-22] Challenge #272 [Intermediate] Dither that image
https://www.reddit.com/r/dailyprogrammer/comments/4paxp4/20160622_challenge_272_intermediate_dither_that/
# Description
[Dithering](https://en.wikipedia.org/wiki/Dither) is the intentional use of
noise to reduce the error of compression. If you start with a color image
and want to reduce it to two colors (black and white) the naive approach is
to threshold the image. However, the results are usually terrible.
* [Color Solids](http://i.imgur.com/kjWn2Q1.png)
* [Thresholded Solids](http://i.imgur.com/RDOMCfg.png)
One of the most popular dithering algorithms is
[Floyd-Steinberg](https://en.wikipedia.org/wiki/Floyd%E2%80%93Steinberg_dithering).
When a pixel is thresholded, the error (difference) between the original value
and the converted value is carried forward into nearby pixels.
* [Floyd-Steinberg Solids](http://i.imgur.com/w9DFOKS.png)
There are other approaches, such as
[Ordered Dithering](https://en.wikipedia.org/wiki/Ordered_dithering) with a
Bayer Matrix.
* [Bayer solids](http://i.imgur.com/mLKUyfn.png)
# Input
Your program will take a color or grayscale image as its input. You may choose
your input method appropriate to your language of choice. If you want to do it
yourself, I suggest picking a [Netpbm](https://en.wikipedia.org/wiki/Netpbm)
format, which is easy to read.
# Output
Output a two-color (e.g. Black and White) dithered image in your choice of
format. Again, I suggest picking a Netpbm format, which is easy to write.
# Notes
* [Here](http://www.tannerhelland.com/4660/dithering-eleven-algorithms-source-code/)
is a good resource for dithering algorithms.
# Finally
Have a good challenge idea?
Consider submitting it to /r/dailyprogrammer_ideas
Thanks to /u/skeeto for this [challenge idea]
(https://www.reddit.com/r/dailyprogrammer_ideas/comments/4nt7rp)
"""
def main():
    """Entry point for the challenge; no solution implemented yet."""
    pass


if __name__ == "__main__":
    main()
|
jcolekaplan/WNCYC | src/main/api/getBuildingId.py | Python | mit | 1,479 | 0.006761 | import boto3
from decEncoder import *
from DynamoTable import *
def handler(event, context):
    """Lambda entry point for the /buildings/{buildingId} API call.

    :param event: API Gateway event; path parameters carry the buildingId.
    :param context: Lambda context object (unused).
    :return: HTTP-style response dict produced by getBuildingId.
    """
    buildingTable = DynamoTable('Buildings')
    return getBuildingId(event, buildingTable)
"""Lambda handler function for /buildings/{buildingId} API call
Returns building with the buildingId specified in the path
or 'Building not found' error if buildingId not found by query search
"""
def getBuildingId(event, buildingTable):
    """Handle /buildings/{buildingId}: fetch one building record.

    :param event: API Gateway event dict; buildingId is read from its
        path parameters.
    :param buildingTable: table wrapper exposing ``get(buildingId=...)``.
    :return: HTTP-style response dict. 200 with the item as JSON on
        success, 404 when the id is unknown, 400 when no path parameters
        are present.
    """
    json_headers = {'Content-Type': 'application/json'}

    path_params = event.get('pathParameters')
    if not path_params:
        # No path parameters at all -> malformed request.
        return {
            'statusCode': 400,
            'headers': json_headers,
            'body': json.dumps({'error': 'Path not found'})
        }

    response = buildingTable.get(buildingId=path_params.get('buildingId'))
    item = response.get('Item')
    if item:
        # DecimalEncoder converts DynamoDB Decimal values for JSON output.
        return {
            'statusCode': 200,
            'headers': json_headers,
            'body': json.dumps(item, cls=DecimalEncoder)
        }
    return {
        'statusCode': 404,
        'headers': json_headers,
        'body': json.dumps({'error': 'Building not found'})
    }
|
KaranToor/MA450 | google-cloud-sdk/lib/third_party/prompt_toolkit/terminal/win32_input.py | Python | apache-2.0 | 12,797 | 0.000469 | from __future__ import unicode_literals
from ctypes import windll, pointer
from ctypes.wintypes import DWORD
from six.moves import range
from prompt_toolkit.key_binding.input_processor import KeyPress
from prompt_toolkit.keys import Keys
from prompt_toolkit.mouse_events import MouseEventType
from prompt_toolkit.win32_types import EventTypes, KEY_EVENT_RECORD, MOUSE_EVENT_RECORD, INPUT_RECORD, STD_INPUT_HANDLE
import msvcrt
import os
import sys
import six
__all__ = (
'ConsoleInputReader',
'raw_mode',
'cooked_mode'
)
class ConsoleInputReader(object):
"""
:param recognize_paste: When True, try to discover paste actions and turn
the event into a BracketedPaste.
"""
# Keys with character data.
mappings = {
b'\x1b': Keys.Escape,
b'\x00': Keys.ControlSpace, # Control-Space (Also for Ctrl-@)
b'\x01': Keys.ControlA, # Control-A (home)
b'\x02': Keys.ControlB, # Control-B (emacs cursor left)
b'\x03': Keys.ControlC, # Control-C (interrupt)
b'\x04': Keys.ControlD, # Control-D (exit)
b'\x05': Keys.ControlE, # Contrel-E (end)
b'\x06': Keys.ControlF, # Control-F (cursor forward)
b'\x07': Keys.ControlG, # Control-G
b'\x08': Keys.ControlH, # Control-H (8) (Identical to '\b')
b'\x09': Keys.ControlI, # Control-I (9) (Identical to '\t')
b'\x0a': Keys.ControlJ, # Control-J (10) (Identical to '\n')
b'\x0b': Keys.ControlK, # Control-K (delete until end of line; vertical tab)
b'\x0c': Keys.ControlL, # Control-L (clear; form feed)
b'\x0d': Keys.ControlJ, # Control-J NOTE: Windows sends \r instead of
# \n when pressing enter. We turn it into \n
# to be compatible with other platforms.
b'\x0e': Keys.ControlN, # Control-N (14) (history forward)
b'\x0f': Keys.ControlO, # Control-O (15)
b'\x10': Keys.ControlP, # Control-P (16) (history back)
b'\x11': Keys.ControlQ, # Control-Q
b'\x12': Keys.ControlR, # Control-R (18) (reverse search)
b'\x13': Keys.ControlS, # Control-S (19) (forward search)
b'\x14': Keys.ControlT, # Control-T
b'\x15': Keys.ControlU, # Control-U
b'\x16': Keys.ControlV, # Control-V
b'\x17': Keys.ControlW, # Control-W
b'\x18': Keys.ControlX, # Control-X
b'\x19': Keys.ControlY, # Control-Y (25)
b'\x1a': Keys.ControlZ, # Control-Z
b'\x1c': Keys.ControlBackslash, # Both Control-\ and Ctrl-|
b'\x1d': Keys.ControlSquareClose, # Control-]
b'\x1e': Keys.ControlCircumflex, # Control-^
b'\x1f': Keys.ControlUnderscore, # Control-underscore (Also for Ctrl-hypen.)
b'\x7f': Keys.Backspace, # (127) Backspace
}
# Keys that don't carry character data.
keycodes = {
# Home/End
33: Keys.PageUp,
34: Keys.PageDown,
35: Keys.End,
36: Keys.Home,
# Arrows
37: Keys.Left,
38: Keys.Up,
39: Keys.Right,
40: Keys.Down,
45: Keys.Insert,
46: Keys.Delete,
# F-keys.
112: Keys.F1,
113: Keys.F2,
114: Keys.F3,
115: Keys.F4,
116: Keys.F5,
117: Keys.F6,
118: Keys.F7,
119: Keys.F8,
120: Keys.F9,
121: Keys.F10,
122: Keys.F11,
123: Keys.F12,
}
LEFT_ALT_PRESSED = 0x0002
RIGHT_ALT_PRESSED = 0x0001
SHIFT_PRESSED = 0x0010
LEFT_CTRL_PRESSED = 0x0008
RIGHT_CTRL_PRESSED = 0x0004
def __init__(self, recognize_paste=True):
self._fdcon = None
self.recognize_paste = recognize_paste
# When stdin is a tty, use that handle, otherwise, create a handle from
# CONIN$.
if sys.stdin.isatty():
self.handle = windll.kernel32.GetStdHandle(STD_INPUT_HANDLE)
else:
self._fdcon = os.open('CONIN$', os.O_RDWR | os.O_BINARY)
self.handle = msvcrt.get_osfhandle(self._fdcon)
def close(self):
" Close fdcon. "
if self._fdcon is not None:
os.close(self._fdcon)
def read(self):
"""
Return a list of `KeyPress` instances. It won't return anything when
there was nothing to read. (This function doesn't block.)
http://msdn.microsoft.com/en-us/library/windows/desktop/ms684961(v=vs.85).aspx
"""
max_count = 2048 # Max events to read at the same time.
read = DWORD(0)
arrtype = INPUT_RECORD * max_count
input_records = arrtype()
# Get next batch of input event.
windll.kernel32.ReadConsoleInputW(
self.handle, pointer(input_records), max_count, pointer(read))
# First, get all the keys from the input buffer, in order to determine
# whether we should consider this a paste event or not.
all_keys = list(self._get_keys(read, input_records))
if self.recognize_paste and self._is_paste(all_keys):
gen = iter(all_keys)
for k in gen:
# Pasting: if the current key consists of text or \n, turn it
# into a BracketedPaste.
data = []
while k and (isinstance(k.key, six.text_type) or
k.key == Keys.ControlJ):
data.append(k.data)
try:
k = next(gen)
except StopIteration:
k = None
if data:
yield KeyPress(Keys.BracketedPaste, ''.join(data))
if k is not None:
yield k
else:
for k in all_keys:
yield k
def _get_keys(self, read, input_records):
"""
Generator that y | ields `KeyPress` objects from the input records.
"""
for i in range(read.value):
ir = input_records[i]
# Get the right EventType from the EVENT_RECORD.
# (For some reason the Windows console application 'cmder'
# [http://gooseberrycreative.com/cmder/] can return '0' for
# ir.EventType. -- Just ignore that.)
if ir.EventType in EventTypes:
ev = getattr(ir.Event, Even | tTypes[ir.EventType])
# Process if this is a key event. (We also have mouse, menu and
# focus events.)
if type(ev) == KEY_EVENT_RECORD and ev.KeyDown:
for key_press in self._event_to_key_presses(ev):
yield key_press
elif type(ev) == MOUSE_EVENT_RECORD:
for key_press in self._handle_mouse(ev):
yield key_press
@staticmethod
def _is_paste(keys):
"""
Return `True` when we should consider this list of keys as a paste
event. Pasted text on windows will be turned into a
`Keys.BracketedPaste` event. (It's not 100% correct, but it is probably
the best possible way to detect pasting of text and handle that
correctly.)
"""
# Consider paste when it contains at least one newline and at least one
# other character.
text_count = 0
newline_count = 0
for k in keys:
if isinstance(k.key, six.text_type):
text_count += 1
if k.key == Keys.ControlJ:
newline_count += 1
return newline_count >= 1 and text_count > 1
def _event_to_key_presses(self, ev):
"""
For this `KEY_EVENT_RECORD`, return a list of `KeyPress` instances.
"""
assert type(ev) == KEY_EVENT_RECORD and ev.KeyDown
result = None
u_char = ev.uChar.UnicodeChar
ascii_char = ev.uChar.AsciiChar
if u_char == '\x00':
if ev.VirtualKeyCode in self.keycodes:
result = KeyPress(self.keycodes[ev.VirtualKeyCode], '')
else:
if ascii_char in self.mappings:
if self.mappings[ascii_char] == Keys.ControlJ:
|
puttarajubr/commcare-hq | corehq/apps/mach/api.py | Python | bsd-3-clause | 1,569 | 0.003824 | import urllib
from django.conf import settings
import urllib2
from corehq.apps.sms.mixin import SMSBackend
from dimagi.ext.couchdbkit import *
from corehq.apps.mach.forms import MachBackendForm
MACH_URL = "http://smsgw.a2p.mme.syniverse.com/sms.php"
class MachBackend(SMSBackend):
    """SMS backend for the Syniverse (MACH) HTTP gateway (Python 2 code)."""

    account_id = StringProperty()
    password = StringProperty()
    sender_id = StringProperty()
    # Maximum number of outgoing sms requests to be made per second;
    # this is defined at the account level.
    max_sms_per_second = IntegerProperty(default=1)

    @classmethod
    def get_api_id(cls):
        """Identifier used to register this backend."""
        return "MACH"

    @classmethod
    def get_generic_name(cls):
        """Human-readable provider name."""
        return "Syniverse"

    @classmethod
    def get_template(cls):
        """Template used to render the backend configuration page."""
        return "mach/backend.html"

    @classmethod
    def get_form_class(cls):
        """Form used to validate backend settings."""
        return MachBackendForm

    def get_sms_interval(self):
        """Minimum delay in seconds between two outgoing messages."""
        return 1.0 / self.max_sms_per_second

    def send(self, msg, delay=True, *args, **kwargs):
        """Send ``msg`` through the MACH gateway; return the raw HTTP response."""
        params = {
            "id": self.account_id,
            "pw": self.password,
            "snr": self.sender_id,
            "dnr": msg.phone_number,
        }
        try:
            # Latin-1 is the gateway's default message encoding.
            params["msg"] = msg.text.encode("iso-8859-1")
        except UnicodeEncodeError:
            # Fall back to hex-encoded UCS-2 for non-latin text.
            params["msg"] = msg.text.encode("utf-16-be").encode("hex")
            params["encoding"] = "ucs"
        url = "%s?%s" % (MACH_URL, urllib.urlencode(params))
        return urllib2.urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
|
informatics-isi-edu/microscopy | rbk/worker/delete_youtube/clientlib/rbk_delete_youtube_lib/client.py | Python | apache-2.0 | 9,450 | 0.006667 | #!/usr/bin/python
#
# Copyright 2017 University of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Client for deleting videos from YouTube.
"""
import os
import json
import urlparse
import sys
import traceback
import time
import smtplib
from email.mime.text import MIMEText
import socket
from deriva.core import PollingErmrestCatalog, urlquote
mail_footer = 'Do not reply to this message. This is an automated message generated by the system, which does not receive email messages.'
import google.oauth2.credentials
import google_auth_oauthlib.flow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google_auth_oauthlib.flow import InstalledAppFlow
from oauth2client.file import Storage
SCOPES = ['https://www.googleapis.com/auth/youtube.force-ssl', 'https://www.googleapis.com/auth/youtube']
API_SERVICE_NAME = 'youtube'
API_VERSION = 'v3'
mail_footer = 'Do not reply to this message. This is an automated message generated by the system, which does not receive email messages.'
class YouTubeClient (object):
"""
Network client for YouTube.
"""
## Derived from the ermrest iobox service client
def __init__(self, **kwargs):
self.baseuri = kwargs.get("baseuri")
o = urlparse.urlparse(self.baseuri)
self.scheme = o[0]
host_port = o[1].split(":")
self.host = host_port[0]
self.path = o.path
self.port = None
if len(host_port) > 1:
self.port = host_port[1]
self.cookie = kwargs.get("cookie")
self.client_secrets_file = kwargs.get("client_secrets_file")
self.client_oauth2_file = kwargs.get("client_oauth2_file")
self.catalog = PollingErmrestCatalog(
self.scheme,
self.host,
self.path.split('/')[-1],
{'cookie': self.cookie}
)
self.mail_server = kwargs.get("mail_server")
self.mail_sender = kwargs.get("mail_sender")
self.mail_receiver = kwargs.get("mail_receiver")
self.logger = kwargs.get("logger")
self.logger.debug('Delete YouTube Client initialized.')
"""
Send email notification
"""
def sendMail(self, subject, text):
if self.mail_server and self.mail_sender and self.mail_receiver:
retry = 0
ready = False
while not ready:
try:
msg = MIMEText('%s\n\n%s' % (text, mail_footer), 'plain')
msg['Subject'] = subject
msg['From'] = self.mail_sender
msg['To'] = self.mail_receiver
s = smtplib.SMTP(self.mail_server)
s.sendmail(self.mail_sender, self.mail_receiver.split(','), msg.as_string())
s.quit()
self.logger.debug('Sent email notification.')
ready = True
except socket.gaierror as e:
if e.errno == socket.EAI_AGAIN:
time.sleep(100)
retry = retry + 1
ready = retry > 10
else:
ready = True
if ready:
et, ev, tb = sys.exc_info()
self.logger.error('got exception "%s"' % str(ev))
self.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))
except:
et, ev, tb = sys.exc_info()
self.logger.error('got exception "%s"' % str(ev))
self.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))
ready = True
"""
Start the process for deleting files from YouTube
"""
def start(self):
try:
self.deleteFromYouTube()
except:
et, ev, tb = sys.exc_info()
self.logger.error('got unexpected exception "%s"' % str(ev))
self.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))
self.sendMail('FAILURE Delete YouTube: unexpected exception', '%s\nThe process might have been stopped\n' % str(traceback.format_exception(et, ev, tb)))
raise
"""
Get the YouTube Delete credentials
"""
def youtube_authenticated_service(self):
flow = InstalledAppFlow.from_client_secrets_file(self.client_secrets_file, SCOPES)
storage = Storage(self.client_oauth2_file)
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = flow.run_console()
self.youtube = build(API_SERVICE_NAME, API_VERSION, credentials = credentials)
"""
Delete a video from YouTube
"""
def youtube_delete(self, youtube_uri):
res = False
try:
id = youtube_uri[youtube_uri.rfind('/')+1:youtube_uri.index('?')]
self.logger.debug('Deleting YouTube video id="%s".' % (id))
self.youtube_authenticated_service()
if self.youtube is not None:
self.logger.debug('Authenticated to the YouTube delete service.')
response = self.youtube.videos().delete(id=id).execute()
self.logger.debug('Deleted response %s.' % (response))
res = True
else:
self.logger.debug('Authentication for deleting a YouTube video failed.')
except:
et, ev, tb = sys.exc_info()
self.logger.error('got YouTube exception "%s"' % str(ev))
self.logger.error('%s' % str(traceback.format_exception(et, ev, tb)))
return res
"""
Delete videos from YouTube
"""
def deleteFromYouTube(self):
url = '/entity/Common:Delete_Youtube/Youtube_Deleted=FALSE/Processing_Status=in%20progress;Processing_Status::null::'
resp = self.catalog.get(url)
resp.raise_for_status()
files = resp.json()
fileids = []
for f in files:
fileids.append((f['YouTube_URI'], f['RID']))
self.logger.debug('Deleting from YouTube %d videos(s).' % (len(fileids)))
for youtube_uri,rid in fileids:
try:
youtube_deleted = self.y | outube_delete(youtube_uri)
if youtube_deleted == Tru | e:
self.logger.debug('SUCCEEDED deleted from YouTube the video with the URL: "%s".' % (youtube_uri))
columns = ["Youtube_Deleted", "Processing_Status"]
columns = ','.join([urlquote(col) for col in columns])
url = '/attributegroup/Common:Delete_Youtube/RID;%s' % (columns)
obj = {'RID': rid,
'Youtube_Deleted': True,
'Processing_Status': 'success'
}
self.catalog.put(
url,
json=[obj]
)
self.logger.debug('SUCCEEDED updated the Common:Delete_Youtube table entry for the YouTube URL: "%s".' % (youtube_uri))
else:
self.logger.debug('Failure in deleting from YouTube the video with the URL: "%s".' % (youtube_uri))
self.sendMail('FAILURE Delete YouTube: YouTube Failure', 'The video "%s" could not be deleted from Youtube.' % youtube_uri)
self.reportFailure(rid, 'YouTube Failure')
except Exception as e:
et, ev, tb = sys.exc_info()
self.logger.error('got exception "%s"' % str(ev))
|
valexandersaulys/airbnb_kaggle_contest | venv/lib/python3.4/site-packages/Theano-0.7.0-py3.4.egg/theano/sandbox/gpuarray/tests/test_conv_cuda_ndarray.py | Python | gpl-2.0 | 27,571 | 0.001197 | """
Tests for GPU convolution
"""
from __future__ import print_function
import sys
import time
import unittest
import numpy
from six.moves import xrange
import theano
from theano import tensor
from theano.tests.unittest_tools import seed_rng
# We let that import do the init of the back-end if needed.
from .config import mode_with_gpu, test_ctx_name
from ..type import GpuArrayType, get_context
from ..conv import GpuConv
from theano.sandbox.gpuarray import dnn
import pygpu
imported_scipy_convolve2d = False
try:
from scipy.signal import convolve2d
imported_scipy_convolve2d = True
except ImportError:
pass
gftensor4 = GpuArrayType('float32', [False] * 4, context_name=test_ctx_name)
def py_conv_valid_numpy(img, kern):
    """Reference 'valid' 4D convolution implemented with plain loops.

    img is (batch, channels, rows, cols); kern is (nkern, channels, kr, kc).
    """
    assert img.shape[1] == kern.shape[1]
    nbatch, nkern = img.shape[0], kern.shape[0]
    out_rows = img.shape[2] - kern.shape[2] + 1
    out_cols = img.shape[3] - kern.shape[3] + 1
    out = numpy.zeros((nbatch, nkern, out_rows, out_cols), dtype='float32')
    for b in range(nbatch):
        for k in range(nkern):
            for rr in range(out_rows):
                for cc in range(out_cols):
                    # rr, cc index the upper-left corner of the image patch.
                    patch = img[b, :, rr:rr + kern.shape[2],
                                cc:cc + kern.shape[3]]
                    # Flip the patch so this is true convolution,
                    # not cross-correlation.
                    out[b, k, rr, cc] = (patch[:, ::-1, ::-1] *
                                         kern[k, :, :, :]).sum()
    return out
def py_conv_full_numpy(img, kern):
    """Reference 'full' convolution: zero-pad the image, then run 'valid'."""
    pad_rows = 2 * (kern.shape[2] - 1) + img.shape[2]
    pad_cols = 2 * (kern.shape[3] - 1) + img.shape[3]
    padded = numpy.zeros((img.shape[0], img.shape[1], pad_rows, pad_cols),
                         dtype=img.dtype)
    # Place the image so kern.shape - 1 zeros surround it on every side.
    row0, col0 = kern.shape[2] - 1, kern.shape[3] - 1
    padded[:, :, row0:row0 + img.shape[2], col0:col0 + img.shape[3]] = img
    return py_conv_valid_numpy(padded, kern)
def py_conv(img, kern, mode, subsample):
    """Reference convolution dispatcher.

    Uses the scipy implementation when scipy is importable (it is faster),
    otherwise falls back to the pure-numpy loops.
    """
    if imported_scipy_convolve2d:
        return py_conv_scipy(img, kern, mode, subsample)
    if mode == 'valid':
        full = py_conv_valid_numpy(img, kern)
    elif mode == 'full':
        full = py_conv_full_numpy(img, kern)
    else:
        raise Exception("Can't execute this kernel.")
    return full[:, :, ::subsample[0], ::subsample[1]]
def py_conv_scipy(img, kern, mode, subsample):
    """Reference convolution using scipy.signal.convolve2d per channel."""
    assert img.shape[1] == kern.shape[1]
    if mode == 'valid':
        out_rows = img.shape[2] - kern.shape[2] + 1
        out_cols = img.shape[3] - kern.shape[3] + 1
    else:
        out_rows = img.shape[2] + kern.shape[2] - 1
        out_cols = img.shape[3] + kern.shape[3] - 1
    out = numpy.zeros((img.shape[0], kern.shape[0], out_rows, out_cols),
                      dtype='float32')
    # Sum 2D convolutions over the channel axis for every (batch, kernel).
    for b in range(img.shape[0]):
        for k in range(kern.shape[0]):
            for s in range(img.shape[1]):
                out[b, k] += convolve2d(img[b, s, :, :],
                                        kern[k, s, :, :],
                                        mode)
    return out[:, :, ::subsample[0], ::subsample[1]]
def _params_allgood_header():
print("ishape kshape #Mflops CPU Mflops GPU Mflops Speedup")
def _params_allgood(ishape, kshape, mode, subsample=(1, 1), img_stride=(1, 1),
kern_stride=(1, 1), version=-1, verbose=0, random=True,
print_=None, id=None, rtol=1e-5, atol=1e-8,
nb_iter=0, ones=False, compile_kshp=None):
#
# This function is the core of several of the big unit-test drivers,
# but it can also be used very directly on its own to test a specific
# kind of convolution.
#
# See `test_example` (above) for an example of how to use this directly.
#
# :param kshape: (4d)The shape of the kernel at run time.
# :param compile_kshp: (2d) hardcode the shape of the kernel in
# the generated code This is supposed to be
# faster, but we need to check That we raise
# an error if the input have the wrong shape.
#
if ones:
assert not random
npy_img = theano._asarray(numpy.ones(ishape), dtype='float32')
npy_kern = -theano._asarray(numpy.ones(kshape), dtype='float32')
elif random:
npy_img = theano._asarray(numpy.random.rand(*ishape) + 1,
dtype='float32')
npy_kern = theano._asarray(numpy.random.rand(*kshape) - 2,
dtype='float32')
else:
npy_img = theano._asarray(numpy.arange(
numpy.prod(ishape)).reshape(ishape), dtype='float32') + 1
npy_kern = -(theano._asarray(numpy.arange(
numpy.prod(kshape)).reshape(kshape), dtype='float32') + 1)
img = pygpu.array(npy_img, context=get_context(test_ctx_name))
kern = pygpu.array(npy_kern, context=get_context(test_ctx_name))
# we take the stride after the transfert as we make c_contiguous
# data on the GPU.
if img_stride != (1, 1):
img = img[:, :, ::img_stride[0], ::img_stride[1]]
npy_img = npy_img[:, :, ::img_stride[0], ::img_stride[1]]
if kern_stride != (1, 1):
kern = kern[:, :, ::kern_strid | e[0], ::kern_stride[1]]
npy_kern = npy_kern[:, :, | ::kern_stride[0], ::kern_stride[1]]
t2 = None
rval = True
try:
t0 = time.time()
cpuval = py_conv(npy_img, npy_kern, mode, subsample)
t1 = time.time()
i = gftensor4()
k = gftensor4()
op = GpuConv(border_mode=mode,
subsample=subsample,
version=version,
verbose=verbose,
kshp=compile_kshp)(i, k)
f = theano.function([i, k], op, mode=mode_with_gpu)
gpuval = f(img, kern)
t2 = time.time()
for i in range(nb_iter):
gpuval2 = f(img, kern)
assert numpy.allclose(numpy.asarray(gpuval),
numpy.asarray(gpuval2))
assert (numpy.asarray(gpuval) == numpy.asarray(gpuval2)).all()
gpuval = numpy.asarray(gpuval)
if gpuval.shape != cpuval.shape:
print("ERROR: shape mismatch", end=' ', file=sys.stdout)
print(gpuval.shape, cpuval.shape, file=sys.stdout)
rval = False
if rval:
rval = numpy.allclose(cpuval, gpuval, rtol=rtol)
assert numpy.all(numpy.isfinite(gpuval))
except NotImplementedError as e:
print('_params_allgood Failed allclose', e, file=sys.stdout)
rval = False
if (t2 is not None):
if mode == 'valid':
approx_fp = cpuval.size * ishape[1] * kshape[2] * kshape[3] * 2
else:
approx_fp = (ishape[0] * kshape[0] * kshape[1] * kshape[2] *
kshape[3] * ishape[2] * ishape[3] * 2)
approx_fp /= 1e6
cpu_mflops = approx_fp / (t1 - t0)
gpu_mflops = approx_fp / (t2 - t1)
if verbose > 0:
print('%15s' % str(ishape), '%15s' % str(kshape), end=' ',
file=sys.stdout)
print('%12.5f %7.2f %7.2f %7.1f' %
(approx_fp, cpu_mflops, gpu_mflops, (t1 - t0) / (t2 - t1)),
file=sys.stdout)
if not rval:
print('test_' + mode + ' id=' + str(id) +
' FAILED for ishape, kshape, mode, subsample,' +
' img_stride, kern_stride, version', ishape,
kshape, mode, subsample, img_stride, kern_stride,
version, file=sys.stdout)
diff = cpuval - gpuval
diffabs = numpy.absolute(diff)
pr_diff = diffabs / numpy.absolute(cpuval)
nb_close = (diffabs <= (atol + rtol * numpy.absolute(gpuval |
cybertoast/flask-mongoutils | loader.py | Python | bsd-3-clause | 65 | 0 | from fl | ask.ext.mongoeng | ine import MongoEngine
# Shared MongoEngine extension instance for the application.
# Presumably bound later via db.init_app(app) in an app factory --
# confirm against the callers that import this module.
db = MongoEngine()
|
schanezon/webapp | test.py | Python | apache-2.0 | 278 | 0.010791 | from flask import Flask, redirect, abort, url_for
# WSGI application object for this small demo.
app = Flask(__name__)
# Debug mode enables the interactive debugger and auto-reload;
# must not be left enabled in production.
app.debug = True
@app.route('/')
def index():
    """Root URL: redirect the visitor to the login endpoint."""
    # Reconstructed: the original line was garbled by dataset row
    # separators ("| return redir | ect(url_for('login'))").
    return redirect(url_for('login'))
@app.route('/login')
def login():
    """Login endpoint: always responds 401 Unauthorized.

    abort(401) raises an HTTPException, so the call below is never
    reached -- as its name states, it is deliberately unreachable
    (this mirrors the Flask documentation example for abort()).
    """
    abort(401)
    this_is_never_executed()
# Start the development server only when this module is run directly,
# not when it is imported.
if __name__ == '__main__':
    app.run()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.