repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
nzufelt/theano_nn
|
min_work_ex.py
|
Python
|
mit
| 757
| 0.029062
|
import numpy as np
import numpy.random as rng
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d
minibatch = 3
image_height,image_width = 28,28
filter_height,filter_width = 3,3
n_filters = 1
n_channels = 1
n = 1/(np.sqrt(image_height*image_width))
X = T.tensor4(name='X')
X_shape = (minibatch,n_channels,image_height,image_width)
W_shape = (n_filters,n_channels,filter_height,filt
|
er_width)
W = theano.shared(n*rng.randn(*W_shape),name='W')
conv_out = conv2d(X,
W,
input_shape=X_shape,
filter_s
|
hape=W_shape,
border_mode='valid')
f = theano.function([X],[conv_out])
X_data = np.array(rng.randint(low=0,high=256,size=X_shape))
conv_out = f(X_data)
|
j2a/django-simprest
|
simprest/docs.py
|
Python
|
bsd-3-clause
| 1,557
| 0.001927
|
from django.core.urlresolvers import get_resolver
from django.http import HttpResponse
class HandlerRegistry(dict):
def __init__(self):
self.maxlength = 0
def __setitem__(self, name, v
|
alue):
self.maxlength = max(self.maxlength, len(str(value)))
super(HandlerRegistry, self).__setitem__(name, value)
def register(
|
self, handler):
self[handler] = None
def sync_urls(self):
resolver = get_resolver(None)
reverse = resolver.reverse_dict
for h in self:
if not self[h]:
# tied to current django url storage
urltuple = reverse[h][0][0]
args = dict((name, '<%s>' % name) for name in urltuple[1])
url = urltuple[0] % args
self[h] = url
registry = HandlerRegistry()
def docs(request):
registry.sync_urls()
output = []
format = '%%-%ss\n%%s\n\n' % registry.maxlength
paramformatsrc = '\t%%-%ss - %%s\n'
for handler, url in registry.items():
try:
paramlength = max(map(len, handler.params.keys()))
paramformat = paramformatsrc % paramlength
params = ''.join(paramformat % (k, v) for k, v
in handler.params.items())
except ValueError:
params = ''
if handler.__doc__:
doc = '\t%s\n\n%s' % (handler.__doc__.strip(), params)
else:
doc = params
output.append(format % (url, doc))
return HttpResponse(''.join(output), mimetype='text/plain')
|
letiangit/802.11ah-ns3
|
src/energy/bindings/callbacks_list.py
|
Python
|
gpl-2.0
| 641
| 0.00624
|
callback_classes = [
['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'double', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'int', 'ns3::empty'
|
, 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'n
|
s3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
Ganapati/DjangoZik
|
infos_grabber/coverGrabber.py
|
Python
|
gpl-2.0
| 998
| 0.002004
|
#!/usr/bin/python
import requests
from bs4 import BeautifulSoup
class CoverGrabber:
def __init__(self, url=None):
if url is None:
self.url = 'http://www.amazon.com/s/ref=nb_sb_noss_2?url=search-alias=aps&field-keywords=cd'
else:
self.url = url
def request_service(self, keyword):
complete_url = "%s %s" % (self.url, keyword)
html = requests.get(complete_url)
soup = BeautifulSoup(html.text)
return soup
def grab(self, keyword):
try:
soup = self.request_service(keyword)
image = soup.find_all("img",
{"class": "s-access-image"})[0].get('src')
return image
except:
return None
if __name__ == "__main__":
pri
|
nt "Grab CD Cover from Amazon"
cover_grabber = CoverGrabber()
cover = cover_grabber.grab('Blac
|
k ice')
if cover is None:
print "Error"
else:
print "Cover : %s" % cover
|
rananda/cfme_tests
|
cfme/services/catalogs/catalog_item.py
|
Python
|
gpl-2.0
| 16,459
| 0.001701
|
# -*- coding: utf-8 -*-
from functools import partial
from types import NoneType
from navmazing import NavigateToSibling, NavigateToAttribute
from cfme.exceptions import DestinationNotFound
from cfme.fixtures import pytest_selenium as sel
from cfme.provisioning import provisioning_form as request_form
from cfme.web_ui import (
Form, Select, Table, accordion, fill, paginator,
flash, form_buttons, tabstrip, DHTMLSelect, Input, Tree, AngularSelect,
BootstrapTreeview, toolbar as tb, match_location, CheckboxTable)
from utils import version, fakeobject_or_object
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator
from utils.update import Updateable
from utils.pretty import Pretty
from utils.version import current_version
cfg_btn = partial(tb.select, "Configuration")
policy_btn = partial(tb.select, "Policy")
accordion_tree = partial(accordion.tree, "Catalog Items")
dynamic_tree = Tree("//div[@id='basic_info_div']//ul[@class='dynatree-container']")
entry_tree = BootstrapTreeview('automate_treebox')
listview_table = CheckboxTable(table_locator='//div[@id="list_grid"]/table')
template_select_form = Form(
fields=[
('template_table', Table('//div[@id="prov_vm_div"]/table')),
('add_button', form_buttons.add),
('cancel_button', form_buttons.cancel)
]
)
# Forms
basic_info_form = Form(
fields=[
('name_text', Input("name")),
('description_text', Input("description")),
('display_checkbox', Input("display")),
('select_catalog', AngularSelect('catalog_id')),
('select_dialog', AngularSelect('dialog_id')),
('select_orch_template', AngularSelect('template_id')),
('select_provider', AngularSelect('manager_id')),
('select_config_template', AngularSelect('template_id')),
('field_entry_point', Input("fqname")),
('retirement_entry_point', Input("retire_fqname")),
('edit_button', form_buttons.save),
('apply_btn', '//a[normalize-space(.)="Apply"]')
])
# TODO: Replace with Taggable
edit_tags_form = Form(
fields=[
("select_tag", AngularSelect('tag_cat')),
("select_value", AngularSelect('tag_add'))
])
detail_form = Form(
fields=[
('long_desc', Input('long_description')),
])
resources_form = Form(
fields=[
('choose_resource', Select("//select[@id='resource_id']")),
('add_button', form_buttons.add),
('save_button', form_buttons.save)
])
button_group_form = Form(
fields=[
('btn_group_text', Input("name")),
('btn_group_hvr_text', Input("description")),
('add_button', form_buttons.add)
])
button_form = Form(
fields=[
('btn_text', Input("name")),
('btn_hvr_text', Input("description")),
('select_dialog', Select("//select[@id='dialog_id']")),
('system_process', Select("//select[@id='instance_name']")),
('request', Input("object_request")),
('add_button', form_buttons.add)
])
match_page = partial(match_location, title='Catalogs', controller='catalog')
class CatalogItem(Updateable, Pretty, Navigatable):
pretty_attrs = ['name', 'item_type', 'catalog', 'catalog_name', 'provider', 'domain']
def __init__(self, item_type=None, vm_name=None, name=None, description=None,
display_in=False, catalog=None, dialog=None,
catalog_name=None, orch_template=None, provider_type=None,
provider=None, config_template=None, prov_data=None, domain="ManageIQ (Locked)",
appliance=None):
self.item_type = item_type
self.vm_name = vm_name
self.name = name
self.description = description
self.display_in = display_in
self.catalog = catalog
self.dialog = dialog
self.catalog_name = catalog_name
self.orch_template = orch_template
self.provider = provider
self.config_template = config_template
self.provider_type = provider_type
self.provisioning_data = prov_data
self.domain = domain
Navigatable.__init__(self, appliance=appliance)
def __str__(self):
return self.name
def create(self):
# Create has sequential forms, the first is only the provider type
navigate_to(self, 'Add')
# For element not found exception (To be removed)
sel.sleep(5)
sel.select("//select[@id='st_prov_type']",
self.provider_type or self.item_type or 'Generic')
sel.wait_for_element(basic_info_form.name_text)
catalog = fakeobject_or_object(self.catalog, "name", "Unassigned")
dialog = fakeobject_or_object(self.dialog, "name", "No Dialog")
# Need to provide the (optional) provider name to the form, not the object
provider_name = None
provider_required_types = ['AnsibleTower', 'Orchestration']
if self.item_type in provider_required_types \
or self.provider_type in provider_required_types:
provider_name = self.provider.name
# For tests where orchestration template is None
orch_template = None
if self.orch_template:
orch_template = self.orch_template.template_name
fill(basic_info_form, {'name_text': self.name,
'description_text': self.description,
'display_checkbox': self.display_in,
'select_catalog': catalog.name,
'select_dialog': dialog.name,
'select_orch_template': orch_template,
'select_provider': provider_name,
'select_config_template': self.config_template})
if not (self.item_type in provider_required_types):
sel.click(basic_info_form.field_entry_point)
if version.current_version() < "5.7":
dynamic_tree.click_path("Datastore", self.domain, "Service", "Provisioning",
"StateMachines", "ServiceProvision_Template", "default")
else:
entry_tree.click_path("Datastore", self.domain, "Service", "Provisioning",
"StateMachines", "ServiceProvision_Template", "default")
sel.click(basic_info_form.apply_btn)
if version.current_version() >= "5.7" and self.item_type == "AnsibleTower":
sel.click(basic_info_form.retirement_entry_point)
entry_tree.click_path("Datastore", self.domain, "Service", "Retirement",
"StateMachines", "ServiceRetirement", "Gene
|
ric")
sel.click(basic_info_form.apply_btn)
if self.catalog_name is not None \
and self.provisioning_data is not None \
and not isinstance(self.provider, NoneType):
tabstrip.select_tab("Request Info")
tabstrip.select_tab("Catalog")
templa
|
te = template_select_form.template_table.find_row_by_cells({
'Name': self.catalog_name,
'Provider': self.provider.name
})
sel.click(template)
request_form.fill(self.provisioning_data)
sel.click(template_select_form.add_button)
def update(self, updates):
navigate_to(self, 'Edit')
fill(basic_info_form, {'name_text': updates.get('name', None),
'description_text':
updates.get('description', None)},
action=basic_info_form.edit_button)
flash.assert_success_message('Service Catalog Item "{}" was saved'.format(self.name))
def delete(self, from_dest='All'):
if from_dest in navigator.list_destinations(self):
navigate_to(self, from_dest)
else:
msg = 'cfme.services.catalogs.catalog_item does not have destination {}'\
.format(from_dest)
raise DestinationNotFound(msg)
if from_dest == 'All':
# select the row for deletion
listview_ta
|
cinepost/Copperfield_FX
|
copper/core/utils/singleton.py
|
Python
|
unlicense
| 849
| 0.003534
|
# Taken from here: https://stackoverflow.com/questions/50566934/why-is-this-singleton-implementation-not-thread-safe
import functools
import threading
lock = threading.Lock()
def synchronized(lock):
""" Synchronization decorator """
def wrapper(f):
@functools.wraps(f)
def inner_wrapper(*args, **kw):
with lock:
return f(*args, **kw)
return inner_wrapper
return wrapper
clas
|
s SingletonOptimized(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._locked_call(*args, **kwargs)
return cls._instances[cls]
@synchronized(lock)
def _locked_call(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(SingletonOptim
|
ized, cls).__call__(*args, **kwargs)
|
boar/boar
|
boar/accounts/urls.py
|
Python
|
bsd-3-clause
| 562
| 0.010676
|
from django.conf.urls.defaul
|
ts import *
urlpatterns = patterns('',
url(r'^$', 'boar.accounts.views.accounts', name='accounts'),
url(r'^signout/$', 'boar.accounts.views.signout', name='accounts_signout'),
url(r'^settings/$', 'boar.accounts.views.settings_form', name='accounts_settings'),
url(r'^mailing-lists/unsubscribe/(?P<user_id>\d+)-(?P<mailing_list_id>\d+)-(?P<token>.+)/$',
'boar.mailing_lists.views.
|
unsubscribe',
name='accounts_mailing_lists_unsubscribe'),
url(r'^user-data/$', 'boar.accounts.views.user_data'),
)
|
magnunleno/Rurouni
|
tests/test_basic_operations.py
|
Python
|
gpl-3.0
| 5,681
| 0.010033
|
#!/usr/bin/env python
# encoding: utf-8
from conftests import *
from rurouni.exceptions import *
from rurouni.types import *
from rurouni import Database, Column, Table
def test_insert_errors(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
with pytest.raises(UnknownField):
id = Client.insert(name="John", last_name="Doe")
assert 'id' not in locals()
db.destroy()
def test_insert(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
name1 = "John"
birthdate1 = randomDate()
name2 = "Jack"
birthdate2 = randomDate()
name3 = "Bob"
birthdate3 = randomDate()
c1 = Client.insert(name=name1, birthdate=birthdate1)
c2 = Client.insert(name=name2, birthdate=birthdate2)
c3 = Client.insert(name=name3, birthdate=birthdate3)
assert c1.id == 1
assert c1.name == name1
assert c1.birthdate == birthdate1
assert c2.id == 2
assert c2.name == name2
assert c2.birthdate == birthdate2
assert c3.id == 3
assert c3.name == name3
assert c3.birthdate == birthdate3
db.destroy()
def test_insert_many_errors(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
with pytest.raises(InvalidData):
Client.insert_many() == []
with pytest.raises(InvalidData):
Client.insert_many(None, None)
with pytest.raises(InvalidData):
Client.insert_many({})
with pytest.raises(UnknownField):
Client.insert_many({'n':'err'})
data = [
{'name':'John', 'birthdate':randomDate()},
{'name':'Jack', 'birthdate':randomDate()},
{'name':'Bob', 'birthdate':randomDate()},
]
Client.insert_many(*data)
for i, data in enumerate(data, 1):
c = Client(i)
assert c.name == data['name']
assert c.birthdate == data['birthdate']
db.destroy()
def test_hasId(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
assert not Client.has(1)
assert not (1 in Client)
c = Client.insert(name='John', birthdate=randomDate())
assert c.id == 1
assert Client.has(1)
assert 1 in Client
db.destroy()
def test_delete(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
data = [
{'name':'John', 'birthdate':randomDate()},
{'name':'Jack', 'birthdate':randomDate()},
{'name':'Bob', 'birthdate':randomDate()},
]
Client.insert_many(*data)
assert Client.has(1) == True
assert Client.has(2) == True
assert Client.has(3) == True
Client.delete(2)
assert Client.has(1) == True
assert Client.has(2) == False
assert Client.has(3) == True
with pytest.raises(InvalidId):
Client.delete(None)
Client.delete_all()
assert Client.has(1) == False
assert Client.has(2) == False
assert Client.has(3) == False
assert Client.isEmpty()
db.destroy()
def test_delete_many(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
data = [
{'name':'John', 'birthdate':randomDate()},
{'name':'Jack', 'birthdate':randomDate()},
{'name':'Bob', 'birthdate':randomDate()},
{'name':'John', 'birthdate':randomDate()},
{'name':'Jack', 'birthdate':randomDate()},
{'name':'Bob', 'birthdate':randomDate()},
]
Client.insert_many(*data)
Client.delete_many([1, 3, 5])
assert Client.has(1) == False
assert Client.has(2) == True
assert Client.has(3) == False
assert Client.has(4) == True
assert Client.has(5) == False
assert Client.has(6) == True
assert len(Client) == 3
db.destroy()
def test_iter(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
data = [
{'name':'John', 'birthdate':randomDate()},
{'name':'Jack', 'birthdate':randomDate()},
{'name':'Bob', 'birthdate':randomDate()},
]
Client.insert_many(*data)
# Iterate using Client.all()
count = 0
for (n, c) in enumerate(Client.all()):
vals = data[n]
assert c.id == n + 1
assert c.name == vals['name']
assert c.birthdate == vals['birthdate']
count += 1
assert count == 3
# Iterate using Client.__iter__
count = 0
for (n, c) in enumerate(Client):
vals = data[n]
assert c.id == n + 1
assert c.name == vals['name']
assert c.birthdate == vals['birthdate']
count += 1
assert count == 3
db.destroy()
def test_count(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
assert Client.count() == 0
assert len(Client) == 0
Client.insert(name='John', birthdate=randomDate())
assert Client.count() == 1
assert len(Client) == 1
Client.insert(name='Jack', birthdate=randomDate())
assert Client.count() == 2
assert len(Client) == 2
Client.insert(name='Bob', birthdate=randomDate())
assert Client.count() == 3
assert len(C
|
lient) == 3
db.destroy()
def test_empty(db):
class Client(Table):
__db__ = db
name = Column(String)
birthdate = Column(Date)
assert Client.isEmpty()
Client.insert(name='Joh
|
n', birthdate=randomDate())
assert not Client.isEmpty()
db.destroy()
|
Arnukk/TDS
|
wnaffect.py
|
Python
|
mit
| 3,540
| 0.012994
|
# -*- coding: utf-8 -*-
"""
Clement Michard (c) 2015
"""
import os
import sys
import nltk
from emotion import Emotion
from nltk.corpus import WordNetCorpusReader
import xml.etree.ElementTree as ET
class WNAffect:
"""WordNet-Affect ressource."""
def __init__(self, wordnet16_dir, wn_domains_dir):
"""Initializes the WordNet-Affect object."""
try:
cwd = os.getcwd()
nltk.data
|
.path.append(cwd)
wn16_path = "{0}/dict".format(wordnet16_dir)
self.wn16 = WordNetCorpusReader(os.path.abspath("{0}/{1}".format(cwd, wn16_path)), nltk.data.find(wn16_path))
self.flat_pos = {'NN':'NN', 'NNS':'NN', 'JJ':'JJ', 'JJR':'JJ', 'JJS':'JJ', 'RB'
|
:'RB', 'RBR':'RB', 'RBS':'RB', 'VB':'VB', 'VBD':'VB', 'VGB':'VB', 'VBN':'VB', 'VBP':'VB', 'VBZ':'VB'}
self.wn_pos = {'NN':self.wn16.NOUN, 'JJ':self.wn16.ADJ, 'VB':self.wn16.VERB, 'RB':self.wn16.ADV}
self._load_emotions(wn_domains_dir)
self.synsets = self._load_synsets(wn_domains_dir)
except:
print "Please download the dependencies and re-run the script after installing them successfully. Exiting !"
exit()
def _load_synsets(self, wn_domains_dir):
"""Returns a dictionary POS tag -> synset offset -> emotion (str -> int -> str)."""
tree = ET.parse("{0}/wn-affect-1.1/a-synsets.xml".format(wn_domains_dir))
root = tree.getroot()
pos_map = { "noun": "NN", "adj": "JJ", "verb": "VB", "adv": "RB" }
synsets = {}
for pos in ["noun", "adj", "verb", "adv"]:
tag = pos_map[pos]
synsets[tag] = {}
for elem in root.findall(".//{0}-syn-list//{0}-syn".format(pos, pos)):
offset = int(elem.get("id")[2:])
if not offset: continue
if elem.get("categ"):
synsets[tag][offset] = Emotion.emotions[elem.get("categ")] if elem.get("categ") in Emotion.emotions else None
elif elem.get("noun-id"):
synsets[tag][offset] = synsets[pos_map["noun"]][int(elem.get("noun-id")[2:])]
return synsets
def _load_emotions(self, wn_domains_dir):
"""Loads the hierarchy of emotions from the WordNet-Affect xml."""
tree = ET.parse("{0}/wn-affect-1.1/a-hierarchy.xml".format(wn_domains_dir))
root = tree.getroot()
for elem in root.findall("categ"):
name = elem.get("name")
if name == "root":
Emotion.emotions["root"] = Emotion("root")
else:
Emotion.emotions[name] = Emotion(name, elem.get("isa"))
def get_emotion(self, word, pos):
"""Returns the emotion of the word.
word -- the word (str)
pos -- part-of-speech (str)
"""
if pos in self.flat_pos:
pos = self.flat_pos[pos]
synsets = self.wn16.synsets(word, self.wn_pos[pos])
if synsets:
offset = synsets[0].offset
if offset in self.synsets[pos]:
return self.synsets[pos][offset], offset
return None
def get_emotion_synset(self, offset):
"""Returns the emotion of the synset.
offset -- synset offset (int)
"""
for pos in self.flat_pos.values():
if offset in self.synsets[pos]:
return self.synsets[pos][offset]
return None
|
SlicerRt/SlicerDebuggingTools
|
PyDevRemoteDebug/ptvsd-4.1.3/ptvsd/_vendored/pydevd/pydevd_concurrency_analyser/pydevd_thread_wrappers.py
|
Python
|
bsd-3-clause
| 2,233
| 0.002239
|
from _pydev_imps._pydev_saved_modules import threading
def wrapper(fun):
def pydev_after_run_call():
pass
def inner(*args, **kwargs):
fun(*args, **kwargs)
pydev_after_run_call()
return inner
def wrap_attr(obj, attr):
t_save_start = getattr(obj, attr)
setattr(obj, attr, wrapper(t_save_start))
obj._pydev_run_patched = True
class ObjectWrapper(object):
def __init__(self, obj):
self.wrapped_object = obj
try:
import functools
functools.update_wrapper(self, obj)
except:
pass
def __getattr__(self, attr):
orig_attr = getattr(self.wrapped_object, attr) #.__getattribute__(attr)
if callable(orig_attr):
def patched_attr(*args, **kwargs):
self.call_begin(attr)
result = orig_attr(*args, **kwargs)
self.call_end(attr)
if result == self.wrapped_object:
return self
return result
return patched
|
_attr
else:
return orig_attr
def call_begin(self, attr):
pass
def call_end(self, attr):
pass
def __enter__(self):
self.call_begin("__enter__")
self.wrapped_object.__enter__()
self.call_end("__enter__")
def __exit__(self, exc_type, exc_val, exc_tb):
self.call_begin("__exit__")
self.wrapped_object.__exit__(exc_type, exc_val, exc_tb)
def factory_wrapper(fun)
|
:
def inner(*args, **kwargs):
obj = fun(*args, **kwargs)
return ObjectWrapper(obj)
return inner
def wrap_threads():
# TODO: add wrappers for thread and _thread
# import _thread as mod
# print("Thread imported")
# mod.start_new_thread = wrapper(mod.start_new_thread)
import threading
threading.Lock = factory_wrapper(threading.Lock)
threading.RLock = factory_wrapper(threading.RLock)
# queue patching
try:
import queue # @UnresolvedImport
queue.Queue = factory_wrapper(queue.Queue)
except:
import Queue
Queue.Queue = factory_wrapper(Queue.Queue)
|
chimkentec/KodiMODo_rep
|
script.module.xbmcup/lib/xbmcup/cache.py
|
Python
|
gpl-3.0
| 7,268
| 0.004403
|
# -*- coding: utf-8 -*-
__author__ = 'hal9000'
__all__ = ['Cache', 'CacheServer']
import socket
import time
import json
import hashlib
from sqlite3 import dbapi2 as sqlite
import xbmc
import log
import gui
import system
SOCKET = '127.0.0.1', 59999
CLEAR = 60*60*24 # 1 day
class SQL:
def __init__(self, name, version):
self.fs = system.FS('cache')
if self.fs.exists('sandbox://' + name + '.sqlite'):
self.con = sqlite.connect(self.fs('sandbox://' + name + '.sqlite'))
else:
self.con = sqlite.connect(self.fs('sandbox://' + name + '.sqlite'))
self.sql_set('pragma auto_vacuum=1')
self.sql_set('create table meta(data text)')
self.sql_set('insert into meta(data) values(?)', (json.dumps({'version': version, 'timeout': int(time.time()) + CLEAR}),))
self.sql_set('create table cache(token varchar(32) unique, expire integer, data text)')
s
|
elf.sql_set('create index dataindex on cache(expire)')
self.meta_load()
def health(self, version):
if self.meta['version'] != version:
self.meta_save('version', version)
self.clear()
elif self.meta['timeout'] < int(time.time()):
self.sq
|
l_set('delete from cache where expire<?', (int(time.time()), ))
self.meta_save('timeout', int(time.time()) + CLEAR)
def get(self, token):
return self.sql_get('select data from cache where token=? and expire>? limit 1', (hashlib.md5(str(token)).hexdigest(), int(time.time())))
def set(self, token, expire, data):
try:
jsdata = json.dumps(data)
except:
pass
else:
self.sql_set('replace into cache(token,expire,data) values(?,?,?)', (hashlib.md5(str(token)).hexdigest(), int(time.time()) + expire, jsdata))
def clear(self):
self.sql_set('delete from cache')
self.meta_save('timeout', int(time.time()) + CLEAR)
# Private
def sql_get(self, sql, *args):
cur = self.con.cursor()
cur.execute(sql, *args)
rows = cur.fetchall()
cur.close()
try:
return json.loads(rows[0][0])
except:
return None
def sql_set(self, sql, *args):
cur = self.con.cursor()
cur.execute(sql, *args)
self.con.commit()
cur.close()
def meta_load(self):
self.meta = self.sql_get('select data from meta')
if not self.meta:
self.meta = {'version': '', 'timeout': 0}
def meta_save(self, key, value):
self.meta[key] = value
self.sql_set('update meta set data=?', (json.dumps(self.meta),))
class Base:
def recv(self, sock):
data = ''
length = ''
idle = time.time()
while True:
try:
if isinstance(length, basestring):
c = sock.recv(1)
if c == '.':
length = int(length)
else:
length += c
else:
data = sock.recv(length - len(data))
except socket.error, e:
if not e.errno in (10035, 35):
self.log('Recive', repr(e))
if e.errno in (22,):
self.log('Socket error 22')
return None
if idle + 10 < time.time():
self.log('Timeout')
return None
else:
if not isinstance(length, basestring) and len(data) == length:
try:
return json.loads(data)
except Exception, e:
self.log('JSON', repr(e))
return None
def send(self, sock, data):
try:
jsdata = json.dumps(data)
except:
jsdata = 'null'
sock.send(str(len(jsdata)) + '.' + jsdata)
def log(self, *args):
log.error(str(self.__class__.__name__), *args)
class Cache(Base):
def __init__(self, name, version=None):
self.name = str(name).strip()
self.version = str(version).strip()
def call(self, token, fun, *args, **kwargs):
cache = self._call([1, token])
if cache is not None:
return cache
res = fun(*args, **kwargs)
if res is None:
return None
else:
if isinstance(res, tuple) and len(res) == 2 and isinstance(res[0], int):
self._call([2, token, res[0], res[1]])
return res[1]
else:
return res
def clear(self):
self._call('clear')
def _call(self, data):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect(SOCKET)
except socket.error, e:
if e.errno in (111,):
self.log("CacheServer isn't running")
else:
self.log('Connect', repr(e))
return None
except:
return None
else:
self.send(sock, [self.name, self.version] + data)
r = self.recv(sock)
sock.close()
return r
class CacheServer(Base):
def __init__(self):
self.sql = {}
def run(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind(SOCKET)
except Exception, e:
self.log('Bind', repr(e))
gui.message('Failed to start CacheServer. Check log.')
else:
sock.listen(1)
sock.setblocking(0)
idle = time.time()
while not xbmc.abortRequested:
try:
(client, address) = sock.accept()
except socket.error, e:
if e.errno == 11 or e.errno == 10035 or e.errno == 35:
if idle + 3 < time.time():
time.sleep(0.5)
continue
self.log('Accept', repr(e))
continue
except:
continue
else:
self.send(client, self.command(self.recv(client)))
idle = time.time()
sock.close()
def command(self, data):
if not data or not isinstance(data, list) or len(data) < 3 or data[2] not in (1, 2, 3):
return None
sql = self.open(data[0], data[1])
if not sql:
return None
if data[2] == 1 and len(data) == 4 and isinstance(data[3], basestring):
return sql.get(data[3])
elif data[2] == 2 and len(data) == 6 and isinstance(data[3], basestring) and isinstance(data[4], int):
sql.set(data[3], data[4], data[5])
return 1
elif data[2] == 3:
sql.clear()
return 1
return None
def open(self, db, version):
name = str(db).strip()
if not name:
return None
ver = str(version).strip()
if db not in self.sql:
self.sql[db] = SQL(db, ver)
self.sql[db].health(ver)
return self.sql[db]
|
joshsamara/game-website
|
core/migrations/0014_auto_20150413_1639.py
|
Python
|
mit
| 496
| 0.002016
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0013_merge'),
]
operations = [
migrations
|
.AlterField(
model_name='user',
name='public',
f
|
ield=models.BooleanField(default=True, help_text=b'Determines whether or not your profile is open to the public'),
preserve_default=True,
),
]
|
rudhir-upretee/Sumo17_With_Netsim
|
tools/assign/costFunctionChecker.py
|
Python
|
gpl-3.0
| 8,986
| 0.004118
|
#!/usr/bin/env python
"""
@file costFunctionChecker.py
@author Michael Behrisch
@author Daniel Krajzewicz
@author Jakob Erdmann
@date 2009-08-31
@version $Id: costFunctionChecker.py 13811 2013-05-01 20:31:43Z behrisch $
Run duarouter repeatedly and simulate weight changes via a cost function.
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2009-2013 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
import os, sys, subprocess, types
from datetime import datetime
from optparse import OptionParser
from xml.sax import make_parser, handler
def call(command, log):
if not isinstance(args, types.StringTypes):
command = [str(c) for c in command]
print >> log, "-" * 79
print >> log, command
log.flush()
retCode = subprocess.call(command, stdout=log, stderr=log)
if retCode != 0:
print >> sys.stderr, "Execution of %s failed. Look into %s for details." % (command, log.name)
sys.exit(retCode)
def writeRouteConf(step, options, file, output):
fd = open("iteration_" + str(step) + ".duarcfg", "w")
print >> fd, """<configuration>
<input>
<net-file value="%s"/>""" % options.net
if step==0:
if options.flows:
print >> fd, ' <flow-definition value="%s"/>' % file
else:
print >> fd, ' <trip-defs value="%s"/>' % file
else:
print >> fd, ' <alternatives value="%s"/>' % file
print >> fd, ' <weights value="dump_%s_%s.xml"/>' % (step-1, options.aggregation)
print >> fd, """ </input>
<output>
<output-file value="%s"/>
<exit-times value="True"/>
</output>""" % output
print >> fd, """ <processing>
<continue-on-unbuild value="%s"/>
<expand-weights value="True"/>
<gBeta value="%s"/>
<gA value="%s"/>
</processing>""" % (options.continueOnUnbuild, options.gBeta, options.gA)
print >> fd, ' <random_number><abs-rand value="%s"/></random_number>' % options.absrand
print >> fd, ' <time><begin value="%s"/>' % options.begin,
if options.end:
print >> fd, '<end value="%s"/>' % options.end,
print >> fd, """</time>
<report>
<verbose value="%s"/>
<suppress-warnings value="%s"/>
</report>
</configuration>""" % (options.verbose, options.noWarnings)
fd.close()
class RouteReader(handler.ContentHandler):
def __init__(self):
self._edgeWeights = {}
self._maxDepart = 0
def startElement(self, name, attrs):
if name == 'route':
for edge in attrs['edges'].split():
if not edge in self._edgeWeights:
self._edgeWeights[edge] = 0
self._edgeWeights[edge] += 1
elif name == 'vehicle':
if float(attrs['depart']) > self._maxDepart:
self._maxDepart = float(attrs['depart'])
def getWeight(self, edge):
return self._edgeWeights.get(edge, 0)
def getMaxDepart(self):
return self._maxDepart
class NetReader(handler.ContentHandler):
def __init__(self):
self._edges = []
def startElement(self, name, attrs):
if name == 'edge':
if not attrs.has_key('function') or attrs['function'] == 'normal':
self._edges.append(attrs['id'])
def getEdges(self):
return self._edges
def identity(edge, weight):
return weight
def generateWeights(step, options, edges, weights, costFunction):
fd = open("dump_%s_%s.xml" % (step, options.aggregation), "w")
print >> fd, '<?xml version="1.0"?>\n<netstats>'
for time in range(0, int(reader.getMaxDepart()+1), options.aggregation):
print >> fd, ' <interval begin="%s" end="%s" id="dump_%s">' % (time, time + options.aggregation, options.aggregation)
for edge in edges:
cost = costFunction(edge, weights.getWeight(edge))
if cost != None:
print >> fd, ' <edge id="%s" traveltime="%s"/>' % (edge, cost)
print >> fd, ' </interval>'
print >> fd, '</netstats>'
fd.close()
optParser = OptionParser()
optParser.add_option("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="tell me what you are doing")
optParser.add_option("-C", "--continue-on-unbuild", action="store_true", dest="continueOnUnbuild",
default=False, help="continues on unbuild routes")
optParser.add_option("-w", "--disable-warnings", action="store_true", dest="noWarnings",
default=False, help="disables warnings")
optParser.add_option("-n", "--net-file", dest="net",
help="SUMO network (mandatory)", metavar="FILE")
optParser.add_option("-t", "--trips", dest="trips",
help="trips in step 0 (this or flows is mandatory)", metavar="FILE")
optParser.add_option("-F", "--flows",
help="flows in step 0 (this or trips is mandatory)", metavar="FILE")
optParser.add_option("-+", "--additional", dest="additional",
default="", help="Additional files")
optParser.add_option("-b", "--begin", dest="begin",
type="int", default=0, help="Set simulation/routing begin [default: %default]")
optParser.add_option("-e", "--end", dest="end",
type="int", help="Set simulation/routing end [default: %default]")
optParser.add_option("-R", "--route-steps", dest="routeSteps",
type="int", default=200, help="Set simulation route steps [default: %default]")
optParser.add_option("-a", "--aggregation", dest="aggregation",
type="int", default=900, help="Set main weights aggregation period [default: %default]")
optParser.add_option("-A", "--gA", dest="gA",
type="float", default=.5, help="Sets Gawron's Alpha [default: %default]")
optParser.add_option("-B", "--gBeta", dest="gBeta",
type="float", default=.9, help="Sets Gawron's Beta [default: %default]")
optParser.add_option("-f", "--first-step", dest="firstStep",
type="int", default=0, help="First DUA step [default: %default]")
optParser.add_option("-l", "--last-step", dest="lastStep",
type="int", default=50, help="Last DUA step [default: %default]")
optParser.add_option("-p", "--path", dest="path",
default=os.environ.get("SUMO_BINDIR", ""), help="Path to binaries [default: %default]")
optParser.add_option("-y", "--absrand", dest="absrand", action="store_true",
default=False, help="use current time to generate random number")
optParser.add_option("-c", "--cost-function", dest="costfunc",
default="identity", help="(python) function to use as cost function")
(options, args) = optParser.parse_args()
if not options.net or not (options.trips or options.flows):
optParser.error("At least --net-file and --trips or --flows have to be given!")
duaBinary = os.environ.get("DUAROUTER_BINARY", os.path.join(options.path, "duarouter"))
log = open("dua-log.txt", "w+")
parser = make_parser()
reader = NetReader()
parser.setContentHandler(reader)
parser.parse(options.net)
edges = reader.getEdges()
if "." in options.costfunc:
idx = options.costfunc.rfind(".")
module = options.costfunc[:idx]
func = options.costfunc[idx+1:]
exec("from %s import %s as costFunction" % (module, func))
else:
exec("costFunction = %s" % options.costfunc)
if options.flows:
tripFiles = options.flows.split(",")
else:
tripFiles = options.trips.split(",")
starttime = datetime.now()
|
for step in range(options.firstStep, options.lastStep):
btimeA = datetime.now()
print "> Executing step " + str(step)
# router
files = []
for tripFile in tripFiles:
file = tripFile
tripFile = os.path.basename(tripFile)
if step
|
>0:
file = tripFile[:tripFile.find(".")] + "_%s.rou.alt.xml" % (step-1)
output = tripFile[:tripFile.find(".")] + "_%s.rou.xml" % step
print ">> Running router with " + file
btime = datetime.now()
|
waterdotorg/power.Water
|
project/custom/management/commands/friend_joined_email.py
|
Python
|
gpl-3.0
| 2,527
| 0.004353
|
import datetime
import logging
import time
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail.message import EmailMultiAlternatives
from django.core.management.base import BaseCommand
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.timezone import utc
from custom.models import Profile, FriendJoinedEmailLog
# Get an instance of a logger
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Friend joined email daemon'
args = ''
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.site = Site.objects.get_current()
def close_db_connection(self):
from django import db
db.close_connection()
def handle(self, *args, **options):
while True:
last_hour = datetime.datetime.utcnow().replace(tzinfo=utc) - datetime.timedelta(hours=1)
profiles = Profile.objects.select_related().filter(
user__date_joined__gte=last_hour,
user_referrer__profile__enable_email_updates=True,
user_referrer__is_active=True,
)
for profile in profiles:
|
if not profile.user_referrer.e
|
mail:
continue
try:
FriendJoinedEmailLog.objects.get(user=profile.user_referrer, user_referred=profile.user)
except FriendJoinedEmailLog.DoesNotExist:
dict_context = {
'site': self.site,
'referred_profile': profile,
'referring_profile': profile.user_referrer.get_profile(),
}
email_subject = render_to_string('emails/friend-joined/subject.txt', dict_context).strip()
email_txt = render_to_string('emails/friend-joined/message.txt', dict_context)
email_html = render_to_string('emails/friend-joined/message.html', dict_context)
email = EmailMultiAlternatives(
email_subject, email_txt, settings.DEFAULT_FROM_EMAIL, [profile.user_referrer.email,]
)
email.attach_alternative(email_html, 'text/html')
email.send()
FriendJoinedEmailLog.objects.create(user=profile.user_referrer, user_referred=profile.user)
self.close_db_connection()
time.sleep(600)
|
matthijsvk/multimodalSR
|
code/Experiments/neon-master/tests/test_bleuscore.py
|
Python
|
mit
| 1,914
| 0.001045
|
# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Test BLEUScore metric against reference
'''
from neon.transforms.cost import BLEUScore
def test_bleuscore():
# dataset with two sentences
sentences = ["a quick brown fox jumped",
"the rain in spain falls mainly on the plains"]
references = [["a fast brown fox jumped",
"a quick brown fox vaulted",
"a rapid fox of brown color jumped",
"the dog is running on the grass"],
["the precipitation in spain falls on the plains",
"spanish rain falls for the most part on the plains",
|
"the rain in spain falls in the plains most of the time",
"it is raining today"]]
# reference scores for the given set of reference sentences
bleu_score_references = [92.9, 88.0, 81.5, 67.1] # bleu1, bleu2, bleu3, bleu4
# compute scores
bleu_metric = BLEUScore()
bleu_metric(sentences, references)
# check against references
for score, reference in zip(bleu_metric.bleu_n, bleu_score_references):
assert round(score, 1) ==
|
reference
if __name__ == '__main__':
test_bleuscore()
|
ryfeus/lambda-packs
|
pytorch/source/PIL/FtexImagePlugin.py
|
Python
|
mit
| 3,322
| 0
|
"""
A Pillow loader for .ftc and .ftu files (FTEX)
Jerome Leclanche <jerome@leclan.ch>
The contents of this file are hereby released in the public domain (CC0)
Full text of the CC0 license:
https://creativecommons.org/publicdomain/zero/1.0/
Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001
The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
packed custom format called FTEX. This file format uses file extensions FTC
and FTU.
* FTC files are compressed textures (using standard texture compression).
* FTU files are not compressed.
Texture File Format
The FTC and FTU texture files both use the same format. This
has the following structure:
{header}
{format_directory}
{data}
Where:
{header} = {
u32:magic,
u32:version,
u32:width,
u32:height,
u32:mipmap_count,
u32:format_count
}
* The "magic" number is "FTEX".
* "width" and "height" are the dimensions of the texture.
* "mipmap_count" is the number of mipmaps in the texture.
* "format_count" is the number of texture formats (different versions of the
same
|
texture) in this file.
{format_directory} = format_count * { u32:format, u32:where }
The format value is 0 for DX
|
T1 compressed textures and 1 for 24-bit RGB
uncompressed textures.
The texture data for a format starts at the position "where" in the file.
Each set of texture data in the file has the following structure:
{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
* "mipmap_size" is the number of bytes in that mip level. For compressed
textures this is the size of the texture data compressed with DXT1. For 24 bit
uncompressed textures, this is 3 * width * height. Following this are the image
bytes for that mipmap level.
Note: All data is stored in little-Endian (Intel) byte order.
"""
import struct
from io import BytesIO
from . import Image, ImageFile
MAGIC = b"FTEX"
FORMAT_DXT1 = 0
FORMAT_UNCOMPRESSED = 1
class FtexImageFile(ImageFile.ImageFile):
format = "FTEX"
format_description = "Texture File Format (IW2:EOC)"
def _open(self):
struct.unpack("<I", self.fp.read(4)) # magic
struct.unpack("<i", self.fp.read(4)) # version
self._size = struct.unpack("<2i", self.fp.read(8))
mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))
self.mode = "RGB"
# Only support single-format files.
# I don't know of any multi-format file.
assert format_count == 1
format, where = struct.unpack("<2i", self.fp.read(8))
self.fp.seek(where)
mipmap_size, = struct.unpack("<i", self.fp.read(4))
data = self.fp.read(mipmap_size)
if format == FORMAT_DXT1:
self.mode = "RGBA"
self.tile = [("bcn", (0, 0) + self.size, 0, (1))]
elif format == FORMAT_UNCOMPRESSED:
self.tile = [("raw", (0, 0) + self.size, 0, ('RGB', 0, 1))]
else:
raise ValueError(
"Invalid texture compression format: %r" % (format))
self.fp.close()
self.fp = BytesIO(data)
def load_seek(self, pos):
pass
def _validate(prefix):
return prefix[:4] == MAGIC
Image.register_open(FtexImageFile.format, FtexImageFile, _validate)
Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"])
|
Zyell/home-assistant
|
homeassistant/components/thermostat/__init__.py
|
Python
|
mit
| 8,539
| 0
|
"""
Provides functionality to interact with thermostats.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/thermostat/
"""
import logging
import os
import voluptuous as vol
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.config import load_yaml_config_file
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.temperature import convert
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.components import (ecobee, zwave)
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_ON, STATE_OFF, STATE_UNKNOWN,
TEMP_CELSIUS)
DOMAIN = "thermostat"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = 60
SERVICE_SET_AWAY_MODE = "set_away_mode"
SERVICE_SET_TEMPERATURE = "set_temperature"
SERVICE_SET_FAN_MODE = "set_fan_mode"
STATE_HEAT = "heat"
STATE_COOL = "cool"
STATE_IDLE = "idle"
ATTR_CURRENT_TEMPERATURE = "current_temperature"
ATTR_AWAY_MODE = "away_mode"
ATTR_FAN = "fan"
ATTR_MAX_TEMP = "max_temp"
ATTR_MIN_TEMP = "min_temp"
ATTR_TEMPERATURE_LOW = "target_temp_low"
ATTR_TEMPERATURE_HIGH = "target_temp_high"
ATTR_OPERATION = "current_operation"
_LOGGER = logging.getLogger(__name__)
DISCOVERY_PLATFORMS = {
ecobee.DISCOVER_THERMOSTAT: 'ecobee',
zwave.DISCOVER_THERMOSTATS: 'zwave'
}
SET_AWAY_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_AWAY_MODE): cv.boolean,
})
SET_TEMPERATURE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_TEMPERATURE): vol.Coerce(float),
})
SET_FAN_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_FAN): cv.boolean,
})
def set_away_mode(hass, away_mode, entity_id=None):
"""Turn all or specified thermostat away mode on."""
data = {
ATTR_AWAY_MODE: away_mode
}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_AWAY_MODE, data)
def set_temperature(hass, temperature, entity_id=None):
"""Set new target temperature."""
data = {ATTR_TEMPERATURE: temperature}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, data)
def set_fan_mode(hass, fan_mode, entity_id=None):
"""Turn all or specified thermostat fan mode on."""
data = {
ATTR_FAN: fan_mode
}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data)
# pylint: disable=too-many-branches
def setup(hass, config):
"""Setup thermostats."""
component = EntityComponent(_LOGGER, DOMAIN, hass,
SCAN_INTERVAL, DISCOVERY_PLATFORMS)
component.setup(config)
descriptions = load_yaml_config_file(
os.path.join(os.path.dirname(__file__), 'services.yaml'))
def away_mode_set_service(service):
"""Set away mode on target thermostats."""
target_thermostats = component.extract_from_service(service)
away_mode = service.data[ATTR_AWAY_MODE]
for thermostat in target_thermostats:
if away_mode:
thermostat.turn_away_mode_on()
else:
thermostat.turn_away_mo
|
de_off()
thermostat.update_ha_state(True)
hass.services.register(
DOMAIN, SERVICE_SET_AWAY_MODE, away_mode_set_service,
descriptions.get(SERVICE_SET_AWAY_MODE),
schema=SET_AWAY_MODE_SCHEMA)
def temperature_set_service(service):
"""Set temperature on the target thermostats.
|
"""
target_thermostats = component.extract_from_service(service)
temperature = service.data[ATTR_TEMPERATURE]
for thermostat in target_thermostats:
thermostat.set_temperature(convert(
temperature, hass.config.temperature_unit,
thermostat.unit_of_measurement))
thermostat.update_ha_state(True)
hass.services.register(
DOMAIN, SERVICE_SET_TEMPERATURE, temperature_set_service,
descriptions.get(SERVICE_SET_TEMPERATURE),
schema=SET_TEMPERATURE_SCHEMA)
def fan_mode_set_service(service):
"""Set fan mode on target thermostats."""
target_thermostats = component.extract_from_service(service)
fan_mode = service.data[ATTR_FAN]
for thermostat in target_thermostats:
if fan_mode:
thermostat.turn_fan_on()
else:
thermostat.turn_fan_off()
thermostat.update_ha_state(True)
hass.services.register(
DOMAIN, SERVICE_SET_FAN_MODE, fan_mode_set_service,
descriptions.get(SERVICE_SET_FAN_MODE),
schema=SET_FAN_MODE_SCHEMA)
return True
class ThermostatDevice(Entity):
"""Representation of a thermostat."""
# pylint: disable=no-self-use
@property
def state(self):
"""Return the current state."""
return self.target_temperature or STATE_UNKNOWN
@property
def state_attributes(self):
"""Return the optional state attributes."""
data = {
ATTR_CURRENT_TEMPERATURE:
self._convert_for_display(self.current_temperature),
ATTR_MIN_TEMP: self._convert_for_display(self.min_temp),
ATTR_MAX_TEMP: self._convert_for_display(self.max_temp),
ATTR_TEMPERATURE:
self._convert_for_display(self.target_temperature),
ATTR_TEMPERATURE_LOW:
self._convert_for_display(self.target_temperature_low),
ATTR_TEMPERATURE_HIGH:
self._convert_for_display(self.target_temperature_high),
}
operation = self.operation
if operation is not None:
data[ATTR_OPERATION] = operation
is_away = self.is_away_mode_on
if is_away is not None:
data[ATTR_AWAY_MODE] = STATE_ON if is_away else STATE_OFF
is_fan_on = self.is_fan_on
if is_fan_on is not None:
data[ATTR_FAN] = STATE_ON if is_fan_on else STATE_OFF
return data
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
raise NotImplementedError
@property
def current_temperature(self):
"""Return the current temperature."""
raise NotImplementedError
@property
def operation(self):
"""Return current operation ie. heat, cool, idle."""
return None
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
raise NotImplementedError
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
return self.target_temperature
@property
def target_temperature_high(self):
"""Return the upper bound temperature we try to reach."""
return self.target_temperature
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return None
@property
def is_fan_on(self):
"""Return true if the fan is on."""
return None
def set_temperate(self, temperature):
"""Set new target temperature."""
pass
def turn_away_mode_on(self):
"""Turn away mode on."""
pass
def turn_away_mode_off(self):
"""Turn away mode off."""
pass
def turn_fan_on(self):
"""Turn fan on."""
pass
def turn_fan_off(self):
"""Turn fan off."""
pass
@property
def min_temp(self):
"""Return the minimum temperature."""
return convert(7, TEMP_CELSIUS, self.unit_of_measurement)
@property
def max_temp(self):
"""Return the maximum temperature."""
return convert(35, TEMP_CELSIUS, self.unit_of_measurement)
def _convert_for_display(self, temp):
"""Convert temperature into preferred units for display purposes."""
if temp is None:
return None
|
mvendra/mvtools
|
detect_repo_type.py
|
Python
|
mit
| 1,152
| 0.007813
|
#!/usr/bin/env python3
import sys
import os
import path_utils
import git_lib
import svn_lib
REPO_TYPE_GIT_BARE="git/bare"
REPO_TYPE_GIT_STD="git/std"
REPO_TYPE_GIT_SUB="git/sub"
REPO_TYPE_SVN="svn"
REPO_TYPES = [REPO_TYPE_GIT_BARE, REPO_TYPE_GIT_STD, REPO_TYPE_GIT_SUB, REPO_TYPE_SVN]
def detect_repo_type(path):
if not os.path.exists(path):
return False, "Path %s doesn't exist." % path
v, r = git_lib.is_repo_bare(path)
if v and r:
return True, REPO_TYPE_GIT_BARE
v, r = git_lib.is_repo_standard(path)
if v and r:
return True, REPO_TYPE_GIT_STD
v, r = git_lib.is_repo_submodule(path
|
)
if v and r:
return True, REPO_TYPE_GIT_SUB
v, r = svn_lib.is_svn_repo(path)
if v and r:
return True, REPO_TYPE_SVN
return True, None
def is_any_repo_type(repo_type):
return repo_type in REPO_TYPES
def puaq():
print("Usage
|
: %s path" % path_utils.basename_filtered(__file__))
sys.exit(1)
if __name__ == "__main__":
if len(sys.argv) < 2:
puaq()
path = sys.argv[1]
v, r = detect_repo_type(path)
print(r)
if not v:
sys.exit(1)
|
chris-hld/sfs-python
|
sfs/time/source.py
|
Python
|
mit
| 3,359
| 0
|
"""Compute the sound field generated by a sound source.
The Green's function describes the spatial sound propagation over time.
.. include:: math-definitions.rst
"""
from __future__ import division
import numpy as np
from .. import util
from .. import defs
def point(xs, signal, observation_time, grid, c=None):
r"""Source model for a point source: 3D Green's function.
Calculates the scalar sound pressure field for a given point in
time, evoked by source excitation signal.
Parameters
----------
xs
|
: (3,) array_like
Position of source in cartesian coordinates.
sign
|
al : (N,) array_like + float
Excitation signal consisting of (mono) audio data and a sampling
rate (in Hertz). A `DelayedSignal` object can also be used.
observation_time : float
Observed point in time.
grid : triple of array_like
The grid that is used for the sound field calculations.
See `sfs.util.xyz_grid()`.
c : float, optional
Speed of sound.
Returns
-------
numpy.ndarray
Scalar sound pressure field, evaluated at positions given by
*grid*.
Notes
-----
.. math::
g(x-x_s,t) = \frac{1}{4 \pi |x - x_s|} \dirac{t - \frac{|x -
x_s|}{c}}
"""
xs = util.asarray_1d(xs)
data, samplerate, signal_offset = util.as_delayed_signal(signal)
data = util.asarray_1d(data)
grid = util.as_xyz_components(grid)
if c is None:
c = defs.c
r = np.linalg.norm(grid - xs)
# evaluate g over grid
weights = 1 / (4 * np.pi * r)
delays = r / c
base_time = observation_time - signal_offset
return weights * np.interp(base_time - delays,
np.arange(len(data)) / samplerate,
data, left=0, right=0)
def point_image_sources(x0, signal, observation_time, grid, L, max_order,
coeffs=None, c=None):
"""Point source in a rectangular room using the mirror image source model.
Parameters
----------
x0 : (3,) array_like
Position of source in cartesian coordinates.
signal : (N,) array_like + float
Excitation signal consisting of (mono) audio data and a sampling
rate (in Hertz). A `DelayedSignal` object can also be used.
observation_time : float
Observed point in time.
grid : triple of array_like
The grid that is used for the sound field calculations.
See `sfs.util.xyz_grid()`.
L : (3,) array_like
Dimensions of the rectangular room.
max_order : int
Maximum number of reflections for each image source.
coeffs : (6,) array_like, optional
Reflection coeffecients of the walls.
If not given, the reflection coefficients are set to one.
c : float, optional
Speed of sound.
Returns
-------
numpy.ndarray
Scalar sound pressure field, evaluated at positions given by
*grid*.
"""
if coeffs is None:
coeffs = np.ones(6)
positions, order = util.image_sources_for_box(x0, L, max_order)
source_strengths = np.prod(coeffs**order, axis=1)
p = 0
for position, strength in zip(positions, source_strengths):
if strength != 0:
p += strength * point(position, signal, observation_time, grid, c)
return p
|
arangaraju/graph-stix
|
tests/conftest.py
|
Python
|
mit
| 316
| 0
|
#!/
|
usr/bin/env python
# -*- coding: utf-8 -*-
"""
Dummy conftest.py for graph_stix.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/l
|
atest/plugins.html
"""
from __future__ import print_function, absolute_import, division
import pytest
|
namhyung/uftrace
|
tests/t142_recv_multi.py
|
Python
|
gpl-2.0
| 1,783
| 0.001122
|
#!/usr/bin/env python
import os.path
import random
import subprocess as sp
from runtest import TestBase
TDIR = 'xxx'
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'abc', """
# DURATION TID FUNCTION
62.202 us [28141] | __cxa_atexit();
[28141] | main() {
[28141] | a() {
[28141] | b() {
[28141] | c() {
0.753 us [28141] | getpid();
1.430 us [28141] | } /* c */
1.915 us [28141] | } /* b */
2.405 us [28141] | } /* a */
3.005 us [28141] | } /* main */
""")
def prerun(self, timeout):
self.gen_port()
self.subcmd = 'recv'
self.option = '-d %s --port %s' % (TDIR, self.port)
self.exearg = ''
recv_cmd = self.runcmd()
self.pr_debug('prerun command: ' + recv_cmd)
self.recv_p = sp.Popen(recv_cmd.split())
# recorded but not used
self.subcmd = 'record'
self.option = '--host %s --port %s' % ('localhost', self.po
|
rt)
self.exearg = 't-' + self.name
record_cmd = self.runcmd()
self.pr_debug('prerun command: ' + record_cmd)
|
sp.call(record_cmd.split())
# use this
self.pr_debug('run another record')
self.dirname = 'dir-' + str(random.randint(100000, 999999))
self.pr_debug('after randint')
self.option += ' -d ' + self.dirname
record_cmd = self.runcmd()
self.pr_debug('prerun command: ' + record_cmd)
sp.call(record_cmd.split())
return TestBase.TEST_SUCCESS
def setup(self):
self.subcmd = 'replay'
self.option = '-d %s' % os.path.join(TDIR, self.dirname)
def postrun(self, ret):
self.recv_p.terminate()
return ret
|
TaliesinSkye/evennia
|
wintersoasis-master/commands/unloggedin.py
|
Python
|
bsd-3-clause
| 12,835
| 0.004441
|
"""
Commands that are available from the connect screen.
"""
import re
import traceback
from django.conf import settings
from src.players.models import PlayerDB
from src.objects.models import ObjectDB
from src.server.models import ServerConfig
from src.comms.models import Channel
from src.utils import create, logger, utils, ansi
from src.commands.default.muxcommand import MuxCommand
from src.commands.cmdhandler import CMD_LOGINSTART
# limit symbol import for API
__all__ = ("CmdUnconnectedConnect", "CmdUnconnectedCreate", "CmdUnconnectedQuit", "CmdUnconnectedLook", "CmdUnconnectedHelp", "Magic")
CONNECTION_SCREEN_MODULE = settings.CONNECTION_SCREEN_MODULE
CONNECTION_SCREEN = ""
try:
CONNECTION_SCREEN = ansi.parse_ansi(utils.string_from_module(CONNECTION_SCREEN_MODULE))
except Exception:
pass
if not CONNECTION_SCREEN:
CONNECTION_SCREEN = "\nEvennia: Error in CONNECTION_SCREEN MODULE (randomly picked connection screen variable is not a string). \nEnter 'help' for aid."
class Magic(MuxCommand):
"""
Hidden command for the web client's magic cookie authenticator.
"""
key = "magic"
def func(self):
session = self.caller
player = PlayerDB.objects.player_search(self.lhs)
if len(player) != 1:
player = None
else:
player = player[0]
if player.name.lower() != self.lhs.lower():
player=None
pswd = None
if player:
pswd = self.rhs == player.db.magic_cookie
if not (player and pswd):
# No playername or password match
session.msg("Could not verify Magic Cookie. Please email the server administrator for assistance.")
return
# Check IP and/or name bans
bans = ServerConfig.objects.conf("server_bans")
if bans and (any(tup[0]==player.name for tup in bans)
or
any(tup[2].match(session.address[0]) for tup in bans if tup[2])):
# this is a banned IP or name!
string = "{rYou have been banned and cannot continue from here."
string += "\nIf you feel this ban is in error, please email an admin.{x"
session.msg(string)
session.execute_cmd("quit")
return
session.sessionhandler.login(session, player)
class Connect(MuxCommand):
"""
Connect to the game.
Usage (at login screen):
connect playername password
connect "player name" "pass word"
Use the create command to first create an account before logging in.
If you have spaces in your name, enclose it in quotes.
"""
key = "connect"
aliases = ["conn", "con", "co"]
locks = "cmd:all()" # not really needed
def func(self):
"""
Uses the Django admin api. Note that unlogged-in commands
have a unique position in that their func() receives
a session object instead of a source_object like all
other types of logged-in commands (this is because
there is no object yet before the player has logged in)
"""
session = self.caller
args = self.args
# extract quoted parts
parts = [part.strip() for part in re.split(r"\"|\'", args) if part.strip()]
if len(parts) == 1:
# this was (hopefully) due to no quotes being found
parts = parts[0].split(None, 1)
if len(parts) != 2:
session.msg("\n\r Usage (without <>): connect <name> <password>")
return
playername, password = parts
# Match account name and check password
player = PlayerDB.objects.player_search(playername)
if len(player) != 1:
player = None
else:
player = player[0]
if player.name.lower() != playername.lower():
player=None
pswd = None
if player:
pswd = player.check_password(password)
if not (player and pswd):
# No playername or password match
string = "Wrong login information given.\nIf you have spaces in your name or "
string += "password, don't forget to enclose it in quotes. Also capitalization matters."
string += "\nIf you are new you should first create a new account "
string += "using the 'create' command."
session.msg(string)
return
# Check IP and/or name bans
bans = ServerConfig.objects.conf("server_bans")
if bans and (any(tup[0]==player.name for tup in bans)
or
any(tup[2].match(session.address[0]) for tup in bans if tup[2])):
# this is a banned IP or name!
string = "{rYou have been banned and cannot continue from here."
string += "\nIf you feel this ban is in error, please email an admin.{x"
session.msg(string)
session.execute_cmd("quit")
return
# actually do the login. This will call all other hooks:
# session.at_init()
# if character:
# at_first_login() # only once
# at_pre_login()
# player.at_post_login() - calls look if no character is set
# character.at_post_login() - this calls look command by default
session.sessionhandler.login(session, player)
class Create(MuxCommand):
"""
Create a new account.
Usage (at login screen):
create <playername> <password>
create "player name" "pass word"
This creates a new player account.
If you have spaces in your name, enclose it in quotes.
"""
key = "create"
aliases = ["cre", "cr"]
locks = "cmd:all()"
def func(self):
"Do checks and create account"
session = self.caller
args = self.args.strip()
# extract quoted parts
parts = [part.strip() for part in re.split(r"\"|\'", args) if part.strip()]
if len(parts) == 1:
# this was (hopefully) due to no quotes being found
parts = parts[0].split(None, 1)
if len(parts) != 2:
string = "\n Usage (without <>): create <name> <password>"
|
string += "\nIf <name> or <password> contains spaces, enclose it in quotes."
session.msg(string)
return
playername, password = parts
print "playername '%s', password: '%s'" % (playername, password)
# sanity checks
if not re.findall('^[\w. @+-]+$', playername) or not (0 < len(pl
|
ayername) <= 30):
# this echoes the restrictions made by django's auth module (except not
# allowing spaces, for convenience of logging in).
string = "\n\r Playername can max be 30 characters or fewer. Letters, spaces, digits and @/./+/-/_ only."
session.msg(string)
return
# strip excessive spaces in playername
playername = re.sub(r"\s+", " ", playername).strip()
if PlayerDB.objects.filter(user__username__iexact=playername) or PlayerDB.objects.filter(username__iexact=playername):
# player already exists (we also ignore capitalization here)
session.msg("Sorry, there is already a player with the name '%s'." % playername)
return
if not re.findall('^[\w. @+-]+$', password) or not (3 < len(password)):
string = "\n\r Password should be longer than 3 characers. Letters, spaces, digits and @\.\+\-\_ only."
string += "\nFor best security, make it longer than 8 characters. You can also use a phrase of"
string += "\nmany words if you enclose the password in quotes."
session.msg(string)
return
# everything's ok. Create the new player account.
try:
default_home = ObjectDB.objects.get_id(settings.CHARACTER_DEFAULT_HOME)
typeclass = settings.BASE_CHARACTER_TYPECLASS
permissions = settings.PERMISSION_PLAYER_DEFAULT
try:
new_character = create.create_player(playername, None, password,
permissions=permissions,
|
CMU-Robotics-Club/roboticsclub.org
|
robocrm/migrations/0007_robouser_magnetic.py
|
Python
|
mit
| 461
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
|
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('robocrm', '0006_auto_20141005_1800'),
]
operations = [
migrations.AddField(
model_name='robouser',
name='magnetic',
field=models.CharField(max_length=9, null=True, blank=True),
preserve_default=True,
),
|
]
|
levibostian/myBlanky
|
googleAppEngine/google/appengine/tools/dev_appserver_apiserver.py
|
Python
|
mit
| 53,339
| 0.00718
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Helper CGI for Apiserver in the development app server.
This is a fake apiserver proxy that does simple transforms on requests that
come in to /_ah/api and then re-dispatches them to /_ah/spi. It does not do
any authentication, quota checking, DoS checking, etc.
In addition, the proxy loads api configs from
/_ah/spi/BackendService.getApiConfigs prior to making the first call to the
backend at /_ah/spi and afterwards if app.yaml is changed.
"""
from __future__ import with_statement
import base64
import cgi
import cStringIO
import httplib
try:
import json
except ImportError:
import simplejson as json
import logging
import mimetools
import re
API_SERVING_PATTERN = '/_ah/api/.*'
SPI_ROOT_FORMAT = 'http://127.0.0.1:%s/_ah/spi/%s'
_API_REST_PATH_FORMAT = '{!name}/{!version}/%s'
_PATH_VARIABLE_PATTERN = r'[a-zA-Z_][a-zA-Z_.\d]*'
_RESERVED_PATH_VARIABLE_PATTERN = r'!' + _PATH_VARIABLE_PATTERN
_PATH_VALUE_PATTERN = r'[^:/?#\[\]{}]*'
_CORS_HEADER_ORIGIN = 'Origin'.lower()
_CORS_HEADER_REQUEST_METHOD = 'Access-Control-Request-Method'.lower()
_CORS_HEADER_REQUEST_HEADERS = 'Access-Control-Request-Headers'.lower()
_CORS_HEADER_ALLOW_ORIGIN = 'Access-Control-Allow-Origin'
_CORS_HEADER_ALLOW_METHODS = 'Access-Control-Allow-Methods'
_CORS_HEADER_ALLOW_HEADERS = 'Access-Control-Allow-Headers'
_CORS_ALLOWED_METHODS = frozenset(('DELETE', 'GET', 'PATCH', 'POST', 'PUT'))
_INVALID_ENUM_TEMPLATE = 'Invalid string value: %r. Allowed values: %r'
class RequestRejectionError(Exception):
"""Base class for rejected requests.
To be raised when parsing the request values and comparing them against the
generated discovery document.
"""
def Message(self): raise NotImplementedError
def Errors(self): raise NotImplementedError
def ToJson(self):
"""JSON string representing the rejected value.
Calling this will fail on the base class since it relies on Message and
Errors being implemented on the class. It is up to a subclass to implement
these methods.
Returns:
JSON string representing the rejected value.
"""
return json.dumps({
'error': {
'errors': self.Errors(),
'code': 400,
'message': self.Message(),
},
})
class EnumRejectionError(RequestRejectionError):
"""Custom request rejection exception for enum values."""
def __init__(self, parameter_name, value, allowed_values):
"""Constructor for EnumRejectionError.
Args:
parameter_name: String; the name of the enum parameter which had a value
rejected.
value: The actual value passed in for the enum. Usually string.
allowed_values: List of strings allowed for the enum.
"""
self.parameter_name = parameter_name
self.value = value
self.allowed_values = allowed_values
def Message(self):
"""A descriptive message describing the error."""
return _INVALID_ENUM_TEMPLATE % (self.value, self.allowed_values)
def Errors(self):
"""A list containing the errors associated with the rejection.
Intended to mimic those returned from an API in production in Google's API
infrastructure.
Returns:
A list with a single element that is a dictionary containing the error
information.
"""
return [
{
'domain': 'global',
'reason': 'invalidParameter',
'message': self.Message(),
'locationType': 'parameter',
'location': self.parameter_name,
},
]
class ApiRequest(object):
"""Simple data object representing an API request.
Takes an app_server CGI request and environment in the constructor.
Parses the request into convenient pieces and stores them as members.
"""
API_PREFIX = '/_ah/api/'
def __init__(self, base_env_dict, old_dev_appserver, request=None):
"""Constructor.
Args:
base_env_dict: Dictionary of CGI environment parameters.
old_dev_appserver: used to call standard SplitURL method.
request: AppServerRequest. Can be None.
"""
self.cgi_env = base_env_dict
self.headers = {}
self.http_method = base_env_dict['REQUEST_METHOD']
self.port = base_env_dict['SERVER_PORT']
if request:
self.path, self.query = old_dev_appserver.SplitURL(request.relative_url)
self.body = request.infile.read()
for header in request.headers.headers:
header_name, header_value = header.split(':', 1)
self.headers[header_name.strip()] = header_value.strip()
else:
self.body = ''
self.path = self.API_PREFIX
self.query = ''
assert self.path.startswith(self.API_PREFIX)
self.path = self.path[len(self.API_PREFIX):]
self.parameters = cgi.parse_qs(self.query, keep_blank_values=True)
self.body_obj = json.loads(self.body) if self.body else {}
self.request_id = None
def _IsRpc(self):
return self.path == 'rpc'
class DiscoveryApiProxy(object):
"""Proxies discovery service requests to a known cloud endpoint."""
_DISCOVERY_PROXY_HOST = 'webapis-discovery.appspot.com'
_STATIC_PROXY_HOST = 'webapis-discovery.appspot.com'
_DISCOVERY_API_PATH_PREFIX = '/_ah/api/discovery/v1/'
def _DispatchRequest(self, path, body):
"""Proxies GET request to discovery service API.
Args:
path: URL path relative to discovery service.
body: HTTP POST request body.
Returns:
HTTP response body or None if it failed.
"""
full_path = self._DISCOVERY_API_PATH_PREFIX + path
headers = {'Content-type': 'application/json'}
connection = httplib.HTTPSConnection(self._DISCOVERY_PROXY_HOST)
try:
connection.request('POST', full_path, body, headers)
response = connection.getresponse()
response_body = response.read()
if response.status != 200:
logging.error('Discovery API proxy failed on %s with %d.\r\n'
'Request: %s\r\nResponse: %s',
full_path, response.status, body, response_body)
return None
return response_body
finally:
connection.close()
def GenerateDiscoveryDoc(self, api_config, api_format):
"""Generates a discovery document from an API file.
Args:
api_config: .api file contents as string.
api_format: 'rest' or 'rpc' depending on the which kind of discvoery doc.
Returns:
Discovery doc as JSON string.
Raises:
ValueError: When api_format is invalid.
"""
if api_format not in ['rest', 'rpc']:
raise ValueError('Invalid API format')
path = 'apis/generate/' + api_format
request_dict = {'config': json.dumps(api_config)}
request_body = json.dumps(request_dict)
return self._DispatchRequest(path, request_body)
def GenerateDirectory(self, api_configs):
"""Generates an API directory from a list of API files.
Args:
api_configs: list of strings which are the .api file contents.
Returns:
API directory as JSON string.
"""
request_dict = {'configs': api_configs}
request_body = json.dumps(request_dict)
return self._DispatchRequest('apis/generate/directory', request_body)
def GetStaticFile(self, path):
"""Returns static content via a GET request.
Args:
path: URL path
|
after the domain.
Returns:
Tuple of (response, response_body):
response: HTTPResponse object.
response_body: Response body as string.
"""
connection = httplib.HTTPSConnection(self._STATIC_PROXY_HOST)
try:
connection.request('GET', path, None, {
|
})
response = con
|
dob71/x2swn
|
skeinforge/fabmetheus_utilities/xml_simple_writer.py
|
Python
|
gpl-3.0
| 4,447
| 0.02586
|
"""
XML tag writer utilities.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
import cStringIO
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Nophead <http://hydraraptor.blogspot.com/>\nArt of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def addBeginEndInnerXMLTag(attributes, depth, innerText, localName, output, text=''):
'Add the begin and end xml tag and the inner text if any.'
if len( innerText ) > 0:
addBeginXMLTag(attributes, depth, localName, output, text)
output.write( innerText )
addEndXMLTag(depth, localName, output)
else:
addClosedXMLTag(attributes, depth, localName, output, text)
def addBeginXMLTag(attributes, depth, localName, output, text=''):
'Add the begin xml tag.'
depthStart = '\t' * depth
output.write('%s<%s%s>%s\n' % (depthStart, localName, getAttributesString(attributes), text))
def addClosedXMLTag(attributes, depth, localName, output, text=''):
'Add the closed xml tag.'
depthStart = '\t' * depth
attributesString = getAttributesString(attributes)
if len(text) > 0:
output.write('%s<%s%s >%s</%s>\n' % (depthStart, localName, attributesString, text, localName))
else:
output.write('%s<%s%s />\n' % (depthStart, localName, attributesString))
def addEndXMLTag(depth, localName, output):
'Add the end xml tag.'
depthStart = '\t' * depth
output.write('%s</%s>\n' % (depthStart, localName))
def addXMLFromLoopComplexZ(attributes, depth, loop, output, z):
'Add xml from loop.'
addBeginXMLTag(attributes, depth, 'path', output)
for pointComplexIndex in xrange(len(loop)):
pointComplex = loop[pointComplexIndex]
addXMLFromXYZ(depth + 1, pointComplexIndex, output, pointComplex.real, pointComplex.imag, z)
addEndXMLTag(depth, 'path', output)
def addXMLFromObjects(depth, objects, output):
'Add xml from objects.'
for object in objects:
object.addXML(depth, output)
def addXMLFromVertexes(depth, output, vertexes):
'Add xml from loop.'
for vertexIndex in xrange(len(vertexes)):
vertex = vertexes[vertexIndex]
addXMLFromXYZ(depth + 1, vertexIndex, output, vertex.x, vertex.y, vertex.z)
def addXMLFromXYZ(depth, index, output, x, y, z):
'Add xml from x, y & z.'
attributes = {'index' : str(index)}
if x != 0.0:
attributes['x'] = str(x)
if y != 0.0:
attributes['y'] = str(y)
if z != 0.0:
attributes['z'] = str(z)
addClosedXMLTag(attributes, depth, 'vertex', output)
def compareAttributeKeyAscending(key, otherKey):
'Get comparison in order to sort attribute keys in ascending order, with the id key first and name second.'
if key == 'id':
return - 1
if otherKey == 'id':
return 1
|
if key == 'name':
return - 1
if otherKey == 'name':
return 1
if key < otherKey:
return - 1
return int(key > otherKey)
def getAttributesString(attributes):
'Add the closed xml tag.'
attributesString = ''
attributesKeys = attributes.keys()
attributesKeys.sort(compareAttributeKeyAscending)
for attributesKey in attributesKeys:
valueString = st
|
r(attributes[attributesKey])
if "'" in valueString:
attributesString += ' %s="%s"' % (attributesKey, valueString)
else:
attributesString += " %s='%s'" % (attributesKey, valueString)
return attributesString
def getBeginGeometryXMLOutput(elementNode=None):
'Get the beginning of the string representation of this boolean geometry object info.'
output = getBeginXMLOutput()
attributes = {}
if elementNode != None:
documentElement = elementNode.getDocumentElement()
attributes = documentElement.attributes
addBeginXMLTag(attributes, 0, 'fabmetheus', output)
return output
def getBeginXMLOutput():
'Get the beginning of the string representation of this object info.'
output = cStringIO.StringIO()
output.write("<?xml version='1.0' ?>\n")
return output
def getDictionaryWithoutList(dictionary, withoutList):
'Get the dictionary without the keys in the list.'
dictionaryWithoutList = {}
for key in dictionary:
if key not in withoutList:
dictionaryWithoutList[key] = dictionary[key]
return dictionaryWithoutList
def getEndGeometryXMLString(output):
'Get the string representation of this object info.'
addEndXMLTag(0, 'fabmetheus', output)
return output.getvalue()
|
phenoxim/cinder
|
cinder/tests/unit/volume/test_init_host.py
|
Python
|
apache-2.0
| 14,005
| 0
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for volume init host method cases."""
import mock
from oslo_config import cfg
from cinder import context
from cinder import objects
from ci
|
nder.tests.unit import utils as tests_utils
from cinder.tests.unit import volume as base
from cinder.volume import driver
from cinder.volume import utils as volutils
CONF = cfg.CONF
class VolumeInit
|
HostTestCase(base.BaseVolumeTestCase):
def setUp(self):
super(VolumeInitHostTestCase, self).setUp()
self.service_id = 1
@mock.patch('cinder.manager.CleanableManager.init_host')
def test_init_host_count_allocated_capacity(self, init_host_mock):
vol0 = tests_utils.create_volume(
self.context, size=100, host=CONF.host)
vol1 = tests_utils.create_volume(
self.context, size=128,
host=volutils.append_host(CONF.host, 'pool0'))
vol2 = tests_utils.create_volume(
self.context, size=256,
host=volutils.append_host(CONF.host, 'pool0'))
vol3 = tests_utils.create_volume(
self.context, size=512,
host=volutils.append_host(CONF.host, 'pool1'))
vol4 = tests_utils.create_volume(
self.context, size=1024,
host=volutils.append_host(CONF.host, 'pool2'))
self.volume.init_host(service_id=self.service_id)
init_host_mock.assert_called_once_with(
service_id=self.service_id, added_to_cluster=None)
stats = self.volume.stats
self.assertEqual(2020, stats['allocated_capacity_gb'])
self.assertEqual(
384, stats['pools']['pool0']['allocated_capacity_gb'])
self.assertEqual(
512, stats['pools']['pool1']['allocated_capacity_gb'])
self.assertEqual(
1024, stats['pools']['pool2']['allocated_capacity_gb'])
# NOTE(jdg): On the create we have host='xyz', BUT
# here we do a db.volume_get, and now the host has
# been updated to xyz#pool-name. Note this is
# done via the managers init, which calls the drivers
# get_pool method, which in the legacy case is going
# to be volume_backend_name or None
vol0.refresh()
expected_host = volutils.append_host(CONF.host, 'fake')
self.assertEqual(expected_host, vol0.host)
self.volume.delete_volume(self.context, vol0)
self.volume.delete_volume(self.context, vol1)
self.volume.delete_volume(self.context, vol2)
self.volume.delete_volume(self.context, vol3)
self.volume.delete_volume(self.context, vol4)
@mock.patch('cinder.manager.CleanableManager.init_host')
def test_init_host_count_allocated_capacity_cluster(self, init_host_mock):
cluster_name = 'mycluster'
self.volume.cluster = cluster_name
# All these volumes belong to the same cluster, so we will calculate
# the capacity of them all because we query the DB by cluster_name.
tests_utils.create_volume(self.context, size=100, host=CONF.host,
cluster_name=cluster_name)
tests_utils.create_volume(
self.context, size=128, cluster_name=cluster_name,
host=volutils.append_host(CONF.host, 'pool0'))
tests_utils.create_volume(
self.context, size=256, cluster_name=cluster_name,
host=volutils.append_host(CONF.host + '2', 'pool0'))
tests_utils.create_volume(
self.context, size=512, cluster_name=cluster_name,
host=volutils.append_host(CONF.host + '2', 'pool1'))
tests_utils.create_volume(
self.context, size=1024, cluster_name=cluster_name,
host=volutils.append_host(CONF.host + '3', 'pool2'))
# These don't belong to the cluster so they will be ignored
tests_utils.create_volume(
self.context, size=1024,
host=volutils.append_host(CONF.host, 'pool2'))
tests_utils.create_volume(
self.context, size=1024, cluster_name=cluster_name + '1',
host=volutils.append_host(CONF.host + '3', 'pool2'))
self.volume.init_host(service_id=self.service_id)
init_host_mock.assert_called_once_with(
service_id=self.service_id, added_to_cluster=None)
stats = self.volume.stats
self.assertEqual(2020, stats['allocated_capacity_gb'])
self.assertEqual(
384, stats['pools']['pool0']['allocated_capacity_gb'])
self.assertEqual(
512, stats['pools']['pool1']['allocated_capacity_gb'])
self.assertEqual(
1024, stats['pools']['pool2']['allocated_capacity_gb'])
@mock.patch.object(driver.BaseVD, "update_provider_info")
def test_init_host_sync_provider_info(self, mock_update):
vol0 = tests_utils.create_volume(
self.context, size=1, host=CONF.host)
vol1 = tests_utils.create_volume(
self.context, size=1, host=CONF.host)
vol2 = tests_utils.create_volume(
self.context, size=1, host=CONF.host, status='creating')
snap0 = tests_utils.create_snapshot(self.context, vol0.id)
snap1 = tests_utils.create_snapshot(self.context, vol1.id)
# Return values for update_provider_info
volumes = [{'id': vol0.id, 'provider_id': '1 2 xxxx'},
{'id': vol1.id, 'provider_id': '3 4 yyyy'}]
snapshots = [{'id': snap0.id, 'provider_id': '5 6 xxxx'},
{'id': snap1.id, 'provider_id': '7 8 yyyy'}]
mock_update.return_value = (volumes, snapshots)
# initialize
self.volume.init_host(service_id=self.service_id)
# Grab volume and snapshot objects
vol0_obj = objects.Volume.get_by_id(context.get_admin_context(),
vol0.id)
vol1_obj = objects.Volume.get_by_id(context.get_admin_context(),
vol1.id)
vol2_obj = objects.Volume.get_by_id(context.get_admin_context(),
vol2.id)
snap0_obj = objects.Snapshot.get_by_id(self.context, snap0.id)
snap1_obj = objects.Snapshot.get_by_id(self.context, snap1.id)
# Check updated provider ids
self.assertEqual('1 2 xxxx', vol0_obj.provider_id)
self.assertEqual('3 4 yyyy', vol1_obj.provider_id)
self.assertIsNone(vol2_obj.provider_id)
self.assertEqual('5 6 xxxx', snap0_obj.provider_id)
self.assertEqual('7 8 yyyy', snap1_obj.provider_id)
# Clean up
self.volume.delete_snapshot(self.context, snap0_obj)
self.volume.delete_snapshot(self.context, snap1_obj)
self.volume.delete_volume(self.context, vol0)
self.volume.delete_volume(self.context, vol1)
@mock.patch.object(driver.BaseVD, "update_provider_info")
def test_init_host_sync_provider_info_no_update(self, mock_update):
vol0 = tests_utils.create_volume(
self.context, size=1, host=CONF.host)
vol1 = tests_utils.create_volume(
self.context, size=1, host=CONF.host)
snap0 = tests_utils.create_snapshot(self.context, vol0.id)
snap1 = tests_utils.create_snapshot(self.context, vol1.id)
mock_update.return_value = ([], [])
# initialize
self.volume.init_host(service_id=self.service_id)
# Grab volume and snapshot objects
vol0_obj = objects.Volume.get_by_id(context.get_admin_c
|
ezequielpereira/Time-Line
|
libs/wx/tools/Editra/src/style_editor.py
|
Python
|
gpl-3.0
| 30,179
| 0.001922
|
###############################################################################
# Name: style_editor.py #
# Purpose: Syntax Highlighting configuration dialog #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
Provides an editor dialog for graphically editing how the text is presented in
the editor when syntax highlighting is turned on. It does this by taking the
data from the controls and formating it into an Editra Style Sheet that the
editor can load to configure the styles of the text.
@summary: Gui for creating custom Editra Style Sheets
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: style_editor.py 63520 2010-02-19 03:27:26Z CJP $"
__revision__ = "$Revision: 63520 $"
#--------------------------------------------------------------------------#
# Imports
import os
import glob
import wx
# Editra Imports
import ed_glob
from profiler import Profile_Get, Profile_Set
import ed_basestc
from ed_style import StyleItem
import util
import syntax.syntax as syntax
import eclib
# Function Aliases
#_ = wx.GetTranslation
from gettext import gettext as _
# Global Values
ID_STYLES = wx.NewId()
ID_FORE_COLOR = wx.NewId()
ID_BACK_COLOR = wx.NewId()
ID_BOLD = wx.NewId()
ID_ITALIC = wx.NewId()
ID_EOL = wx.NewId()
ID_ULINE = wx.NewId()
ID_FONT = wx.NewId()
ID_FONT_SIZE = wx.NewId()
SETTINGS_IDS = [ ID_FORE_COLOR, ID_BACK_COLOR, ID_BOLD, ID_ITALIC,
ID_EOL, ID_ULINE, ID_FONT, ID_FONT_SIZE ]
#--------------------------------------------------------------------------#
class StyleEditor(wx.Dialog):
"""This class creates the window that contains the controls
for editing/configuring the syntax highlighting styles it acts
as a graphical way to interact with the L{ed_style.StyleMgr}.
@see: ed_style.StyleMgr
"""
def __init__(self, parent, id_=wx.ID_ANY, title=_("Style Editor"),
style=wx.DEFAULT_DIALOG_STYLE | wx.RAISED_BORDER):
"""Initializes the Dialog
@todo: rework the layout
"""
wx.Dialog.__init__(self, parent, id_, title, style=style)
# Attributes
self.LOG = wx.GetApp().GetLog()
self.preview = ed_basestc.EditraBaseStc(self, wx.ID_ANY, size=(-1, 200),
style=wx.SUNKEN_BORDER)
self.styles_orig = self.preview.GetStyleSet()
self.preview.SetCaretLineVisible(True)
self.styles_new = DuplicateStyleDict(self.styles_orig)
self.preview.SetStyles('preview', self.styles_new, True)
self.OpenPreviewFile('cpp')
# XXX On Windows the settings pane must be made before the
# sizer it is to be put in or it becomes unable to recieve
# focus. But is the exact opposite on mac/gtk. This is really
# a pain or possibly a bug?
if wx.Platform == '__WXMSW__':
self._settings = SettingsPanel(self)
# Main Sizer
sizer = wx.BoxSizer(wx.VERTICAL)
# Control Panel
self.ctrl_pane = wx.Panel(self, wx.ID_ANY)
ctrl_sizer = wx.BoxSizer(wx.HORIZONTAL) # Main Control Sizer
left_colum = wx.BoxSizer(wx.VERTICAL) # Left Column
right_colum = wx.BoxSizer(wx.VERTICAL) # Right Column
# XXX On Mac/GTK if panel is created before sizer all controls in
# it become unable to recieve focus from clicks, but it is the
# exact opposite on windows!
if wx.Platform != '__WXMSW__':
self._settings = SettingsPanel(self)
# Control Panel Left Column
left_colum.AddMany([((10, 10), 0),
(self.__StyleSheets(), 0, wx.ALIGN_LEFT),
((10, 10), 0),
(self.__LexerChoice(), 0, wx.ALIGN_LEFT),
((10, 10), 0),
(self.__StyleTags(), 1, wx.ALIGN_LEFT|wx.EXPAND),
((10, 10), 0)])
ctrl_sizer.Add(left_colum, 0, wx.ALIGN_LEFT)
# Divider
ctrl_sizer.Add(wx.StaticLine(self.ctrl_pane, size=(-1, 2),
style=wx.LI_VERTICAL),
0, wx.ALIGN_CENTER_HORIZONTAL | wx.EXPAND)
ctrl_sizer.Add((5, 5), 0)
# Control Panel Right Column
right_colum.Add(self._settings, 1, wx.ALIGN_LEFT | wx.EXPAND)
ctrl_sizer.AddMany([(right_colum, 1, wx.ALIGN_RIGHT | wx.EXPAND),
((5, 5), 0)])
# Finish Control Panel Setup
self.ctrl_pane.SetSizer(ctrl_sizer)
sizer.AddMany([((10, 10)), (self.ctrl_pane, 0, wx.ALIGN_CENTER)])
# Preview Area
pre_sizer = wx.BoxSizer(wx.HORIZONTAL)
pre_sizer.AddMany([((10, 10), 0),
(wx.StaticText(self, label=_("Preview") + u": "),
0, wx.ALIGN_LEFT)])
sizer.AddMany([((10, 10), 0), (pre_sizer, 0, wx.ALIGN_LEFT),
(self.preview, 0, wx.EXPAND | wx.BOTTOM)])
# Create Buttons
b_sizer = wx.BoxSizer(wx.HORIZONTAL)
ok_b = wx.Button(self, wx.ID_OK, _("OK"))
ok_b.SetDefault()
b_sizer.AddMany([(wx.Button(self, wx.ID_CANCEL, _("Cancel")), 0),
((5, 5), 0),
(wx.Button(self, wx.ID_SAVE, _("Export")), 0),
((5, 5), 0), (ok_b, 0)])
sizer.Add(b_sizer, 0, wx.ALIGN_RIGHT |
wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
# Finish the Layout
self.SetSizer(sizer)
self.SetAutoLayout(True)
sizer.Fit(self)
self.EnableSettings(False)
# Event Handlers
self.Bind(wx.EVT_BUTTON, self.OnCancel, id=wx.ID_CANCEL)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=wx.ID_OK)
self.Bind(wx.EVT_BUTTON, self.OnExport, id=wx.ID_SAVE)
self.Bind(wx.EVT_CHOICE, self.OnChoice)
self.Bind(wx.EVT_CHECKBOX, self.OnCheck)
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.Bind(wx.EVT_LISTBOX, self.OnListBox)
self.Bind(eclib.EVT_COLORSETTER, self.OnColor)
self.preview.Bind(wx.EVT_LEFT_UP, self.OnTextRegion)
self.preview.Bind(wx.EVT_KEY_UP, self.OnTextRegion)
#--- End Init ---#
def __LexerChoice(self):
"""Returns a sizer object con
|
taining a choice control with all
available lexers listed in it.
@return: sizer item containing a choice control with all available
syntax test files available
"""
lex_sizer = wx.BoxSizer(wx.HORIZONTAL)
lexer_lbl = wx.StaticText(self.ctrl_pane, wx.ID_ANY,
_("Syntax Files"
|
) + u": ")
lexer_lst = wx.Choice(self.ctrl_pane, ed_glob.ID_LEXER,
choices=syntax.GetLexerList())
lexer_lst.SetToolTip(wx.ToolTip(_("Set the preview file type")))
lexer_lst.SetStringSelection(u"CPP")
lex_sizer.AddMany([((10, 10)), (lexer_lbl, 0, wx.ALIGN_CENTER_VERTICAL),
((5, 0)), (lexer_lst, 1, wx.ALIGN_CENTER_VERTICAL),
((10, 10))])
return lex_sizer
def __StyleSheets(self):
"""Returns a sizer item that contains a choice control with
all the available style sheets listed in it.
@return: sizer item holding all installed style sheets
"""
ss_sizer = wx.BoxSizer(wx.HORIZONTAL)
ss_lbl = wx.StaticText(self.ctrl_pane, wx.ID_ANY,
_("Style Theme") + u": ")
ss_lst = util.GetResourceFiles(u'styles', get_all=True)
ss_choice = wx.Choice(self.ctrl_pane, ed_glob.ID_PREF_SYNTHEME,
choices=sorted(ss_lst))
ss_choice.SetToolTip(wx.ToolTip(_("Base new theme on existing one")))
ss_choice.SetStringSelec
|
alvarolopez/pyocci
|
setup.py
|
Python
|
apache-2.0
| 171
| 0
|
#!/usr/bin/env python
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO
|
- DO NOT EDIT
import setuptools
setuptools.setup(
setup_requires=
|
['pbr'],
pbr=True)
|
dittaeva/ocitysmap
|
ocitysmap2/layoutlib/commons.py
|
Python
|
agpl-3.0
| 1,272
| 0.003943
|
# -*- coding: utf-8 -*-
# ocitysmap, city map
|
and street index generator from OpenStreetMap data
# Copyright (C) 2010 David Decotigny
# Copyright (C) 2010 Frédéric Lehobey
# Copyright (C) 2010 Pierre Mauduit
# Copyright (C) 2010 David Mentré
# Copyright (C) 2010 Maxime Petazzoni
# Copyright (C) 2010 Thomas Petazzoni
# Copyright (C) 2010 Gaël Utard
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free So
|
ftware Foundation, either version 3 of the
# License, or any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# PT/metrics conversion routines
PT_PER_INCH = 72.0
def convert_pt_to_dots(pt, dpi = PT_PER_INCH):
return float(pt * dpi) / PT_PER_INCH
def convert_mm_to_pt(mm):
return ((mm/10.0) / 2.54) * 72
def convert_pt_to_mm(pt):
return (float(pt) * 10.0 * 2.54) / 72
|
rmcgurrin/PyQLab
|
instruments/AWGs.py
|
Python
|
apache-2.0
| 872
| 0
|
"""
AWGs
"""
from atom.api import Atom, List, Int, Float, Range, Enum, Bool, Constant, Str
from
|
Instrument import Instrument
import enaml
from enaml.qt.qt_application import QtApplication
from instruments.AWGBase import AWGChannel, AWG, AWGDriver
from plugins import find_plugins
AWGList = []
# local plugin registration to enable access by AWGs.plugin
plugins = find_plugins(AWG, verbose=False)
for plugin in plugins:
if plug
|
in not in AWGList:
AWGList.append(plugin)
if plugin.__name__ not in globals().keys():
globals().update({plugin.__name__: plugin})
print 'Registered Plugin {0}'.format(plugin.__name__)
if __name__ == "__main__":
with enaml.imports():
from AWGsViews import AWGView
awg = APS(label='BBNAPS1')
app = QtApplication()
view = AWGView(awg=awg)
view.show()
app.start()
|
Jorge-Rodriguez/ansible
|
test/units/modules/network/ftd/test_ftd_configuration.py
|
Python
|
gpl-3.0
| 5,145
| 0.002527
|
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
from ansible.module_utils import basic
from ansible.module_utils.network.ftd.common import FtdConfigurationError, FtdServerError, FtdUnexpectedResponse
from ansible.module_utils.network.ftd.configuration import FtdInvalidOperationNameError, CheckModeException
from ansible.module_utils.network.ftd.fdm_swagger_client import ValidationError
from ansible.modules.network.ftd import ftd_configuration
class TestFtdConfiguration(object):
module = ftd_configuration
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture(autouse=True)
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.modules.network.ftd.ftd_configuration.Connection')
return connection_class_mock.return_value
@pytest.fixture
def resource_mock(self, mocker):
resource_class_mock = mocker.patch('ansible.modules.network.ftd.ftd_configuration.BaseConfigurationResource')
resource_instance = resource_class_mock.return_value
return resource_instance.execute_operation
def test_module_should_fail_when_ftd_invalid_operation_name_error(self, resource_mock):
operation_name = 'test name'
resource_mock.side_effect = FtdInvalidOperationNameError(operation_name)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert 'Invalid operation name provided: %s' % operation_name == result['msg']
def test_module_should_fail_when_ftd_configuration_error(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
|
resource_mock.side_effect = FtdConfigurationError(msg)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['fai
|
led']
assert 'Failed to execute %s operation because of the configuration error: %s' % (operation_name, msg) == \
result['msg']
def test_module_should_fail_when_ftd_server_error(self, resource_mock):
operation_name = 'test name'
code = 500
response = {'error': 'foo'}
resource_mock.side_effect = FtdServerError(response, code)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert 'Server returned an error trying to execute %s operation. Status code: %s. ' \
'Server response: %s' % (operation_name, code, response) == \
result['msg']
def test_module_should_fail_when_validation_error(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = ValidationError(msg)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert msg == result['msg']
def test_module_should_fail_when_unexpected_server_response(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = FtdUnexpectedResponse(msg)
result = self._run_module_with_fail_json({'operation': operation_name})
assert result['failed']
assert msg == result['msg']
def test_module_should_fail_when_check_mode_exception(self, resource_mock):
operation_name = 'test name'
msg = 'Foo error.'
resource_mock.side_effect = CheckModeException(msg)
result = self._run_module({'operation': operation_name})
assert not result['changed']
def test_module_should_run_successful(self, resource_mock):
operation_name = 'test name'
resource_mock.return_value = 'ok'
result = self._run_module({'operation': operation_name})
assert result['response'] == 'ok'
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]
def _run_module_with_fail_json(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleFailJson) as exc:
self.module.main()
result = exc.value.args[0]
return result
|
capturePointer/or-tools
|
examples/python/nontransitive_dice.py
|
Python
|
apache-2.0
| 5,680
| 0.011972
|
# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, e
|
ither express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Nontransitive dice in Goog
|
le CP Solver.
From
http://en.wikipedia.org/wiki/Nontransitive_dice
'''
A set of nontransitive dice is a set of dice for which the relation
'is more likely to roll a higher number' is not transitive. See also
intransitivity.
This situation is similar to that in the game Rock, Paper, Scissors,
in which each element has an advantage over one choice and a
disadvantage to the other.
'''
I start with the 3 dice version
'''
* die A has sides {2,2,4,4,9,9},
* die B has sides {1,1,6,6,8,8}, and
* die C has sides {3,3,5,5,7,7}.
'''
3 dice:
Maximum winning: 27
comp: [19, 27, 19]
dice:
[[0, 0, 3, 6, 6, 6],
[2, 5, 5, 5, 5, 5],
[1, 1, 4, 4, 4, 7]]
max_win: 27
Number of solutions: 1
Nodes: 1649873 Time: 25.94
getFailures: 1649853
getBacktracks: 1649873
getPropags: 98105090
Max winnings where they are the same: 21
comp: [21, 21, 21]
dice:
[[0, 0, 3, 3, 3, 6],
[2, 2, 2, 2, 2, 5],
[1, 1, 1, 4, 4, 4]]
max_win: 21
Compare with these models:
* MiniZinc: http://hakank.org/minizinc/nontransitive_dice.mzn
* Comet: http://hakank.org/comet/nontransitive_dice.co
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import sys
import string
from ortools.constraint_solver import pywrapcp
def main(m=3, n=6, minimize_val=0):
# Create the solver.
solver = pywrapcp.Solver("Nontransitive dice")
#
# data
#
print "number of dice:", m
print "number of sides:", n
#
# declare variables
#
dice = {}
for i in range(m):
for j in range(n):
dice[(i, j)] = solver.IntVar(1, n * 2, "dice(%i,%i)" % (i, j))
dice_flat = [dice[(i, j)] for i in range(m) for j in range(n)]
comp = {}
for i in range(m):
for j in range(2):
comp[(i, j)] = solver.IntVar(0, n * n, "comp(%i,%i)" % (i, j))
comp_flat = [comp[(i, j)] for i in range(m) for j in range(2)]
# The following variables are for summaries or objectives
gap = [solver.IntVar(0, n * n, "gap(%i)" % i) for i in range(m)]
gap_sum = solver.IntVar(0, m * n * n, "gap_sum")
max_val = solver.IntVar(0, n * 2, "max_val")
max_win = solver.IntVar(0, n * n, "max_win")
# number of occurrences of each value of the dice
counts = [solver.IntVar(0, n * m, "counts(%i)" % i) for i in range(n * 2 + 1)]
#
# constraints
#
# number of occurrences for each number
solver.Add(solver.Distribute(dice_flat, range(n * 2 + 1), counts))
solver.Add(max_win == solver.Max(comp_flat))
solver.Add(max_val == solver.Max(dice_flat))
# order of the number of each die, lowest first
[solver.Add(dice[(i, j)] <= dice[(i, j + 1)])
for i in range(m) for j in range(n - 1)]
# nontransitivity
[comp[i, 0] > comp[i, 1] for i in range(m)],
# probability gap
[solver.Add(gap[i] == comp[i, 0] - comp[i, 1]) for i in range(m)]
[solver.Add(gap[i] > 0) for i in range(m)]
solver.Add(gap_sum == solver.Sum(gap))
# and now we roll...
# Number of wins for [A vs B, B vs A]
for d in range(m):
b1 = [solver.IsGreaterVar(dice[d % m, r1], dice[(d + 1) % m, r2])
for r1 in range(n) for r2 in range(n)]
solver.Add(comp[d % m, 0] == solver.Sum(b1))
b2 = [solver.IsGreaterVar(dice[(d + 1) % m, r1], dice[d % m, r2])
for r1 in range(n) for r2 in range(n)]
solver.Add(comp[d % m, 1] == solver.Sum(b2))
# objective
if minimize_val != 0:
print "Minimizing max_val"
objective = solver.Minimize(max_val, 1)
# other experiments
# objective = solver.Maximize(max_win, 1)
# objective = solver.Maximize(gap_sum, 1)
#
# solution and search
#
db = solver.Phase(dice_flat + comp_flat,
solver.INT_VAR_DEFAULT,
solver.ASSIGN_MIN_VALUE)
if minimize_val:
solver.NewSearch(db, [objective])
else:
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
print "gap_sum:", gap_sum.Value()
print "gap:", [gap[i].Value() for i in range(m)]
print "max_val:", max_val.Value()
print "max_win:", max_win.Value()
print "dice:"
for i in range(m):
for j in range(n):
print dice[(i, j)].Value(),
print
print "comp:"
for i in range(m):
for j in range(2):
print comp[(i, j)].Value(),
print
print "counts:", [counts[i].Value() for i in range(n * 2 + 1)]
print
num_solutions += 1
solver.EndSearch()
print
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
m = 3 # number of dice
n = 6 # number of sides of each die
minimize_val = 0 # Minimizing max value (0: no, 1: yes)
if __name__ == "__main__":
if len(sys.argv) > 1:
m = string.atoi(sys.argv[1])
if len(sys.argv) > 2:
n = string.atoi(sys.argv[2])
if len(sys.argv) > 3:
minimize_val = string.atoi(sys.argv[3])
main(m, n, minimize_val)
|
victorywang80/Maintenance
|
saltstack/src/salt/modules/xapi.py
|
Python
|
apache-2.0
| 24,954
| 0.000761
|
# -*- coding: utf-8 -*-
'''
This module (mostly) uses the XenAPI to manage Xen virtual machines.
Big fat warning: the XenAPI used in this file is the one bundled with
Xen Source, NOT XenServer nor Xen Cloud Platform. As a matter of fact it
*will* fail under those platforms. From what I've read, little work is needed
to adapt this code to XS/XCP, mostly playing with XenAPI version, but as
XCP is not taking precedence on Xen Source on many platforms, please keep
compatibility in mind.
Useful documentation:
. http://downloads.xen.org/Wiki/XenAPI/xenapi-1.0.6.pdf
. http://docs.vmd.ci
|
trix.com/XenServer/6.0.0/1.0/en_gb/api/
. https://github.com/xen-org/xen-api/tree/master/scripts/examples/python
. http://xenbits.xen.org/gitweb/?p=xen.git;a=tree;f=tools/python/xen/xm;hb=HEAD
'''
# Import pytho
|
n libs
import sys
import contextlib
import os
try:
import importlib
HAS_IMPORTLIB = True
except ImportError:
# Python < 2.7 does not have importlib
HAS_IMPORTLIB = False
# Import salt libs
from salt.exceptions import CommandExecutionError
import salt.utils
# Define the module's virtual name
__virtualname__ = 'virt'
# This module has only been tested on Debian GNU/Linux and NetBSD, it
# probably needs more path appending for other distributions.
# The path to append is the path to python Xen libraries, where resides
# XenAPI.
def _check_xenapi():
if __grains__['os'] == 'Debian':
debian_xen_version = '/usr/lib/xen-common/bin/xen-version'
if os.path.isfile(debian_xen_version):
# __salt__ is not available in __virtual__
xenversion = salt.modules.cmdmod._run_quiet(debian_xen_version)
xapipath = '/usr/lib/xen-{0}/lib/python'.format(xenversion)
if os.path.isdir(xapipath):
sys.path.append(xapipath)
try:
if HAS_IMPORTLIB:
return importlib.import_module('xen.xm.XenAPI')
return __import__('xen.xm.XenAPI')
except ImportError:
return False
def __virtual__():
if _check_xenapi() is not False:
return __virtualname__
return False
@contextlib.contextmanager
def _get_xapi_session():
'''
Get a session to XenAPI. By default, use the local UNIX socket.
'''
_xenapi = _check_xenapi()
xapi_uri = __salt__['config.option']('xapi.uri')
xapi_login = __salt__['config.option']('xapi.login')
xapi_password = __salt__['config.option']('xapi.password')
if not xapi_uri:
# xend local UNIX socket
xapi_uri = 'httpu:///var/run/xend/xen-api.sock'
if not xapi_login:
xapi_login = ''
if not xapi_password:
xapi_password = ''
try:
session = _xenapi.Session(xapi_uri)
session.xenapi.login_with_password(xapi_login, xapi_password)
yield session.xenapi
except Exception:
raise CommandExecutionError('Failed to connect to XenAPI socket.')
finally:
session.xenapi.session.logout()
# Used rectypes (Record types):
#
# host
# host_cpu
# VM
# VIF
# VBD
def _get_xtool():
'''
Internal, returns xl or xm command line path
'''
for xtool in ['xl', 'xm']:
path = salt.utils.which(xtool)
if path is not None:
return path
def _get_all(xapi, rectype):
'''
Internal, returns all members of rectype
'''
return getattr(xapi, rectype).get_all()
def _get_label_uuid(xapi, rectype, label):
'''
Internal, returns label's uuid
'''
try:
return getattr(xapi, rectype).get_by_name_label(label)[0]
except Exception:
return False
def _get_record(xapi, rectype, uuid):
'''
Internal, returns a full record for uuid
'''
return getattr(xapi, rectype).get_record(uuid)
def _get_record_by_label(xapi, rectype, label):
'''
Internal, returns a full record for uuid
'''
uuid = _get_label_uuid(xapi, rectype, label)
if uuid is False:
return False
return getattr(xapi, rectype).get_record(uuid)
def _get_metrics_record(xapi, rectype, record):
'''
Internal, returns metrics record for a rectype
'''
metrics_id = record['metrics']
return getattr(xapi, '{0}_metrics'.format(rectype)).get_record(metrics_id)
def _get_val(record, keys):
'''
Internal, get value from record
'''
data = record
for key in keys:
if key in data:
data = data[key]
else:
return None
return data
def list_vms():
'''
Return a list of virtual machine names on the minion
CLI Example:
.. code-block:: bash
salt '*' virt.list_vms
'''
with _get_xapi_session() as xapi:
hosts = xapi.VM.get_all()
ret = []
for _host in hosts:
if xapi.VM.get_record(_host)['is_control_domain'] is False:
ret.append(xapi.VM.get_name_label(_host))
return ret
def vm_info(vm_=None):
'''
Return detailed information about the vms.
If you pass a VM name in as an argument then it will return info
for just the named VM, otherwise it will return all VMs.
CLI Example:
.. code-block:: bash
salt '*' virt.vm_info
'''
with _get_xapi_session() as xapi:
def _info(vm_):
vm_rec = _get_record_by_label(xapi, 'VM', vm_)
if vm_rec is False:
return False
vm_metrics_rec = _get_metrics_record(xapi, 'VM', vm_rec)
return {'cpu': vm_metrics_rec['VCPUs_number'],
'maxCPU': _get_val(vm_rec, ['VCPUs_max']),
'cputime': vm_metrics_rec['VCPUs_utilisation'],
'disks': get_disks(vm_),
'nics': get_nics(vm_),
'maxMem': int(_get_val(vm_rec, ['memory_dynamic_max'])),
'mem': int(vm_metrics_rec['memory_actual']),
'state': _get_val(vm_rec, ['power_state'])
}
info = {}
if vm_:
ret = _info(vm_)
if ret is not None:
info[vm_] = ret
else:
for vm_ in list_vms():
ret = _info(vm_)
if ret is not None:
info[vm_] = _info(vm_)
return info
def vm_state(vm_=None):
'''
Return list of all the vms and their state.
If you pass a VM name in as an argument then it will return info
for just the named VM, otherwise it will return all VMs.
CLI Example:
.. code-block:: bash
salt '*' virt.vm_state <vm name>
'''
with _get_xapi_session() as xapi:
info = {}
if vm_:
info[vm_] = _get_record_by_label(xapi, 'VM', vm_)['power_state']
return info
for vm_ in list_vms():
info[vm_] = _get_record_by_label(xapi, 'VM', vm_)['power_state']
return info
def node_info():
'''
Return a dict with information about this node
CLI Example:
.. code-block:: bash
salt '*' virt.node_info
'''
with _get_xapi_session() as xapi:
# get node uuid
host_rec = _get_record(xapi, 'host', _get_all(xapi, 'host')[0])
# get first CPU (likely to be a core) uuid
host_cpu_rec = _get_record(xapi, 'host_cpu', host_rec['host_CPUs'][0])
# get related metrics
host_metrics_rec = _get_metrics_record(xapi, 'host', host_rec)
# adapted / cleaned up from Xen's xm
def getCpuMhz():
cpu_speeds = [int(host_cpu_rec["speed"])
for host_cpu_it in host_cpu_rec
if "speed" in host_cpu_it]
if cpu_speeds:
return sum(cpu_speeds) / len(cpu_speeds)
else:
return 0
def getCpuFeatures():
if host_cpu_rec:
return host_cpu_rec['features']
def getFreeCpuCount():
cnt = 0
for host_cpu_it in host_cpu_rec:
if len(host_cpu_rec['cpu_pool']) == 0:
cnt += 1
return cnt
info = {
'cpucores': _get_val(host_rec,
["cpu_configuration"
|
frreiss/tensorflow-fred
|
tensorflow/python/tpu/tpu.py
|
Python
|
apache-2.0
| 97,112
| 0.006683
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================
"""Library of TPU helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import enum
import typing
from typing import Any, Callable, Iterable, List, Optional, Text, Tuple, Union
from absl import logging
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.compiler.tf2xla.python import xla as tf2xla
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.protobuf.tpu import dynamic_padding_pb2 as dynamic_padding
from tensorflow.core.protobuf.tpu import tpu_embedding_configuration_pb2 as embedding_pb2
from tensorflow.python.compiler.xla import xla
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import auto_control_deps
from tensorflow.python.framework import c_api_util
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.tpu import device_assignment as device_assignment_lib
from tensorflow.python.tpu import tpu_feed
from tensorflow.python.tpu import tpu_function
from tensorflow.python.tpu import tpu_name_util
from tensorflow.python.tpu.ops import tpu_ops
from tensorflow.python.types import core as core_types
from tensorflow.python.util import compat
from tensorflow.python.util import nest
from tensorflow.python.util import object_identity
from tensorflow.python.util.tf_export import tf_export
ops.NotDifferentiable("TPUReplicatedInput")
# Operations that indicate some error in the users graph, e.g. a placeholder
# that's introduced outside of the infeed.
_DENYLISTED_OPS = set([
"Placeholder",
])
# XLA doesn't currently support reading of intermediate tensors, thus some ops
# are not supported.
_UNSUPPORTED_OPS = set([
"AudioSummary",
"AudioSummaryV2",
"HistogramSummary",
"ImageSummary",
"MergeSummary",
"Print",
"ScalarSummary",
"TensorSummary",
"TensorSummaryV2",
])
# Ops which can be safely pruned from XLA compile if they have no consumers.
# These ops should also have no inputs.
_UNCONNECTED_OPS_TO_PRUNE = set(["Placeholder", "VarHandleOp"])
_MAX_WARNING_LINES = 5
_TPU_REPLICATE_ATTR = "_tpu_replicate"
_POST_DEVICE_REWRITE_ATTR = "_post_device_rewrite"
_TPU_COMPILATION_STATUS_ATTR = "_tpu_compilation_status"
_OUTSIDE_COMPILATION_ATTR = "_xla_outside_compilation"
_PIVOT_FOR_CLUSTER = "_pivot_for_cluster"
core = tpu_name_util.core
def _tpu_system_device_name(job: Optional[Text]) -> Text:
"""Returns the device name for the TPU_SYSTEM device of `job`."""
if job is None:
return "/device:TPU_SYSTEM:0"
else:
return "/job:%s/device:TPU_SYSTEM:0" % job
@tf_export(v1=["tpu.initialize_system"])
def initialize_system(
embedding_config: Optional[embedding_pb2.TPUEmbeddingConfiguration] = None,
job: Optional[Text] = None,
compilation_failure_closes_chips: bool = True
) -> core_types.Tensor:
"""Initializes a distributed TPU system for use with TensorFlow.
Args:
embedding_config: If not None, a `TPUEmbeddingConfiguration` proto
describing the desired configuration of the hardware embedding lookup
tables. If embedding_config is None, no hardware embeddings can be used.
job: The job (the XXX in TensorFlow device specification /job:XXX) that
contains the TPU devices that will be initialized. If job=None it is
assumed there is only one job in the TensorFlow flock, and an error will
be returned if this assumption does not hold.
compilation_failure_closes_chips: Set the configuration whether
we want to close TPU chips when there is a compilation failure.
Returns:
A serialized `TopologyProto` that describes the TPU system. Note:
the topology must be evaluated using `Session.run` before it can be used.
"""
config_string = ("" if embedding_config is None else
embedding_config.SerializeToString())
with ops.device(_tpu_system_device_name(job)):
topology = tpu_ops.configure_distributed_tpu(
compilation_failure_closes_chips=compilation_failure_closes_chips)
if embedding_config is None:
return topology
# This set of control dependencies is needed as this function is expected to
# return an op which will return the topology when executed, but we need to
# call the embedding initialization op between initializing the TPU and
# returning the topology.
with ops.control_dependencies([topology]):
embedding_init = tpu_ops.configure_tpu_embedding(config=config_string)
with ops.control_dependencies([embedding_init]):
return array_ops.identity(topology, name="tpu_init_identity")
def initialize_system_for_tpu_embedding(
embedding_config: embedding_pb2.TPUEmbeddingConfiguration,
job: Optional[Text] = None,
) -> ops.Operat
|
ion:
"""Initializes a distributed TPU Embedding system for use with TensorFlow.
The following two are equivalent:
1. initialize_system() with embedding_config.
2. initialize_system() without embedding_config, then
initialize_system_for_tpu_embedding().
initialize_system() should not be called with embedding_config i
|
f
initialize_system_for_tpu_embedding() is meant to be called later.
Args:
embedding_config: a `TPUEmbeddingConfiguration` proto describing the desired
configuration of the hardware embedding lookup tables.
job: The job (the XXX in TensorFlow device specification /job:XXX) that
contains the TPU devices that will be initialized. If job=None it is
assumed there is only one job in the TensorFlow flock, and an error will
be returned if this assumption does not hold.
Returns:
A no-op.
"""
config_string = embedding_config.SerializeToString()
with ops.device(_tpu_system_device_name(job)):
return tpu_ops.configure_tpu_embedding(config=config_string)
@tf_export(v1=["tpu.shutdown_system"])
def shutdown_system(job: Optional[Text] = None) -> ops.Operation:
"""Shuts down a running a distributed TPU system.
Args:
job: The job (the XXX in TensorFlow device specification /job:XXX) that
contains the TPU devices that will be shutdown. If job=None it is
assumed there is only one job in the TensorFlow flock, and an error will
be returned if this assumption does not hold.
"""
with ops.device(_tpu_system_device_name(job)):
shutdown_distributed_tpu = tpu_ops.shutdown_distributed_tpu()
return shutdown_distributed_tpu
def _enclosing_tpu_context_and_graph() -> Tuple[Any, Any]:
"""Returns the TPUReplicateContext and its associated graph."""
graph = ops.get_default_graph()
while graph is not None:
# pylint: disable=protected-access
context_ = graph._get_control_flow_context()
# pylint: enable=protected-acce
|
vjFaLk/frappe
|
frappe/core/doctype/view_log/test_view_log.py
|
Python
|
mit
| 850
| 0.031765
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestViewLog(unittest.TestCase):
def tearDown(self):
frappe.set_user('Administrator')
def test_if_user_is_added(self):
ev
|
= frappe.get_doc({
'doctype': 'Event',
'subject': 'test event for view logs',
'starts_on': '2018-06-04 14:11:00',
'event_type': 'Public'
}).ins
|
ert()
frappe.set_user('test@example.com')
from frappe.desk.form.load import getdoc
# load the form
getdoc('Event', ev.name)
a = frappe.get_value(
doctype="View Log",
filters={
"reference_doctype": "Event",
"reference_name": ev.name
},
fieldname=['viewed_by']
)
self.assertEqual('test@example.com', a)
self.assertNotEqual('test1@example.com', a)
|
evildmp/arkestra-publications
|
publications/migrations/0003_auto__chg_field_student_student_id.py
|
Python
|
bsd-2-clause
| 40,862
| 0.007905
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Student.student_id'
db.alter_column('publications_student', 'student_id', self.gf('django.db.models.fields.CharField')(unique=True, max_length=12))
def backwards(self, orm):
# Changing field 'Student.student_id'
db.alter_column('publications_student', 'student_id', self.gf('django.db.models.fields.CharField')(max_length=8, unique=True))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharFi
|
eld', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
|
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 3, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')", 'object_name': 'Page'},
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'page_flags': ('django.db.models.fields.TextField', [], {'null': True, 'blank': True}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('
|
mdworks2016/work_development
|
Python/20_Third_Certification/venv/lib/python3.7/site-packages/celery/canvas.py
|
Python
|
apache-2.0
| 55,734
| 0.000036
|
# -*- coding: utf-8 -*-
"""Composing task work-flows.
.. seealso:
You should import these from :mod:`celery` and not this module.
"""
from __future__ import absolute_import, unicode_literals
import itertools
import operator
from collections import deque
from copy import deepcopy
from functools import partial as _partial
from functools import reduce
from operator import itemgetter
from kombu.utils.functional import fxrange, reprcall
from kombu.utils.objects import cached_property
from kombu.utils.uuid import uuid
from vine import barrier
from celery._state import current_app
from celery.five import PY3, python_2_unicode_compatible
from celery.local import try_import
from celery.result import GroupResult, allow_join_result
from celery.utils import abstract
from celery.utils.collections import ChainMap
from celery.utils.functional import _regen
from celery.utils.functional import chunks as _chunks
from celery.utils.functional import (is_list, maybe_list, regen,
seq_concat_item, seq_concat_seq)
from celery.utils.objects import getitem_property
from celery.utils.text import remove_repeating_from_task, truncate
try:
from collections.abc import MutableSequence
except ImportError:
# TODO: Remove this when we drop Python 2.7 support
from collections import MutableSequence
__all__ = (
'Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
'group', 'chord', 'signature', 'maybe_signature',
)
# json in Python 2.7 borks if dict contains byte keys.
JSON_NEEDS_UNICODE_KEYS = PY3 and not try_import('simplejson')
def maybe_unroll_group(group):
"""Unroll group with only one member."""
# Issue #1656
try:
size = len(group.tasks)
except TypeError:
try:
size = group.tasks.__length_hint__()
except (AttributeError, TypeError):
return group
else:
return list(group.tasks)[0] if size == 1 else group
else:
return group.tasks[0] if size == 1 else group
def task_name_from(task):
return getattr(task, 'name', task)
def _upgrade(fields, sig):
"""Used by custom signatures in .from_dict, to keep common fields."""
sig.update(chord_size=fields.get('chord_size'))
return sig
@abstract.CallableSignature.register
@python_2_unicode_compatible
class Signature(dict):
"""Task Signature.
Class that wraps the arguments and execution options
for a single task invocation.
Used as the parts in a :class:`group` and other constructs,
or to pass tasks around as callbacks while being compatible
with serial
|
izers with a strict type subset.
Signa
|
tures can also be created from tasks:
- Using the ``.signature()`` method that has the same signature
as ``Task.apply_async``:
.. code-block:: pycon
>>> add.signature(args=(1,), kwargs={'kw': 2}, options={})
- or the ``.s()`` shortcut that works for star arguments:
.. code-block:: pycon
>>> add.s(1, kw=2)
- the ``.s()`` shortcut does not allow you to specify execution options
but there's a chaning `.set` method that returns the signature:
.. code-block:: pycon
>>> add.s(2, 2).set(countdown=10).set(expires=30).delay()
Note:
You should use :func:`~celery.signature` to create new signatures.
The ``Signature`` class is the type returned by that function and
should be used for ``isinstance`` checks for signatures.
See Also:
:ref:`guide-canvas` for the complete guide.
Arguments:
task (Union[Type[celery.app.task.Task], str]): Either a task
class/instance, or the name of a task.
args (Tuple): Positional arguments to apply.
kwargs (Dict): Keyword arguments to apply.
options (Dict): Additional options to :meth:`Task.apply_async`.
Note:
If the first argument is a :class:`dict`, the other
arguments will be ignored and the values in the dict will be used
instead::
>>> s = signature('tasks.add', args=(2, 2))
>>> signature(s)
{'task': 'tasks.add', args=(2, 2), kwargs={}, options={}}
"""
TYPES = {}
_app = _type = None
@classmethod
def register_type(cls, name=None):
def _inner(subclass):
cls.TYPES[name or subclass.__name__] = subclass
return subclass
return _inner
@classmethod
def from_dict(cls, d, app=None):
typ = d.get('subtask_type')
if typ:
target_cls = cls.TYPES[typ]
if target_cls is not cls:
return target_cls.from_dict(d, app=app)
return Signature(d, app=app)
def __init__(self, task=None, args=None, kwargs=None, options=None,
type=None, subtask_type=None, immutable=False,
app=None, **ex):
self._app = app
if isinstance(task, dict):
super(Signature, self).__init__(task) # works like dict(d)
else:
# Also supports using task class/instance instead of string name.
try:
task_name = task.name
except AttributeError:
task_name = task
else:
self._type = task
super(Signature, self).__init__(
task=task_name, args=tuple(args or ()),
kwargs=kwargs or {},
options=dict(options or {}, **ex),
subtask_type=subtask_type,
immutable=immutable,
chord_size=None,
)
def __call__(self, *partial_args, **partial_kwargs):
"""Call the task directly (in the current process)."""
args, kwargs, _ = self._merge(partial_args, partial_kwargs, None)
return self.type(*args, **kwargs)
def delay(self, *partial_args, **partial_kwargs):
"""Shortcut to :meth:`apply_async` using star arguments."""
return self.apply_async(partial_args, partial_kwargs)
def apply(self, args=None, kwargs=None, **options):
"""Call task locally.
Same as :meth:`apply_async` but executed the task inline instead
of sending a task message.
"""
args = args if args else ()
kwargs = kwargs if kwargs else {}
# Extra options set to None are dismissed
options = {k: v for k, v in options.items() if v is not None}
# For callbacks: extra args are prepended to the stored args.
args, kwargs, options = self._merge(args, kwargs, options)
return self.type.apply(args, kwargs, **options)
def apply_async(self, args=None, kwargs=None, route_name=None, **options):
"""Apply this task asynchronously.
Arguments:
args (Tuple): Partial args to be prepended to the existing args.
kwargs (Dict): Partial kwargs to be merged with existing kwargs.
options (Dict): Partial options to be merged
with existing options.
Returns:
~@AsyncResult: promise of future evaluation.
See also:
:meth:`~@Task.apply_async` and the :ref:`guide-calling` guide.
"""
args = args if args else ()
kwargs = kwargs if kwargs else {}
# Extra options set to None are dismissed
options = {k: v for k, v in options.items() if v is not None}
try:
_apply = self._apply_async
except IndexError: # pragma: no cover
# no tasks for chain, etc to find type
return
# For callbacks: extra args are prepended to the stored args.
if args or kwargs or options:
args, kwargs, options = self._merge(args, kwargs, options)
else:
args, kwargs, options = self.args, self.kwargs, self.options
# pylint: disable=too-many-function-args
# Borks on this, as it's a property
return _apply(args, kwargs, **options)
def _merge(self, args=None, kwargs=None, options=None, force=False):
args = args if args else ()
kwargs = kwargs if kwargs else {}
options = options if options else {}
if sel
|
popazerty/e2-dmm
|
lib/python/Screens/AudioSelectionExtended.py
|
Python
|
gpl-2.0
| 9,756
| 0.031673
|
from Screen import Screen
from Components.ServiceEventTracker import ServiceEventTracker
from Components
|
.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from Components.config import config, ConfigSubsection, getConfigListEntry, ConfigNothing, ConfigSelection, ConfigOnOff
from Components.Mu
|
ltiContent import MultiContentEntryText
from Components.Sources.List import List
from Components.Sources.Boolean import Boolean
from Components.SystemInfo import SystemInfo
from enigma import iPlayableService
from Tools.ISO639 import LanguageCodes
from Tools.BoundFunction import boundFunction
FOCUS_CONFIG, FOCUS_STREAMS = range(2)
[PAGE_AUDIO, PAGE_SUBTITLES] = ["audio", "subtitles"]
class AudioSelection(Screen, ConfigListScreen):
def __init__(self, session, infobar=None, page=PAGE_AUDIO):
Screen.__init__(self, session)
self["streams"] = List([])
self["key_red"] = Boolean(False)
self["key_green"] = Boolean(False)
self["key_yellow"] = Boolean(True)
self["key_blue"] = Boolean(False)
ConfigListScreen.__init__(self, [])
self.infobar = infobar or self.session.infobar
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedInfo: self.__updatedInfo
})
self.cached_subtitle_checked = False
self.__selected_subtitle = None
self["actions"] = ActionMap(["ColorActions", "SetupActions", "DirectionActions"],
{
"red": self.keyRed,
"green": self.keyGreen,
"yellow": self.keyYellow,
"blue": self.keyBlue,
"ok": self.keyOk,
"cancel": self.cancel,
"up": self.keyUp,
"down": self.keyDown,
}, -3)
self.settings = ConfigSubsection()
choicelist = [(PAGE_AUDIO,_("audio tracks")), (PAGE_SUBTITLES,_("Subtitles"))]
self.settings.menupage = ConfigSelection(choices = choicelist, default=page)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self["config"].instance.setSelectionEnable(False)
self.focus = FOCUS_STREAMS
self.settings.menupage.addNotifier(self.fillList)
def fillList(self, arg=None):
streams = []
conflist = []
selectedidx = 0
service = self.session.nav.getCurrentService()
self.audioTracks = audio = service and service.audioTracks()
n = audio and audio.getNumberOfTracks() or 0
if self.settings.menupage.getValue() == PAGE_AUDIO:
self.setTitle(_("Select audio track"))
if SystemInfo["CanDownmixAC3"]:
self.settings.downmix = ConfigOnOff(default=config.av.downmix_ac3.value)
self.settings.downmix.addNotifier(self.changeAC3Downmix, initial_call = False)
conflist.append(getConfigListEntry(_("AC3 downmix"), self.settings.downmix))
self["key_red"].setBoolean(True)
if n > 0:
self.audioChannel = service.audioChannel()
choicelist = [("0",_("left")), ("1",_("stereo")), ("2", _("right"))]
self.settings.channelmode = ConfigSelection(choices = choicelist, default = str(self.audioChannel.getCurrentChannel()))
self.settings.channelmode.addNotifier(self.changeMode, initial_call = False)
conflist.append(getConfigListEntry(_("Channel"), self.settings.channelmode))
self["key_green"].setBoolean(True)
selectedAudio = self.audioTracks.getCurrentTrack()
for x in range(n):
number = str(x)
i = audio.getTrackInfo(x)
languages = i.getLanguage().split('/')
description = i.getDescription() or _("<unknown>")
selected = ""
language = ""
if selectedAudio == x:
selected = _("Running")
selectedidx = x
cnt = 0
for lang in languages:
if cnt:
language += ' / '
if LanguageCodes.has_key(lang):
language += LanguageCodes[lang][0]
elif lang == "und":
_("<unknown>")
else:
language += lang
cnt += 1
streams.append((x, "", number, description, language, selected))
else:
streams = []
conflist.append(('',))
self["key_green"].setBoolean(False)
elif self.settings.menupage.getValue() == PAGE_SUBTITLES:
self.setTitle(_("Subtitle selection"))
conflist.append(('',))
conflist.append(('',))
self["key_red"].setBoolean(False)
self["key_green"].setBoolean(False)
if self.subtitlesEnabled():
sel = self.infobar.selected_subtitle
else:
sel = None
idx = 0
subtitlelist = self.getSubtitleList()
if len(subtitlelist):
for x in subtitlelist:
number = str(x[1])
description = "?"
language = _("<unknown>")
selected = ""
if sel and x[:4] == sel[:4]:
selected = _("Running")
selectedidx = idx
if x[4] != "und":
if LanguageCodes.has_key(x[4]):
language = LanguageCodes[x[4]][0]
else:
language = x[4]
if x[0] == 0:
description = "DVB"
number = "%x" % (x[1])
elif x[0] == 1:
description = "TTX"
number = "%x%02x" % (x[3],x[2])
elif x[0] == 2:
types = ("UTF-8 text","SSA / AAS",".SRT file")
description = types[x[2]]
streams.append((x, "", number, description, language, selected))
idx += 1
else:
streams = []
conflist.append(getConfigListEntry(_("Menu"), self.settings.menupage))
from Components.PluginComponent import plugins
from Plugins.Plugin import PluginDescriptor
if hasattr(self.infobar, "runPlugin"):
class PluginCaller:
def __init__(self, fnc, *args):
self.fnc = fnc
self.args = args
def __call__(self, *args, **kwargs):
self.fnc(*self.args)
Plugins = [ (p.name, PluginCaller(self.infobar.runPlugin, p)) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_AUDIOMENU) ]
if len(Plugins):
self["key_blue"].setBoolean(True)
conflist.append(getConfigListEntry(Plugins[0][0], ConfigNothing()))
self.plugincallfunc = Plugins[0][1]
if len(Plugins) > 1:
print "these plugins are installed but not displayed in the dialog box:", Plugins[1:]
self["config"].list = conflist
self["config"].l.setList(conflist)
self["streams"].list = streams
self["streams"].setIndex(selectedidx)
def __updatedInfo(self):
self.fillList()
def getSubtitleList(self):
s = self.infobar and self.infobar.getCurrentServiceSubtitle()
l = s and s.getSubtitleList() or [ ]
return l
def subtitlesEnabled(self):
return self.infobar.subtitles_enabled
def enableSubtitle(self, subtitles):
if self.infobar.selected_subtitle != subtitles:
self.infobar.subtitles_enabled = False
self.infobar.selected_subtitle = subtitles
if subtitles:
self.infobar.subtitles_enabled = True
def changeAC3Downmix(self, downmix):
if downmix.getValue() == True:
config.av.downmix_ac3.value = True
else:
config.av.downmix_ac3.value = False
config.av.downmix_ac3.save()
def changeMode(self, mode):
if mode is not None:
self.audioChannel.selectChannel(int(mode.getValue()))
def changeAudio(self, audio):
track = int(audio)
if isinstance(track, int):
if self.session.nav.getCurrentService().audioTracks().getNumberOfTracks() > track:
self.audioTracks.selectTrack(track)
def keyLeft(self):
if self.focus == FOCUS_CONFIG:
ConfigListScreen.keyLeft(self)
elif self.focus == FOCUS_STREAMS:
self["streams"].setIndex(0)
def keyRight(self, config = False):
if config or self.focus == FOCUS_CONFIG:
if self["config"].getCurrentIndex() < 3:
ConfigListScreen.keyRight(self)
elif hasattr(self, "plugincallfunc"):
self.plugincallfunc()
if self.focus == FOCUS_STREAMS and self["streams"].count() and config == False:
self["streams"].setIndex(self["streams"].count()-1)
def keyRed(self):
if self["key_red"].getBoolean():
self.colorkey(0)
def keyGreen(self):
if self["key_green"].getBoolean():
self.colorkey(1)
def keyYellow(self):
if self["key_yellow"].getBoolean():
self.colorkey(2)
def keyBlue(self):
if self["key_blue"].getBoolean():
self.colorkey(3)
def colorkey(self, idx):
self["config"].setCurrentIndex(idx)
self.keyRight(True)
def keyUp(self):
if self.focus == FOCUS_CONFIG:
self["config"].instance.moveSelection(self["config"].instance.moveUp)
elif self.focus == FOCUS_STREAMS:
|
softak/webfaction_demo
|
apps/stores/views.py
|
Python
|
bsd-3-clause
| 11,025
| 0.004535
|
import commonware.log
from datetime import datetime, timedelta
import re
from session_csrf import anonymous_csrf
from django.shortcuts import redirect, render, get_object_or_404
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import CreateView, UpdateView
from django.views.generic.base import View, TemplateView
from django.views.generic import ListView, DetailView, FormView
from django.views.decorators.csrf import csrf_protect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.utils.decorators import method_decorator
from django.core.urlresolvers import reverse
from django.http import Http404
from stores.resources import DiscountResource, DiscountGroupResource, \
ItemResource, ItemImageResource, DetailItemResource, \
PendingShippingRequestResource
from stores.models import Store, Item, ItemImage, Discount, DiscountGroup, \
StoreDesign
from stores.forms import StoreCreationForm, StoreImageUploadForm, \
ItemCreationForm, ItemImageCreationForm, \
DiscountCreationForm, DiscountGroupCreationForm, \
ContactInformationForm, \
ItemSearchForm, StoreDesignForm
from cart.models import PersonalTag, SocialBuy, SocialTag, ShippingRequest, paypalAP
from cart.forms import SocialBuyForm
from friends.models import Friendship
from utils import LoginRequiredMixin, render_to_string, thumbnail
log = commonware.log.getLogger('shoppindomain')
@login_required
def search(request):
form = ItemSearchForm(request.GET)
return render(request, 'stores/search.j.html', {
'form': form
})
# TODO store.get_buyers is slow.
# Maybe we should denormalize database and add to Store model m2m-field `buyers`
@login_required
@csrf_protect
def view_store(request, store_id=None):
store = get_object_or_404(Store, pk=int(store_id))
friend_ids = map(lambda friend: friend['id'],
Friendship.objects.friends_of(request.user).values('id'))
buyer_ids = store.get_buyer_ids()
buyers = User.objects.filter(id__in=buyer_ids).exclude(id=request.user.id)
return render(request, 'stores/store.j.html', {
'store': store,
'friend_buyers': buyers.filter(id__in=friend_ids),
'other_buyers': buyers.exclude(id__in=friend_ids),
})
class CreateStoreView(LoginRequiredMixin, CreateView):
model = Store
template_name = 'stores/manage/create.j.html'
def get_form_class(self):
return StoreCreationForm
def form_valid(self, form):
store = form.save(commit=False)
store.user = self.request.user
store.save()
return redirect(self.get_success_url())
def get_success_url(self):
return reverse('stores.create_store_done')
def dispatch(self, request, *args, **kwargs):
if hasattr(request.user, 'store'):
return render(request, self.template_name, {})
else:
return super(CreateStoreView, self).dispatch(request, *args, *
|
*kwargs)
@login_required
@csrf_protect
def store_image(request):
if not hasattr(request.user, 'store'):
raise Http404()
store = request.user.store
if request.method == 'POST':
form = StoreImageUploadForm(request.POST,
|
request.FILES,
instance=store)
if form.is_valid():
form.save()
return redirect(store_image)
else:
form = StoreImageUploadForm(instance=store)
return render(request, 'stores/manage/image.j.html', {
'image': request.user.store.window_image,
'form': form
})
class CreateStoreDoneView(LoginRequiredMixin, TemplateView):
template_name = 'stores/manage/create_done.j.html'
def get_context_data(self, **kwargs):
request_token = self.request.GET.get('request_token', None)
verification_code = self.request.GET.get('verification_code', None)
store = self.request.user.store
if store.paypal_email and not store.is_active:
return {}
if (not request_token is None) and (not verification_code is None):
response = paypalAP.callPermissions('GetAccessToken',
token=request_token,
verifier=verification_code)
response = paypalAP.callPermissionsOnBehalf('GetBasicPersonalData',
access_token = response.token,
secret_token = response.tokenSecret,
**{
'attributeList.attribute(0)': 'http://axschema.org/contact/email',
'attributeList.attribute(1)': 'http://schema.openid.net/contact/fullname'
})
personal_data = {}
key_re = re.compile(r'response.personalData\((?P<index>\d+)\)\.(?P<name>.+)')
for key in response.raw:
m = re.match(key_re, key)
if not m is None and m.group('name') == 'personalDataKey':
personal_data[response.get(key)] = response.get('response.personalData(%s).personalDataValue' % m.group('index'))
store.paypal_email = personal_data['http://axschema.org/contact/email']
store.save()
return {}
else:
response = paypalAP.callPermissions('RequestPermissions',
callback=self.request.build_absolute_uri(reverse('stores.create_store_done')),
**{
'scope(0)': 'ACCESS_BASIC_PERSONAL_DATA',
'scope(1)':'REFUND'
})
return { 'paypal_url': paypalAP.generate_permissions_redirect_url(response.token) }
class ItemView(LoginRequiredMixin, DetailView):
model = Item
context_object_name = 'item'
template_name = 'stores/item.j.html'
def get_context_data(self, **kwargs):
context = super(ItemView, self).get_context_data(**kwargs)
request = self.request
item = self.object
context.update({
'item_details_json': DetailItemResource().to_json(
obj=item, request=request),
'images_json': ItemImageResource().to_json(
obj=item.images.all(), request=request),
})
return context
class ManageContactInformationView(LoginRequiredMixin, FormView):
template_name = 'stores/manage/contact_information.j.html'
form_class = ContactInformationForm
def get_initial(self):
return { 'country': 'AU' }
def get_form_kwargs(self):
kwargs = super(ManageContactInformationView, self).get_form_kwargs()
kwargs.update({ 'instance': self.request.user.store })
return kwargs
def form_valid(self, form):
form.save()
return self.render_to_response(
self.get_context_data(form=form,updated=True))
def form_invalid(self, form):
return self.render_to_response(self.get_context_data(form=form))
class ManageItemsView(LoginRequiredMixin, TemplateView):
template_name = 'stores/manage/items.j.html'
def get_context_data(self, **kwargs):
items_per_page = 4
items = self.request.user.store.items.all()
items_json = ItemResource().to_json(obj=items[:items_per_page],
request=self.request)
kwargs.update({
'items_json': items_json,
'items_total': items.count(),
'items_per_page': items_per_page,
'item_form': ItemCreationForm(),
'image_form': ItemImageCreationForm()
})
return kwargs
class ManageDiscountsView(LoginRequiredMixin, TemplateView):
template_name = 'stores/manage/discounts.j.html'
def get_context_data(self, **kwargs):
context = super(ManageDiscountsView, self).get_context_data(**kwargs)
request = self.request
discounts_json = DiscountResource().to_json(
obj=Discount.objects.filter(store=request.use
|
cyphactor/lifecyclemanager
|
extra/plugins/docs/docs/__init__.py
|
Python
|
gpl-3.0
| 20
| 0
|
f
|
rom docs import *
| |
googleapis/python-resource-manager
|
samples/generated_samples/cloudresourcemanager_v3_generated_tag_keys_get_tag_key_async.py
|
Python
|
apache-2.0
| 1,477
| 0.000677
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "A
|
S IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetTagKey
# NOTE: This snippet has been automatically gen
|
erated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-resourcemanager
# [START cloudresourcemanager_v3_generated_TagKeys_GetTagKey_async]
from google.cloud import resourcemanager_v3
async def sample_get_tag_key():
# Create a client
client = resourcemanager_v3.TagKeysAsyncClient()
# Initialize request argument(s)
request = resourcemanager_v3.GetTagKeyRequest(
name="name_value",
)
# Make the request
response = await client.get_tag_key(request=request)
# Handle the response
print(response)
# [END cloudresourcemanager_v3_generated_TagKeys_GetTagKey_async]
|
hakanardo/pyvx
|
old/test/test_truediv.py
|
Python
|
mit
| 494
| 0.002024
|
from __future__ import
|
division
from pyvx import *
|
from array import array
class TestDiv(object):
def test_div(self):
g = Graph()
with g:
img = Image(3, 4, DF_IMAGE_U8, array('B', range(12)))
sa1 = img / 2
sa2 = img // 2
sa1.force()
sa2.force()
g.process()
assert [sa1.data[i] for i in range(6)] == [0, 0.5, 1.0, 1.5, 2.0, 2.5]
assert [sa2.data[i] for i in range(6)] == [0, 0, 1, 1, 2, 2]
|
i404ed/portfolio-dir
|
portfolio/apps/core/tests.py
|
Python
|
gpl-3.0
| 697
| 0.002869
|
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.test import TestCase
# Create your tests here.
from portfolio.apps.core.models import User
class UserModelTests(TestCase):
def test_admin_user_exist(self):
self.assertRaises(Http404, lambda: get_object_or_404(User, phone="3124795074"))
|
with self.assertRaises(Http404) as e:
get_object_or_404(User, phone="3124795074")
self.assertRaises(ObjectDoesNotExist, lambda: User.objects.get(phone="3124795074"))
with self.assertRaises(O
|
bjectDoesNotExist) as e:
User.objects.get(phone="3124795074")
|
ucolesanti/tinyos-wrappers
|
support/sdk/python/pynopath.py
|
Python
|
bsd-3-clause
| 296
| 0.037162
|
import sys
import
|
string
import re
import os
input_file = open(sys.argv[1])
prev_value = int(0)
true = 1
while true:
input_line = input_file.readline()
if input_line == "":
break
input_line = r
|
e.sub('#line.*','',input_line)
input_line = re.sub('# [0-9].*','',input_line)
print input_line
|
0ps/reprocks
|
client/reprocks_client.py
|
Python
|
mit
| 9,685
| 0.010532
|
#! /usr/bin/python
import threading
import socket
import sys,time
import SocketServer,struct,select
global bufLen
global endflag
global socksPort
###################
socksPort = 50000 #Default socks5 proxy port
###################
endflag = []
bufLen = 4*1024
class startThreadSoket(threading.Thread):
def __init__(self,socksPort):
threading.Thread.__init__(self)
self.socksPort = socksPort
def run(self):
socket_bind(
|
self.socksPort)
class control(threading.Thread):
def __init__(self,server_Conn,client_Conn,serverAddr,clientAddr,clientNum):
threading.Thread.__init__(self)
self.server_Conn = server_Conn
self.client_Conn = client_Conn
self.server_Ad
|
dr = serverAddr
self.client_Addr = clientAddr
self.clientNum = clientNum
def run(self):
global endflag
transferDataThreads = []
thread = 2
flag = self.clientNum
endflag.append(False)
y = transfer2Server(self.server_Conn,self.client_Conn,self.server_Addr,self.client_Addr,flag)
y.setDaemon(True)
z = transfer2Client(self.client_Conn,self.server_Conn,self.client_Addr,self.server_Addr,flag)
z.setDaemon(True)
transferDataThreads.append(y)
transferDataThreads.append(z)
for t in transferDataThreads:
t.start()
while True:
alive = True
for i in range(int(thread)):
alive = alive and transferDataThreads[i].isAlive()
if not alive:
time.sleep(3)
print "[Link %s] Connection has closed." % self.clientNum
break
break
class transfer2Server(threading.Thread):
def __init__(self,server_Conn,client_Conn,server_Addr,client_Addr,flag):
threading.Thread.__init__(self)
self.server_Conn = server_Conn
self.client_Conn = client_Conn
self.server_Addr = server_Addr
self.client_Conn = client_Conn
self.flag = flag
self.currentNum = self.flag+1
def run(self):
global bufLen
global endflag
servPeerName = self.server_Conn.getpeername()
clientPeerName = self.client_Conn.getpeername()
while True and not endflag[self.flag]:
try:
buf = self.client_Conn.recv(bufLen)
except:
print "Connection reset by peer.Program exit."
for m in endflag:
m = True
sys.exit()
if buf == '' or buf == '__closed__':
time.sleep(2)
self.client_Conn.close()
endflag[self.flag] = True
break
try:
self.server_Conn.send(buf)
print "[Link %s] %s --> %s : %s data" % (self.currentNum,clientPeerName,servPeerName,len(buf))
except:
endflag[self.flag] = True
time.sleep(2)
self.client_Conn.send('__closed__')
self.client_Conn.close()
break
class transfer2Client(threading.Thread):
def __init__(self,client_Conn,server_Conn,client_Addr,server_Addr,flag):
threading.Thread.__init__(self)
self.client_Conn = client_Conn
self.server_Conn = server_Conn
self.client_Addr = client_Addr
self.server_Addr = server_Addr
self.flag = flag
self.currentNum = flag+1
def run(self):
global bufLen
global endflag
servPeerName = self.server_Conn.getpeername()
clientPeerName = self.client_Conn.getpeername()
while True and not endflag[self.flag]:
buf = self.server_Conn.recv(bufLen)
if buf == '':
print "[Link %s] Server %s disconnect.End current thread." % (self.currentNum,clientPeerName)
time.sleep(2)
self.server_Conn.close()
endflag[self.flag] = True
break
try:
self.client_Conn.send(buf)
print "[Link %s] %s --> %s : %s data" % (self.currentNum,servPeerName,clientPeerName,len(buf))
except:
endflag[self.flag] = True
time.sleep(2)
self.server_Conn.close()
break
class ThreadingTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): pass
class Socks5Server(SocketServer.StreamRequestHandler):
def handle_tcp(self, sock, remote):
fdset = [sock, remote]
while True:
r, w, e = select.select(fdset, [], [])
if sock in r:
if remote.send(sock.recv(4096)) <= 0: break
if remote in r:
if sock.send(remote.recv(4096)) <= 0: break
def handle(self):
try:
pass
sock = self.connection
sock.recv(262)
sock.send("\x05\x00");
data = self.rfile.read(4)
mode = ord(data[1])
addrtype = ord(data[3])
if addrtype == 1:
addr = socket.inet_ntoa(self.rfile.read(4))
elif addrtype == 3:
addr = self.rfile.read(ord(sock.recv(1)[0]))
port = struct.unpack('>H', self.rfile.read(2))
reply = "\x05\x00\x00\x01"
try:
if mode == 1:
remote = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
remote.connect((addr, port[0]))
pass
else:
reply = "\x05\x07\x00\x01"
local = remote.getsockname()
reply += socket.inet_aton(local[0]) + struct.pack(">H", local[1])
except socket.error:
reply = '\x05\x05\x00\x01\x00\x00\x00\x00\x00\x00'
sock.send(reply)
if reply[1] == '\x00':
if mode == 1:
self.handle_tcp(sock, remote)
except socket.error:
pass
except IndexError:
pass
def socket_bind(socketPort):
socks_port = int(socketPort)
server = ThreadingTCPServer(('', socks_port), Socks5Server)
print 'Socks5 proxy bind port : %d' % socks_port + ' ok!'
server.serve_forever()
def usage():
print """
reprocks_client\t1.0
Code by H.K.T\temail:jlvsjp@qq.com
Thanks to ringzero@557.im for socks5 proxy module!
usage : %s -m 1 <reprocks_server_IP> <reprocks_server_port>
%s -m 2 <transferIP> <transferPort> <reprocks_server_IP> <reprocks_server_port>
%s -m 3 [bind_socket_port]
example:
%s -m 1 123.123.123.123 1230
#Rebind socks5 proxy to reprocks_server.
%s -m 2 127.0.0.1 22 123.123.123.123 1230
#Just port transmit in reconnection method.
%s -m 3 7070
#Just start socks5 proxy.
""" % (sys.argv[0],sys.argv[0],sys.argv[0],sys.argv[0],sys.argv[0],sys.argv[0])
def main():
global socksPort
global endflag
try:
if len(sys.argv)>=3:
if sys.argv[2]=='3':
if len(sys.argv)==4:
socksPort = int(sys.argv[3])
socket_bind(socksPort)
elif sys.argv[2]=='1' and len(sys.argv)==5:
socksProxy = startThreadSoket(socksPort)
socksProxy.setDaemon(True)
socksProxy.start()
reproket('localhost',socksPort,sys.argv[3],sys.argv[4])
elif sys.argv[2]=='2':
if len(sys.argv)==7:
reproket(sys.argv[3],sys.argv[4],sys.argv[5],sys.argv[6])
else:
usage()
else:
usage()
except KeyboardInterrupt:
print "Catch ctrl+c pressed,program will exit."
for m in endflag:
m = True
def reproket(transmitIP,transmitPort,clientIP,clientPort):
serverAddr = (transmitIP,int(transmitPort))
clientAddr = (clientIP,int(clientPort))
serverLink = []
clientLink = []
socketServer = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
socketServer.connect
|
pawpro/spa
|
spa/static/hashed.py
|
Python
|
bsd-3-clause
| 8,271
| 0.000484
|
import hashlib
import mimetypes
import os
import posixpath
import re
from time import time
from urlparse import urlsplit, urlunsplit
from werkzeug.exceptions import NotFound
from werkzeug.http import is_resource_modified, http_date
from spa.static.handlers import StaticHandler
from spa.utils import clean_path
class HashCache(object):
def __init__(self):
self.path_hashes = {}
self.contents = {}
def get_path_hash(self, path):
return self.path_hashes.get(path)
def set_path_hash(self, path, path_hash):
self.path_hashes[path] = path_hash
def get_contents(self, path):
return self.contents.get(path)
def set_contents(self, path, contents):
self.contents[path] = contents
class CacheBustingStaticHandler(StaticHandler):
css_url_patterns = (
(re.compile(r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""", re.IGNORECASE),
"""url("{hashed_url}")"""),
(re.compile(r"""(@import\s*["']\s*(.*?)["'])""", re.IGNORECASE),
"""@import url("{hashed_url}")"""),
)
def __init__(self, app, req, params, directory, hash_cache, **kwargs):
self.hash_cache = hash_cache
return super(CacheBustingStaticHandler, self).__init__(
app, req, params, directory, **kwargs
)
def get(self, filepath):
unhashed_path, path_hash = parse_hashed_filepath(filepath)
if unhashed_path is None:
return NotFound()
if self.hash_cache.get_path_hash(unhashed_path) is None:
# compute hash, and cache it.
file = self.get_file(unhashed_path)
if file is None:
return NotFound()
try:
hash_str = get_hash(file.handle)
self.hash_cache.set_path_hash(unhashed_path, hash_str)
finally:
file.handle.close()
# If hash we were passed doesn't equal the one we've computed and
# cached, then 404.
if path_hash != self.hash_cache.get_path_hash(unhashed_path):
return NotFound()
# For CSS stylesheets only, we'll rewrite content so that url()
# functions will point to hashed filenames instead of unhashed. The
# rewritten CSS content will be kept in memory.
if mimetypes.guess_type(filepath)[0] == 'text/css':
return self.make_css_response(unhashed_path)
return super(CacheBustingStaticHandler, self).get(unhashed_path)
def make_css_response(self, filepath):
def resp(environ, start_response):
file = self.get_file(filepath)
try:
headers = [('Date', http_date())]
if self.cache:
timeout = self.cache_timeout
etag = self.generate_etag(file.mtime, file.size, file.name)
headers += [
('Etag', '"%s"' % etag),
('Cache-Control', 'max-age=%d, public' % timeout)
]
if not is_resource_modified(environ, etag, last_modified=file.mtime):
start_response('304 Not Modified', headers)
return []
headers.append(('Expires', http_date(time() + timeout)))
else:
headers.append(('Cache-Control', 'public'))
contents = self.hash_cache.get_contents(filepath)
if contents is None:
contents = file.handle.read()
for pat, tpl in self.css_url_patterns:
converter = self.get_converter(tpl)
contents = pat.sub(converter, contents)
self.hash_cache.set_contents(filepath, contents)
headers.extend((
('Content-Type', file.mimetype),
('Content-Length', len(contents)),
('Last-Modified', http_date(file.mtime))
))
start_response('200 OK', headers)
return [contents]
finally:
file.handle.close()
return resp
def get_converter(self, tpl):
def converter(matchobj):
matched, url = matchobj.groups()
if url.startswith(('#', 'http:', 'https:', 'data:', '//')):
return url
return tpl.format(hashed_url=self.convert_css_url(url))
return converter
def convert_css_url(self, css_url):
split_url = urlsplit(css_url)
url_path = split_url.path
if not url_path.startswith('/'):
abs_url_path = self.make_path_absolute(url_path)
else:
abs_url_path = posixpath.realpath(url_path)
prefix = self.get_url_prefix()
# now make the path as it would be passed in to this handler when
# requested from the web. From there we can use existing methods on the
# class to resolve to a real file.
_, _, content_filepath = abs_url_path.partition(prefix)
content_filepath = clean_path(content_filepath)
content_file_hash = self.hash_cache.get_path_hash(content_filepath)
if content_file_hash is None:
content_file = self.get_file(content_filepath)
if content_file is None:
return 'NOT FOUND: "%s"' % url_path
try:
content_file_hash = get_hash(content_file.handle)
finally:
content_file.handle.close()
parts = list(split_url)
parts[2] = add_hash_to_filepath(url_path, content_file_hash)
url = urlunsplit(parts)
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
if '?#' in css_url:
parts = list(urlsplit(url))
if not parts[3]:
parts[2] += '?'
url = urlunsplit(parts)
return url
def get_url_prefi
|
x(self):
"""
R
|
eturn the mount point for this handler. So if you had a route like
this:
('/foo/bar/static/<path:filepath>', 'foo', Handler)
Then this function should return '/foo/bar/static/'
"""
env = self.request.environ
filepath = self.params['filepath']
prefix, _, _ = (env['SCRIPT_NAME'] +
env['PATH_INFO']).rpartition(filepath)
return prefix
def make_path_absolute(self, path):
"""
Given a relative url found inside the CSS file we're currently serving,
return an absolute form of that URL.
"""
env = self.request.environ
pinfo = posixpath.dirname(env['PATH_INFO'])
return posixpath.realpath(env['SCRIPT_NAME'] + pinfo + '/' + path)
def parse_hashed_filepath(filename, hash_len=12):
"""
Given a name like '/static/my_file.deadbeef1234.txt', return a tuple of the file name
without the hash, and the hash itself, like this:
('/static/my_file.txt', 'deadbeef1234')
If no hash part is found, then return (None, None).
"""
pat = '^(?P<before>.*)\.(?P<hash>[0-9,a-f]{%s})(?P<after>.*?)$' % hash_len
m = re.match(pat, filename)
if m is None:
return None, None
parts = m.groupdict()
return '{before}{after}'.format(**parts), parts['hash']
def add_hash_to_filepath(filepath, hash_str):
path, filename = os.path.split(filepath)
root, ext = os.path.splitext(filename)
return os.path.join(path, "%s.%s%s" % (root, hash_str, ext))
def get_hash(lines, hash_len=12):
md5 = hashlib.md5()
for line in lines:
md5.update(line)
return md5.hexdigest()[:hash_len]
class CacheBuster(object):
"""
A factory for making CacheBustingStaticHandler instances that share a cache
instance.
"""
def __init__(self, directory):
self.directory = directory
self.hash_cache = HashCache()
def __call__(self, app, req, params, **kwargs):
return CacheBustingStaticHandler(app, req, params,
directory=self.directory,
|
openstack/tap-as-a-service
|
neutron_taas/tests/unit/taas_client/osc/test_osc_tap_flow.py
|
Python
|
apache-2.0
| 9,088
| 0
|
# All Rights Reserved 2020
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import operator
from unittest import mock
from neutronclient.tests.unit.osc.v2 import fakes as test_fakes
from osc_lib import utils as osc_utils
from osc_lib.utils import columns as column_util
from oslo_utils import uuidutils
from neutron_taas.taas_client.osc import tap_flow as osc_tap_flow
from neutron_taas.tests.unit.taas_client.osc import fakes
columns_long = tuple(col for col, _, listing_mode in osc_tap_flow._attr_map
if listing_mode in (column_util.LIST_BOTH,
column_util.LIST_LONG_ONLY))
headers_long = tuple(head for _, head, listing_mode in
osc_tap_flow._attr_map if listing_mode in
(column_util.LIST_BOTH, column_util.LIST_LONG_ONLY))
sorted_attr_map = sorted(osc_tap_flow._attr_map, key=operator.itemgetter(1))
sorted_columns = tuple(col for col, _, _ in sorted_attr_map)
sorted_headers = tuple(head for _, head, _ in sorted_attr_map)
def _get_data(attrs, columns=sorted_columns):
return osc_utils.get_dict_properties(attrs, columns)
class TestCreateTapService(test_fakes.TestNeutronClientOSCV2):
columns = (
'Direction',
'ID',
'Name',
'Status',
'Tenant',
's
|
ource_port',
'tap_service_id',
)
def setUp(self):
super(TestCreateTapService, self).setUp()
self.cmd = osc_tap_flow.CreateTapFlow(self.app, self.namespace)
def test_create_tap_flow(self):
"""Test Create Tap Flow."""
fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
|
attrs={
'source_port': uuidutils.generate_uuid(),
'tap_service_id': uuidutils.generate_uuid()
}
)
self.neutronclient.post = mock.Mock(
return_value={osc_tap_flow.TAP_FLOW: fake_tap_flow})
arg_list = [
'--name', fake_tap_flow['name'],
'--port', fake_tap_flow['source_port'],
'--tap-service', fake_tap_flow['tap_service_id'],
'--direction', fake_tap_flow['direction'],
]
verify_list = [
('name', fake_tap_flow['name']),
('port', fake_tap_flow['source_port']),
('tap_service', fake_tap_flow['tap_service_id']),
]
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
with mock.patch.object(self.neutronclient, 'find_resource') as nc_find:
nc_find.side_effect = [
{'id': fake_tap_flow['source_port']},
{'id': fake_tap_flow['tap_service_id']}
]
columns, data = self.cmd.take_action(parsed_args)
self.neutronclient.post.assert_called_once_with(
'/taas/tap_flows',
body={
osc_tap_flow.TAP_FLOW:
{
'name': fake_tap_flow['name'],
'source_port': fake_tap_flow['source_port'],
'tap_service_id': fake_tap_flow['tap_service_id'],
'direction': fake_tap_flow['direction']
}
}
)
self.assertEqual(self.columns, columns)
self.assertItemEqual(_get_data(fake_tap_flow), data)
class TestListTapFlow(test_fakes.TestNeutronClientOSCV2):
def setUp(self):
super(TestListTapFlow, self).setUp()
self.cmd = osc_tap_flow.ListTapFlow(self.app, self.namespace)
def test_list_tap_flows(self):
"""Test List Tap Flow."""
fake_tap_flows = fakes.FakeTapFlow.create_tap_flows(
attrs={
'source_port': uuidutils.generate_uuid(),
'tap_service_id': uuidutils.generate_uuid(),
},
count=2)
self.neutronclient.list = mock.Mock(return_value=fake_tap_flows)
arg_list = []
verify_list = []
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
headers, data = self.cmd.take_action(parsed_args)
self.neutronclient.list.assert_called_once()
self.assertEqual(headers, list(headers_long))
self.assertListItemEqual(
list(data),
[_get_data(fake_tap_flow, columns_long) for fake_tap_flow
in fake_tap_flows[osc_tap_flow.TAP_FLOWS]]
)
class TestDeleteTapFlow(test_fakes.TestNeutronClientOSCV2):
def setUp(self):
super(TestDeleteTapFlow, self).setUp()
self.neutronclient.find_resource = mock.Mock(
side_effect=lambda _, name_or_id: {'id': name_or_id})
self.cmd = osc_tap_flow.DeleteTapFlow(self.app, self.namespace)
def test_delete_tap_flow(self):
"""Test Delete tap flow."""
fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
attrs={
'source_port': uuidutils.generate_uuid(),
'tap_service_id': uuidutils.generate_uuid(),
}
)
self.neutronclient.delete = mock.Mock()
arg_list = [
fake_tap_flow['id'],
]
verify_list = [
(osc_tap_flow.TAP_FLOW, [fake_tap_flow['id']]),
]
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
result = self.cmd.take_action(parsed_args)
self.neutronclient.delete.assert_called_once_with(
osc_tap_flow.resource_path % ('tap_flows',
fake_tap_flow['id']))
self.assertIsNone(result)
class TestShowTapFlow(test_fakes.TestNeutronClientOSCV2):
def setUp(self):
super(TestShowTapFlow, self).setUp()
self.neutronclient.find_resource = mock.Mock(
side_effect=lambda _, name_or_id: {'id': name_or_id})
self.cmd = osc_tap_flow.ShowTapFlow(self.app, self.namespace)
def test_show_tap_flow(self):
"""Test Show tap flow."""
fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
attrs={
'source_port': uuidutils.generate_uuid(),
'tap_service_id': uuidutils.generate_uuid(),
}
)
self.neutronclient.get = mock.Mock(
return_value={osc_tap_flow.TAP_FLOW: fake_tap_flow})
arg_list = [
fake_tap_flow['id'],
]
verify_list = [
(osc_tap_flow.TAP_FLOW, fake_tap_flow['id']),
]
parsed_args = self.check_parser(self.cmd, arg_list, verify_list)
headers, data = self.cmd.take_action(parsed_args)
self.neutronclient.get.assert_called_once_with(
osc_tap_flow.resource_path % ('tap_flows',
fake_tap_flow['id']))
self.assertEqual(sorted_headers, headers)
self.assertItemEqual(_get_data(fake_tap_flow), data)
class TestUpdateTapFlow(test_fakes.TestNeutronClientOSCV2):
_new_name = 'new_name'
columns = (
'Direction',
'ID',
'Name',
'Status',
'Tenant',
'source_port',
'tap_service_id',
)
def setUp(self):
super(TestUpdateTapFlow, self).setUp()
self.cmd = osc_tap_flow.UpdateTapFlow(self.app, self.namespace)
self.neutronclient.find_resource = mock.Mock(
side_effect=lambda _, name_or_id: {'id': name_or_id})
def test_update_tap_flow(self):
"""Test update tap service"""
fake_tap_flow = fakes.FakeTapFlow.create_tap_flow(
attrs={
'source_port': uuidutils.generate_uuid(),
'tap_service_id': uu
|
saketkc/bio-tricks
|
meme_parser/meme_processory.py
|
Python
|
mit
| 2,759
| 0.007249
|
#!/usr/bin/env python
"""
Process meme.txt files to
generate conservation plots
"""
import argparse
import csv
import sys
import seaborn as sns
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats.stats import pearsonr
from Bio import motifs
def plot_meme_against_phylo(meme_record, phylo):
sns.set(style="darkgrid")
def position_wise_profile(counts_dict, length):
profile = map(dict, zip(*[[(k, v) for v in value] for k, value in counts_dict.items()]))
return profile
def find_max_occurence(profile, max_count=2):
sorted_profile = []
for p in profile:
sorted_profile.append(sorted(p.items(), key=lambda x:x[1]))
for i,p in enumerate(sorted_profile):
sorted_profile[i] = p[-max_count:]
return sorted_profile
def main(argv):
parser = argparse.ArgumentParser(description='Process meme files')
parser.add_argument('-i', '--meme', metavar='<meme_out>', help='Meme input file', required=True)
parser.add_argument('-m', '--motif', metavar='<motif_no>', help='Motif number', required=True, type=int)
parser.add_argument('-c', '--phylo', metavar='<phylo_out>', help='PhyloP conservation scores', required=True)
parsed = parser.parse_args(argv)
handle = open(parsed.meme)
records = motifs.parse(handle, 'meme')
record = records[parsed.motif-1]
phylo_data = csv.reader(open(parsed.phylo,'r'), delimiter='\t')
phylo_scores = []
for line in phylo_data:
phylo_scores.append(float(line[2]))
print "Motif length", record.length
print "phylo length", len(phylo_scores)
profile = position_wise_profile(record.counts, record.length)
max_occur = find_max_occurence(profile, max_count=1)
motif_scores = []
for position in max_occur:
motif_scores.append(position[0][1])
pr = pearsonr(np.array(motif_scores), np.array(phylo_scores))
print 'Pearson correlation: {}'.format(pr)
fig, ax = plt.subplots()
ax= sns.regplot(y=np.array(motif_scores), x=np.array(phylo_scores), scatter=True)
ax.set(ylabel="Count of most freq nucleotide", xlabel="PhyloP scores", title='CTCF | pearsonr = {}, p-val={}'.for
|
mat(pr[0],pr[1]));
fig.savefig('{}_motif{}_scatter.png'.format(parsed.phylo, parsed.motif))
x = np.linspace(1,len(phylo_scores)+1,num=len(phylo_scores), endpoint=False)
f, (ax1, ax2) = plt.subplots(2, 1)
x1 = sns.barplot(x,y=np.array(motif_scores), ax=ax1)
x2 = sns.barplot(x
|
,y=np.array(phylo_scores), ax=ax2)
x1.set(ylabel='Counts of most freq nucleotide', xlabel='Position in motif')
x2.set(ylabel='Phylop Score', xlabel='Position in motif')
f.tight_layout()
f.savefig('{}_motif{}_trend.png'.format(parsed.phylo, parsed.motif))
if __name__ == "__main__":
main(sys.argv[1:])
|
cateee/fosdem-volunteers
|
volunteers/urls.py
|
Python
|
agpl-3.0
| 403
| 0.027295
|
from django.conf.urls import patterns, include, url
from volunteers import views
urlpatterns = patterns('',
#url(r'^$', views.index, name='volunteer_index'),
#url(r'^(?P<volunteer_id>\d+)/$', views.volunteer_deta
|
il, name='volunteer_detail'),
#url(r'^AddTasks/$', views.add_tasks, name='add_tasks'),
#url(r'^(?P<volunteer_id>\d+)/edit/$', views.volunteer_edit, name='volunteer_edit'
|
),
)
|
mjg2203/edx-platform-seas
|
common/test/bok_choy/edxapp_pages/lms/open_response.py
|
Python
|
agpl-3.0
| 7,752
| 0.001806
|
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise, fulfill_after, fulfill_before
class OpenResponsePage(PageObject):
"""
Open-ended response in the courseware.
"""
@property
def name(self):
return "lms.open_response"
@property
def requirejs(self):
return []
@property
def js_globals(self):
return []
def url(self):
"""
Open-response isn't associated with a particular URL.
"""
raise NotImplemented
def is_browser_on_page(self):
return self.is_css_present('section.xmodule_CombinedOpenEndedModule')
@property
def assessment_type(self):
"""
Return the type of assessment currently active.
Options are "self", "ai", or "peer"
"""
labels = self.css_text('section#combined-open-ended-status>div.statusitem-current')
if len(labels) < 1:
self.warning("Could not find assessment type label")
# Provide some tolerance to UI changes
label_compare = labels[0].lower().strip()
if 'self' in label_compare:
return 'self'
elif 'ai' in label_compare:
return 'ai'
elif 'peer' in label_compare:
return 'peer'
else:
raise ValueError("Unexpected assessment type: '{0}'".format(label))
@property
def prompt(self):
"""
Return an HTML string representing the essay prompt.
"""
prompt_css = "section.open-ended-child>div.prompt"
prompts = self.css_map(prompt_css, lambda el: el.html.strip())
if len(prompts) == 0:
self.warning("Could not find essay prompt on page.")
return ""
elif len(prompts) > 1:
self.warning("Multiple essay prompts found on page; using the first one.")
return prompts[0]
@property
def has_rubric(self):
"""
Return a boolean indicating whether the rubric is available.
"""
return self.is_css_present('div.rubric')
@property
def rubric_categories(self):
"""
Return a list of categories available in the essay rubric.
Example:
["Writing Applications", "Language Conventions"]
The rubric is not always visible; if it's not available,
this will return an empty list.
"""
return self.css_text('span.rubric-category')
@property
def rubric_feedback(self):
"""
Return a list of correct/incorrect feedback for each rubric category (e.g. from self-assessment).
Example: ['correct', 'incorrect']
If no feedback is available, returns an empty list.
If feedback could not be interpreted (unexpected CSS class),
the list will contain a `None` item.
"""
# Get the green checkmark / red x labels
# We need to filter out the similar-looking CSS classes
# for the rubric items that are NOT marked correct/incorrect
feedback_css = 'div.rubric-label>label'
labels = filter(
lambda el_class: el_class != 'rubric-elements-info',
self.css_map(feedback_css, lambda el: el['class'])
)
# Map CSS classes on the labels to correct/incorrect
def map_feedback(css_class):
if 'choicegroup_incorrect' in css_class:
return 'incorrect'
elif 'choicegroup_correct' in css_class:
return 'correct'
else:
return None
return map(map_feedback, labels)
@property
def alert_message(self):
"""
Alert message displayed to the user.
"""
alerts = self.css_text("div.open-ended-alert")
if len(alerts) < 1:
return ""
else:
return alerts[0]
@property
def grader_status(self):
"""
Status message from the grader.
If not present, return an empty string.
"""
status_list = self.css_text('div.grader-status')
if len(status_list) < 1:
self.warning("No grader status found")
return ""
elif len(status_list) > 1:
self.warning("Multiple grader statuses found; returning the first one")
return status_list[0]
def set_response(self, response_str):
"""
Input a response to the prompt.
"""
input_css = "textarea.short-form-response"
self.css_fill(input_css, response_str)
def save_response(self):
"""
Save the response for later submission.
"""
status_msg_shown = EmptyPromise(
lambda: 'save' in self.alert_message.lower(),
"Status message saved"
)
with fulfill_after(status_msg_shown):
self.css_click('input.save-button')
def submit_response(self):
"""
Submit a response for grading.
"""
with fulfill_after(self._submitted_promise(self.assessment_type)):
with self.handle_alert():
self.css_click('input.submit-button')
def submit_self_assessment(self, scores):
"""
Submit a self-assessment rubric.
`scores` is a list of scores (0 to max score) for each category in the rubric.
"""
# Warn if we have the wrong number of scores
num_categories = len(self.rubric_categories)
if len(scores) != num_categories:
msg = "Recieved {0} scores but there are {1} rubric categories".format(
len(scores), num_categories
)
self.warning(msg)
# Set the score for each category
for score_index in range(len(scores)):
# Check that we have the enough radio buttons
category_css = "div.rubric>ul.rubric-list:nth-of-type({0})".format(score_index + 1)
if scores[score_index] > self.css_count(category_css + ' input.score-selection'):
msg = "Tried to select score {0} but there are only {1} options".format(score_num, len(inputs))
self.warning(msg)
# Check the radio button at the correct index
else:
input_css = (category_css +
">li.rubric-list-item:nth-of-type({0}) input.score-selection".format(
scores[score_index] + 1)
)
self.css_check(input_css)
# Wait for the button to become enabled
button_css = 'input.submit-button'
button_enabled = EmptyPromise(
lambda: all(self.css_map(button_css, lambda el: not el
|
['disabled'])),
"Submit
|
button enabled"
)
# Submit the assessment
with fulfill_before(button_enabled):
self.css_click(button_css)
def _submitted_promise(self, assessment_type):
"""
Return a `Promise` that the next step is visible after submitting.
This will vary based on the type of assessment.
`assessment_type` is either 'self', 'ai', or 'peer'
"""
if assessment_type == 'self':
return EmptyPromise(lambda: self.has_rubric, "Rubric has appeared")
elif assessment_type == 'ai':
return EmptyPromise(
lambda: self.grader_status != 'Unanswered',
"Problem status is no longer 'unanswered'"
)
elif assessment_type == 'peer':
return EmptyPromise(lambda: False, "Peer assessment not yet implemented")
else:
self.warning("Unrecognized assessment type '{0}'".format(assessment_type))
return EmptyPromise(lambda: True, "Unrecognized assessment type")
|
rockyzhengwu/mlpractice
|
algorithm/perceptron.py
|
Python
|
mit
| 44
| 0.045455
|
#!/usr/bin/env python
#-*-codi
|
ng=utf-8-*-
| |
sstjohn/pox
|
tests/topology/topology.py
|
Python
|
gpl-3.0
| 423
| 0.014184
|
# TODO: use a unit-testing library for asserts
# invoke with:
#
|
./pox.py --script=tests.topology.topology topology
#
# Maybe there is a less awkward way to invoke tests...
from pox.core import core
from pox.lib.revent import *
topology = core.components['topology']
def autobinds_correctly():
topology.listenTo(core)
return True
if not autobi
|
nds_correctly():
raise AssertionError("Did no autobind correctly")
|
btrent/knave
|
pychess/__init__.py
|
Python
|
gpl-3.0
| 49
| 0
|
VERSION = "0.12beta4"
VERSION_NAME =
|
"Anderssen"
|
|
sokolowskik/Tutorials
|
ThinkPython/chap11/ex10.py
|
Python
|
mit
| 980
| 0.006122
|
f = raw_input('filename: ')
def make_word_dict(f):
"""
Read the file f and, store each word as dictionary key.
Values are not important.
Return the dictioonary.
"""
c = open(f)
d = dict()
i =
|
0
for word in c:
w = word.strip('\n\r')
d[w] = i
i += 1
#l.append(w)
c.close()
return d
def rotate_word(s, n):
"""
Rotate each char in
|
a string by the given amount.
Wrap around to the beginning (if necessary).
"""
rotate = ''
for c in s:
start = ord('a')
num = ord(c) - start
r_num = (num + n) % 26 + start
r_c = chr(r_num)
rotate += r_c
return rotate
if __name__ == '__main__':
word_dict = make_word_dict(f)
for k in word_dict:
for i in range(1, 14):
rot_k = rotate_word(k, i)
if rot_k in word_dict:
if not rot_k == k:
print k, 'and', rot_k, 'are a rotate pair'
|
PrismTech/opensplice
|
src/api/dcps/python/test/TestSequence.py
|
Python
|
gpl-3.0
| 2,566
| 0.005456
|
#
# Vortex OpenSplice
#
# This software and documentation are Copyright 2006 to TO_YEAR ADLINK
# Technology Limited, its affiliated companies and licensors. All rights
# reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
Created on Dec 8, 2017
@author: prismtech
'''
import unittest
import struct
import countTest
if countTest.count_test == False:
import Sequence.basic.module_Sequence
class TestSequence(unittest.TestCase):
def testCopyInCopyOut(self):
data = Sequence.basic.module_Sequence.Sequence_struct(
long1=12,
seq1=[21, 32, 43]
)
print('data: ' + str(data))
print('data._get_packing_fmt(): ', data._get_packing_fmt())
print('data._get_packing_args(): ', data._get_packing_args())
buffer = data._serialize()
print('buffer: ', buffer)
values = struct.unpack(data._get_packing_fmt(), buffer)
data1 = Sequence.basic.module_Sequence.Sequence_struct()
data1._deserialize(list(values))
self.assertEqual(data.long1, data1.long1)
self.assertEqual(data.seq1[0],data1.seq1[0])
self.assertEqual(data.seq1[1],data1.seq1[1])
self.assertEqual(data.seq1[2],data1.seq1[2])
def testCopyInCopyOutEmpty(self):
data = Sequence.basic.module_Sequence.Sequence_struct(
long1=12,
seq1=[]
|
)
print('data: ' + str(data))
print('data._get_packing_fmt(): ', data._get_p
|
acking_fmt())
print('data._get_packing_args(): ', data._get_packing_args())
buffer = data._serialize()
print('buffer: ', buffer)
values = struct.unpack(data._get_packing_fmt(), buffer)
data1 = Sequence.basic.module_Sequence.Sequence_struct()
data1._deserialize(list(values))
self.assertEqual(data.long1, data1.long1)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'TestSequence.testCopyInCopyOut']
unittest.main()
|
ulif/pulp
|
server/test/unit/server/webservices/views/test_consumer_groups.py
|
Python
|
gpl-2.0
| 24,673
| 0.003364
|
import json
import unittest
import mock
from django.http import HttpResponseBadRequest
from base import (assert_auth_CREATE, assert_auth_READ, assert_auth_UPDATE, assert_auth_DELETE,
assert_auth_EXECUTE)
from pulp.server.exceptions import InvalidValue, MissingResource, MissingValue, OperationPostponed
from pulp.server.managers.consumer.group import query
from pulp.server.webservices.views import util
from pulp.server.webservices.views.consumer_groups import (serialize,
ConsumerGroupAssociateActionView,
ConsumerGroupBindingView,
ConsumerGroupBindingsView,
ConsumerGroupContentActionView,
ConsumerGroupResourceView,
ConsumerGroupSearchView,
ConsumerGroupUnassociateActionView,
ConsumerGroupView,)
class TestconsumerGroupView(unittest.TestCase):
"""
Test consumer groups view.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_all_consumer_groups(self, mock_collection, mock_resp):
"""
Test the consumer groups retrieval.
"""
consumer_mock = mock.MagicMock()
resp = [{'id': 'foo', 'display_name': 'bar'}]
consumer_mock.find.return_value = resp
mock_collection.return_value = consumer_mock
request = mock.MagicMock()
consumer_group = ConsumerGroupView()
response = consumer_group.get(request)
expected_cont = [{'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}]
mock_resp.assert_called_once_with(expected_cont)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
@mock.patch('pulp.server.webservices.views.consumer_groups.generate_redirect_response')
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_create_consumer_group(self, mock_factory, mock_resp, mock_redirect):
"""
Test consumer group creation.
"""
resp = {'id': 'foo', 'display_name': 'bar'}
expected_cont = {'id': 'foo', 'display_name': 'bar', '_href': '/v2/consumer_groups/foo/'}
request = mock.MagicMock()
request.body = json.dumps({'id': 'foo', 'display_name': 'bar'})
mock_factory.consumer_group_manager.return_value.create_consumer_group.return_value = resp
consumer_group = ConsumerGroupView()
response = consumer_group.post(request)
mock_resp.assert_called_once_with(expected_cont)
mock_redirect.assert_called_once_with(mock_resp.return_value, expected_cont['_href'])
self.assertTrue(response is mock_redirect.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_create_consumer_group_invalid_param(self):
"""
Test consumer group creation with invalid parameters.
"""
request = mock.MagicMock()
request.body = json.dumps({'id': 'foo', 'display_name': 'bar', 'invalid_param': 'some'})
consumer_group = ConsumerGroupView()
try:
response = consumer_group.post(request)
except InvalidValue, response:
pass
else:
raise AssertionError("Invalidvalue should be raised with invalid options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['invalid_param'])
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_CREATE())
def test_create_consumer_group_missing_param(self):
"""
Test consumer group creation with missing required group id.
"""
request = mock.MagicMock()
request.body
|
= json.dumps({'display_name': 'bar'})
consumer_group = ConsumerGroupView()
try:
response = consumer_group.post(request)
except MissingValue, response:
pass
|
else:
raise AssertionError("MissingValue should be raised with missing options")
self.assertEqual(response.http_status_code, 400)
self.assertEqual(response.error_data['property_names'], ['id'])
class TestconsumerGroupResourceView(unittest.TestCase):
"""
Test consumer groups resource view.
"""
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_DELETE())
@mock.patch('pulp.server.webservices.views.consumer_groups.generate_json_response')
@mock.patch('pulp.server.webservices.views.consumer_groups.factory')
def test_delete_consumer_group_resource(self, mock_factory, mock_resp):
"""
Test consumer group delete resource.
"""
mock_group_manager = mock.MagicMock()
mock_factory.consumer_group_manager.return_value = mock_group_manager
mock_group_manager.delete_consumer_group.return_value = None
request = mock.MagicMock()
consumer_group_resource = ConsumerGroupResourceView()
response = consumer_group_resource.delete(request, 'test-group')
mock_group_manager.delete_consumer_group.assert_called_once_with('test-group')
mock_resp.assert_called_once_with(None)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch(
'pulp.server.webservices.views.consumer_groups.generate_json_response_with_pulp_encoder')
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_consumer_group_resource(self, mock_collection, mock_resp):
"""
Test single consumer group retrieval.
"""
consumer_mock = mock.MagicMock()
consumer_mock.find_one.return_value = {'id': 'foo'}
mock_collection.return_value = consumer_mock
request = mock.MagicMock()
consumer_group = ConsumerGroupResourceView()
response = consumer_group.get(request, 'foo')
expected_cont = {'id': 'foo', '_href': '/v2/consumer_groups/foo/'}
mock_resp.assert_called_once_with(expected_cont)
self.assertTrue(response is mock_resp.return_value)
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_READ())
@mock.patch('pulp.server.webservices.views.consumer_groups.ConsumerGroup.get_collection')
def test_get_invalid_consumer_group_resource(self, mock_collection):
"""
Test nonexistent consumer group retrieval.
"""
mock_collection.return_value.find_one.return_value = None
request = mock.MagicMock()
consumer_group = ConsumerGroupResourceView()
try:
response = consumer_group.get(request, 'nonexistent_id')
except MissingResource, response:
pass
else:
raise AssertionError("MissingResource should be raised with nonexistent_group")
self.assertEqual(response.http_status_code, 404)
self.assertEqual(response.error_data['resources'], {'consumer_group': 'nonexistent_id'})
@mock.patch('pulp.server.webservices.views.decorators._verify_auth',
new=assert_auth_UPDATE())
@mock.patch(
'pulp.server.webse
|
tfgraph/tfgraph
|
setup.py
|
Python
|
apache-2.0
| 1,296
| 0
|
#!/usr/bin/env python3
# coding: utf-8
import io
from setuptools import setup, find_packages
# http://blog.ionelmc.ro/2014/05/25/python-packaging/
setup(
name="tfgraph",
version="0.2",
description="Python's Tensorflow Graph Library",
author="garciparedes",
author_email="sergio@garciparedes.me",
url="http://tfgraph.readthedocs.io/en/latest/",
download_url="https://github.com/tfgraph/tfgraph",
keywords=[
"tfg", "bigdata", "tensorflow",
"graph theory", "pagerank", "university of valladolid",
],
python_requires=">=3.5",
|
install_requires=[
"numpy>=1.11",
"pandas>=0.20",
"tensorflow>=1.0",
],
tests_require=[
"pytest"
],
package
|
s=find_packages(),
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Mathematics",
],
long_description=io.open('README.rst', encoding='utf-8').read(),
include_package_data=True,
zip_safe=False,
)
|
Akagi201/learning-python
|
pyglet/mygame/game/physicalobject.py
|
Python
|
mit
| 2,456
| 0.000814
|
import pyglet
import util
class PhysicalObject(pyglet.sprite.Sprite):
"""A sprite with physical properties such as velocity"""
def __init__(self, *args, **kwargs):
super(PhysicalObject, self).__init__(*args, **kwargs)
# Velocity
self.velocity_x, self.velocity_y = 0.0, 0.0
# Flags to toggle collision with bullets
self.reacts_to_bullets = True
self.is_bullet = False
# Flag to remove this object from the game_object list
self.dead = False
# List of new objects to go in the game_objects list
self.new_objects = []
# Tell the game handler about any event handlers
# Only applies to things with keyboard/mouse input
self.event_handlers = []
def update(self, dt):
"""This method should be called every frame."""
# Update position according to velocity and time
self.x += self.velocity_x * dt
self.y += self.velocity_y * dt
# Wrap around the screen if necessary
self.check_bounds()
def check_bounds(self):
"""Use the classic Asteroids screen wrapping behavior"""
min_x = -self.image.width / 2
min_y = -self.image.height / 2
max_x = 800 + self.image.width / 2
max_y = 600 + self.image.height / 2
if self.x < min_x:
self.x = max_x
if self.y < min_y:
self.y = max_y
if self.x > max_x:
self.x = min_x
if self.y > max_y:
self.y = min_y
def collides_with(self, other_object):
"""Determine if this object collides with another"""
# Ignore bullet collisions if we're supposed to
if not self.reacts_to_bullets and other_object.is_bullet:
return False
if self.is_bullet and not other_object.reacts_to_bullets:
return False
# Calculate distance between object centers that would be a collision,
# assuming square resources
collision_distance = self.image.width * 0.5 * self.scale \
+ other_object.image.width * 0.5 * other_object.scale
# Get distance using position tuples
actual_distance = util.distance(self.position, other_object.position
|
)
return (actual_distance <= collision_distance)
def handle_collision_wi
|
th(self, other_object):
if other_object.__class__ is not self.__class__:
self.dead = True
|
andrefarzat/django-styleguide
|
styleguide/views.py
|
Python
|
mit
| 897
| 0
|
# -*- coding: utf-8 -*-
from django.core.cache import cache
from django.shortcuts import render
from django.http import Http404
from styleguide.utils import (Styleguide, STYLEGUIDE_DIR_NAME,
STYLEGUIDE_DEBUG, STYLEGUIDE_CACHE_NAME,
|
STYLEGUIDE_ACCESS)
def index(request, module_name=None, component_name=No
|
ne):
if not STYLEGUIDE_ACCESS(request.user):
raise Http404()
styleguide = None
if not STYLEGUIDE_DEBUG:
styleguide = cache.get(STYLEGUIDE_CACHE_NAME)
if styleguide is None:
styleguide = Styleguide()
cache.set(STYLEGUIDE_CACHE_NAME, styleguide, None)
if module_name is not None:
styleguide.set_current_module(module_name)
context = {'styleguide': styleguide}
index_path = "%s/index.html" % STYLEGUIDE_DIR_NAME
return render(request, index_path, context)
|
aifeiasdf/Template-tookit
|
t/macro_test.py
|
Python
|
artistic-2.0
| 2,554
| 0.002741
|
from template.test import TestCase, main
class MacroTest(TestCase):
def testMacro(self):
config = { "INCLUDE_PATH": "test/src",
"TRIM": 1,
"EVAL_PYTHON": True }
self.Expect(DATA, config, self._callsign())
DATA = r"""
-- test --
[% MACRO foo INCLUDE foo -%]
foo: [% foo %]
foo(b): [% foo(a = b) %]
-- expect --
foo: This is the foo file, a is alpha
foo(b): This is the foo file, a is bravo
-- test --
foo: [% foo %].
-- expect --
foo: .
-- test --
[% MACRO f
|
oo(a) INCLUDE foo -%]
foo: [% foo %]
foo(c): [% foo(c) %]
-- expect --
foo: This is the foo file, a is
foo(c): This is the foo file, a is charlie
-- test --
[% BLOCK mypage %]
Header
[% content %]
Footer
[% END %]
[%- MACRO content BLOCK -%]
This is a macro which encapsulates a template block.
a: [% a -%]
[% END -%]
begin
[% INCLUDE mypage %]
mid
[% INCLUDE mypage a = 'New Alpha' %]
end
-- expect --
begin
Header
This is a macro which encapsulates a template block.
a: alpha
|
Footer
mid
Header
This is a macro which encapsulates a template block.
a: New Alpha
Footer
end
-- test --
[% BLOCK table %]
<table>
[% rows %]
</table>
[% END -%]
[% # define some dummy data
udata = [
{ id => 'foo', name => 'Fubar' },
{ id => 'bar', name => 'Babar' }
]
-%]
[% # define a macro to print each row of user data
MACRO user_summary INCLUDE user_row FOREACH user = udata
%]
[% # here's the block for each row
BLOCK user_row %]
<tr>
<td>[% user.id %]</td>
<td>[% user.name %]</td>
</tr>
[% END -%]
[% # now we can call the main table template, and alias our macro to 'rows'
INCLUDE table
rows = user_summary
%]
-- expect --
<table>
<tr>
<td>foo</td>
<td>Fubar</td>
</tr><tr>
<td>bar</td>
<td>Babar</td>
</tr>
</table>
-- test --
[% MACRO one BLOCK -%]
one: [% title %]
[% END -%]
[% saveone = one %]
[% MACRO two BLOCK; title="2[$title]" -%]
two: [% title %] -> [% saveone %]
[% END -%]
[% two(title="The Title") %]
-- expect --
two: 2[The Title] -> one:
-- test --
[% MACRO one BLOCK -%]
one: [% title %]
[% END -%]
[% saveone = \one %]
[% MACRO two BLOCK; title="2[$title]" -%]
two: [% title %] -> [% saveone %]
[% END -%]
[% two(title="The Title") %]
-- expect --
two: 2[The Title] -> one: 2[The Title]
-- test --
-- name number macro --
[% MACRO number(n) GET n.chunk(-3).join(',') -%]
[% number(1234567) %]
-- expect --
1,234,567
-- test --
-- name python macro --
[% MACRO triple(n) PYTHON %]
n = stash.get('n').value()
print n * 3
[% END -%]
[% triple(10) %]
-- expect --
30
"""
main()
|
CityofPittsburgh/pittsburgh-purchasing-suite
|
migrations/versions/29562eda8fbc_add_vendor_opportunity_category_models.py
|
Python
|
bsd-3-clause
| 3,927
| 0.012478
|
"""add vendor, opportunity, category models
Revision ID: 29562eda8fbc
Revises: 3473ff14af7e
Create Date: 2015-05-28 02:31:47.039725
"""
# revision identifiers, used by Alembic.
revision = '29562eda8fbc'
down_revision = '3473ff14af7e'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('category',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('nigp_code', sa.Integer(), nullable=True),
sa.Column('category', sa.String(length=255), nullable=True),
sa.Column('subcategory', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_category_id'), 'category', ['id'], unique=False)
op.create_table('vendor',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('business_name', sa.String(length=255), nullable=False),
sa.Column('email', sa.String(length=80), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('first_name', sa.String(length=30), nullable=True),
sa.Column('last_name', sa.String(length=30), nullable=True),
sa.Column('phone_number', sa.String(length=20), nullable=True),
sa.Column('fax_number', sa.String(length=20), nullable=True),
sa.Column('minority_owned', sa.Boolean(), nullable=True),
sa.Column('veteran_owned', sa.Boolean(), nullable=True),
sa.Column('woman_owned', sa.Boolean(), nullable=True),
sa.Column('disadvantaged_owned', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
op.create_index(op.f('ix_vendor_id'), 'vendor', ['id'], unique=False)
op.create_table('category_vendor_association',
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('vendor_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['category_id'], ['category.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['vendor_id'], ['vendor.id'], ondelete='SET NULL')
)
op.create_index(op.f('ix_category_vendor_association_category_id'), 'category_vendor_association', ['category_id'], unique=False)
op.create_index(op.f('ix_category_vendor_association_vendor_id'), 'category_vendor_association', ['vendor_id'], unique=False)
op.create_table('opportunity',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contract_id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('title', sa.String(length=255), nullable=True),
sa.Column('department', sa.String(length=255), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=False),
sa.Column('bid_open', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['category_id'], ['category.id'], ondelete='SET NULL'),
sa.ForeignKeyConstraint(['contract_id'], ['contract.id'], ondelete='cascade'),
|
sa.PrimaryKeyConstraint('id')
)
op.add_column('app_status', sa.Column('county_max_deadline', sa.DateTime(), nullable=True))
op.add_column('line_item', sa.Column('percentage', sa.Boolean(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('opportunity')
op.drop_index(op.f('ix_category_vend
|
or_association_vendor_id'), table_name='category_vendor_association')
op.drop_index(op.f('ix_category_vendor_association_category_id'), table_name='category_vendor_association')
op.drop_table('category_vendor_association')
op.drop_index(op.f('ix_vendor_id'), table_name='vendor')
op.drop_table('vendor')
op.drop_index(op.f('ix_category_id'), table_name='category')
op.drop_table('category')
op.drop_column('line_item', 'percentage')
op.drop_column('app_status', 'county_max_deadline')
### end Alembic commands ###
|
mhkyg/OrangePIStuff
|
lcd/lcd_update.py
|
Python
|
mit
| 1,190
| 0.012605
|
#!/usr/bin/python3
# Example using a character LCD connected to a Raspberry Pi or BeagleBone Black.
import time
import datetime
import Adafruit_CharLCD as LCD
def file_get_contents(filename):
with open(filename) as f:
return f.read()
# Raspberry Pi pin configuration:
lcd_rs = 24 # Note this might need to be changed to 21 for older revision Pi's.
lcd_en = 23
lcd_d4 = 9
lcd_d5 = 11
lcd_d6 = 10
lcd_d7 = 18
lcd_backlight = 8
# BeagleBone Black configuration:
# lcd_rs = 'P8_8'
# lcd_en = 'P8_10'
# lcd_d4 = 'P8_18'
# lcd_d5 = 'P8_16'
# lcd_d6 = 'P8_14'
# lcd_d7 = 'P8_12'
# lcd_backlight = 'P8_7'
# Define LCD column and row size for 16x2 LCD.
lcd_columns = 16
lcd_rows = 2
# Alternatively specify a 20x4 LCD.
# lcd_columns = 20
# lcd_rows = 4
# Initialize the LCD using the pins above.
lcd = LCD.Adafruit_CharLCD(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6, lcd_d7,
|
lcd_columns, lcd_rows, lcd_backlight)
datestring = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
lcd.clear()
lcd.message(file_get_contents
|
("../data/lcd.txt") );
|
shirk3y/cyclone
|
demos/helloworld/helloworld.py
|
Python
|
apache-2.0
| 1,114
| 0
|
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2010 Alexandre Fiori
# based on the original Tornado by Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRAN
|
TIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cyclone.web
import sys
from twisted.internet import reactor
from twisted.python import log
class MainHandler(cyclone.web.RequestHandler):
def get(self):
self.write("Hello, world")
def main():
log.startLogging(sys.stdout)
application = cyclone.web.Application([
(r"/", MainHandler)
])
reactor.l
|
istenTCP(8888, application, interface="127.0.0.1")
reactor.run()
if __name__ == "__main__":
main()
|
0x0mar/RATDecoders
|
Xtreme.py
|
Python
|
gpl-3.0
| 10,565
| 0.030951
|
#!/usr/bin/env python
'''
xtreme Rat Config Decoder
'''
__description__ = 'xtreme Rat Config Extractor'
__author__ = 'Kevin Breen http://techanarchy.net http://malwareconfig.com'
__version__ = '0.1'
__date__ = '2014/04/10'
#Standard Imports Go Here
import os
import sys
import string
from struct import unpack
from optparse import OptionParser
#Non Standard Imports
try:
import pefile
except ImportError:
print "Couldn't Import pefile. Try 'sudo pip install pefile'"
# Main Decode Function Goes Here
'''
data is a read of the file
Must return a python dict of values
'''
def run(data):
key = "C\x00O\x00N\x00F\x00I\x00G"
codedConfig = configExtract(data)
if codedConfig is not None:
rawConfig = rc4crypt(codedConfig, key)
#1.3.x # Not implemented yet
if len(rawConfig) == 0xe10:
config = None
#2.9.x #Not a stable extract
elif len(rawConfig) == 0x1390 or len(rawConfig) == 0x1392:
config = v29(rawConfig)
#3.1 & 3.2
elif len(rawConfig) == 0x5Cc:
config = v32(rawConfig)
#3.5
elif len(rawConfig) == 0x7f0:
config = v35(rawConfig)
else:
config = None
return config
else:
print '[-] Coded config not found'
sys.exit()
#Helper Functions Go Here
def rc4crypt(data, key): # modified for bad implemented key length
x = 0
box = range(256)
for i in range(256):
x = (x + box[i] + ord(key[i % 6])) % 256
box[i], box[x] = box[x], box[i]
x = 0
y = 0
out = []
for char in data:
x = (x + 1) % 256
y = (y + box[x]) % 256
box[x], box[y] = box[y], box[x]
out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
return ''.join(out)
def configExtract(rawData):
try:
pe = pefile.PE(data=rawData)
try:
rt_string_idx = [
entry.id for entry in
pe.DIRECTORY_ENTRY_RESOURCE.entries].index(pefile.RESOURCE_TYPE['RT_RCDATA'])
except ValueError, e:
return None
except AttributeError, e:
return None
rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx]
for entry in rt_string_directory.directory.entries:
if str(entry.name) == "XTREME":
data_rva = entry.directory.entries[0].data.struct.OffsetToData
size = entry.directory.entries[0].data.struct.Size
data = pe.get_memory_mapped_image()[data_rva:data_rva+size]
return data
except:
return None
def v29(rawConfig):
dict = {}
dict["ID"] = getUnicodeString(rawConfig, 0x9e0)
dict["Group"] = getUnicodeString(rawConfig, 0xa5a)
dict["Version"] = getUnicodeString(rawConfig, 0xf2e) # use this to recalc offsets
dict["Mutex"] = getUnicodeString(rawConfig, 0xfaa)
dict["Install Dir"] = getUnicodeString(rawConfig, 0xb50)
dict["Install Name"] = getUnicodeString(rawConfig, 0xad6)
dict["HKLM"] = getUnicodeString(rawConfig, 0xc4f)
dict["HKCU"] = getUnicodeString(rawConfig, 0xcc8)
dict["Custom Reg Key"] = getUnicodeString(rawConfig, 0xdc0)
dict["Custom Reg Name"] = getUnicodeString(rawConfig, 0xe3a)
dict["Custom Reg Value"] = getUnicodeString(rawConfig, 0xa82)
dict["ActiveX Key"] = getUnicodeString(rawConfig, 0xd42)
dict["Injection"] = getUnicodeString(rawConfig, 0xbd2)
dict["FTP Server"] = getUnicodeString(rawConfig, 0x111c)
dict["FTP UserName"] = getUnicodeString(rawConfig, 0x1210)
dict["FTP Password"] = getUnicodeString(rawConfig, 0x128a)
dict["FTP Folder"] = getUnicodeString(rawConfig, 0x1196)
dict["Domain1"] = str(getUnicodeString(rawConfig, 0x50)+":"+str(unpack("<I",rawConfig[0:4])[0]))
dict["Domain2"] = str(getUnicodeString(rawConfig, 0xca)+":"+str(unpack("<I",rawConfig[4:8])[0]))
dict["Domain3"] = str(getUnicodeString(rawConfig, 0x144)+":"+str(unpack("<I",rawConfig[8:12])[0]))
dict["Domain4"] = str(getUnicodeString(rawConfig, 0x1be)+":"+str(unpack("<I",rawConfig[12:16])[0]))
dict["Domain5"] = str(getUnicodeString(rawConfig, 0x238)+":"+str(unpack("<I",rawConfig[16:20])[0]))
dict["Domain6"] = str(getUnicodeString(rawConfig, 0x2b2)+":"+str(unpack("<I",rawConfig[20:24])[0]))
dict["Domain7"] = str(getUnicodeString(rawConfig, 0x32c)+":"+str(unpack("<I",rawConfig[24:28])[0]))
dict["Domain8"] = str(getUnicodeString(rawConfig, 0x3a6)+":"+str(unpack("<I",rawConfig[28:32])[0]))
dict["Domain9"] = str(getUnicodeString(rawConfig, 0x420)+":"+str(unpack("<I",rawConfig[32:36])[0]))
dict["Domain10"] = str(getUnicodeString(rawConfig, 0x49a)+":"+str(unpack("<I",rawConfig[36:40])[0]))
dict["Domain11"] = str(getUnicodeString(rawConfig, 0x514)+":"+str(unpack("<I",rawConfig[40:44])[0]))
dict["Domain12"] = str(getUnicodeString(rawConfig, 0x58e)+":"+str(unpack("<I",rawConfig[44:48])[0]))
dict["Domain13"] = str(getUnicodeString(rawConfig, 0x608)+":"+str(unpack("<I",rawConfig[48:52])[0]))
dict["Domain14"] = str(getUnicodeString(rawConfig, 0x682)+":"+str(unpack("<I",rawConfig[52:56])[0]))
dict["Domain15"] = str(getUnicodeString(rawConfig, 0x6fc)+":"+str(unpack("<I",rawConfig[56:60])[0]))
dict["Domain16"] = str(getUnicodeString(rawConfig, 0x776)+":"+str(unpack("<I",rawConfig[60:64])[0]))
dict["Domain17"] = str(getUnicodeString(rawConfig, 0x7f0)+":"+str(unpack("<I",rawConfig[64:68])[0]))
dict["Domain18"] = str(getUnicodeString(rawConfig, 0x86a)+":"+str(unpack("<I",rawConfig[68:72])[0]))
dict["Domain19"] = str(getUnicodeString(rawConfig, 0x8e4)+":"+str(unpack("<I",rawConfig[72:76])[0]))
dict["Domain20"] = str(getUnicodeString(rawConfig, 0x95e)+":"+str(unpack("<I",rawConfig[76:80])[0]))
return dict
def v32(rawConfig):
dict = {}
dict["ID"] = getUnicodeString(rawConfig, 0x1b4)
dict["Group"] = getUnicodeString(rawConfig, 0x1ca)
dict["Version"] = getUnicodeString(rawConfig, 0x2bc)
dict["Mutex"] = getUnicodeString(rawConfig, 0x2d4)
dict["Install Dir"] = getUnicodeString(rawConfig, 0x1f8)
dict["Install Name"] = getUnicodeString(rawConfig, 0x1e2)
dict["HKLM"] = getUnicodeString(rawConfig, 0x23a)
dict["HKCU"] = getUnicodeString(rawConfig, 0x250)
dict["ActiveX Key"] = getUnicodeString(rawConfig, 0x266)
dict["Injection"] = getUnicodeString(rawConfig, 0x216)
dict["FTP Server"] = getUnicodeString(rawConfig, 0x35e)
dict["FTP UserName"] = getUnicodeString(rawConfig, 0x402)
dict["FTP Password"] = getUnicodeString(rawConfig, 0x454)
dict["FTP Folder"] = getUnicodeString(rawConfig, 0x3b0)
dict["Domain1"] = str(getUnicodeString(rawConfig, 0x14)+":"+str(unpack("<I",rawConfig[0:4])[0]))
dict["Domain2"] = str(getUnicodeString(rawConfig, 0x66)+":"+str(unpack("<I",rawConfig[4:8])[0]))
dict["Domain3"] = str(getUnicodeString(rawConfig, 0xb8)+":"+str(unpack("<I",rawConfig[8:12])[0]))
dict["Domain4"] = str(getUnicodeString(rawConfig, 0x10a)+":"+str(unpack("<I",rawConfig[12:16])[0]))
dict["Domain5"] = str(getUnicodeString(rawConfig, 0x15c)+":"+str(unpack("<I",rawConfig[16:20])[0]))
dict["Msg Box Title"] = getUnicodeString(rawConfig, 0x50c)
dict["Msg Box Text"] = getUnicodeString(rawConfig, 0x522)
return dict
def v35(rawConfig):
dict = {}
dict["ID"] = getUnicodeString(rawConfig, 0x1b4)
dict["Group"] = getUnicodeString(rawConfig, 0x1ca)
dict["Version"] = getUnicodeString(rawConfig, 0x2d8)
dict["Mutex"] = getUnicodeString(rawConfig, 0x2f0)
dict["Install Dir"] = getUnicodeString(rawConfig, 0x1f8)
dict["Install Name"] = getUnicodeString(rawConfig, 0x1e2)
dict["HKLM"] = getUnicodeString(rawConfig, 0x23a)
dict["HKCU"] = getUnicodeString(rawConfig, 0x250)
dict["ActiveX Key"] = getUnicodeString(rawConfig, 0x266)
dict["Injection"] = getUnicodeString(rawConfig, 0x216)
dict["FTP Server
|
"] = getUnicodeString(rawConfig, 0x380)
dict["FTP UserName"] = getUnicodeString(rawConfig, 0x422)
dict["FTP Password"] = getUnicodeString(rawConfig, 0x476)
dict["FTP Folder"] = getUnicodeString(rawCon
|
fig, 0x3d2)
dict["Domain1"] = str(getUnicodeString(rawConfig, 0x14)+":"+str(unpack("<I",rawConfig[0:4])[0]))
dict["Domain2"] = str(getUnicodeString(rawConfig, 0x66)+":"+str(unpack("<I",rawConfig[4:8])[0]))
dict["Domain3"] = str(getUnicodeString(rawConfig, 0xb8)+":"+str(unpack("<I",rawConfig[8:12])[0]))
dict["Domain4"] = str(getUnicodeString(rawConfig, 0x10a)+":"+str(unpack("<I",rawConfig[12:16])[
|
ABcDexter/cython
|
Cython/Compiler/Optimize.py
|
Python
|
apache-2.0
| 169,504
| 0.004501
|
from __future__ import absolute_import
from . import TypeSlots
from .ExprNodes import not_a_constant
import cython
cython.declare(UtilityCode=object, EncodedString=object, BytesLiteral=object,
Nodes=object, ExprNodes=object, PyrexTypes=object, Builtin=object,
UtilNodes=object)
from . import Nodes
from . import ExprNodes
from . import PyrexTypes
from . import Visitor
from . import Builtin
from . import UtilNodes
from . import Options
from .Code import UtilityCode
from .StringEncoding import EncodedString, BytesLiteral
from .Errors import error
from .ParseTreeTransforms import SkipDeclarations
import copy
import codecs
try:
from __builtin__ import reduce
except ImportError:
from functools import reduce
try:
from __builtin__ import basestring
except ImportError:
basestring = str # Python 3
def load_c_utility(name):
return UtilityCode.load_cached(name, "Optimize.c")
def unwrap_coerced_node(node, coercion_nodes=(ExprNodes.CoerceToPyTypeNode, ExprNodes.CoerceFromPyTypeNode)):
if isinstance(node, coercion_nodes):
return node.arg
return node
def unwrap_node(node):
while isinstance(node, UtilNodes.
|
ResultRefNode):
node = node.expression
return node
def is_common_value(a, b):
a = unwrap_node(a)
b = unwrap_node(b)
if isinstance(a, ExprNodes.NameNode) and isinstance(b, ExprNodes.NameNo
|
de):
return a.name == b.name
if isinstance(a, ExprNodes.AttributeNode) and isinstance(b, ExprNodes.AttributeNode):
return not a.is_py_attr and is_common_value(a.obj, b.obj) and a.attribute == b.attribute
return False
def filter_none_node(node):
if node is not None and node.constant_result is None:
return None
return node
class IterationTransform(Visitor.EnvTransform):
"""Transform some common for-in loop patterns into efficient C loops:
- for-in-dict loop becomes a while loop calling PyDict_Next()
- for-in-enumerate is replaced by an external counter variable
- for-in-range loop becomes a plain C for loop
"""
def visit_PrimaryCmpNode(self, node):
if node.is_ptr_contains():
# for t in operand2:
# if operand1 == t:
# res = True
# break
# else:
# res = False
pos = node.pos
result_ref = UtilNodes.ResultRefNode(node)
if isinstance(node.operand2, ExprNodes.IndexNode):
base_type = node.operand2.base.type.base_type
else:
base_type = node.operand2.type.base_type
target_handle = UtilNodes.TempHandle(base_type)
target = target_handle.ref(pos)
cmp_node = ExprNodes.PrimaryCmpNode(
pos, operator=u'==', operand1=node.operand1, operand2=target)
if_body = Nodes.StatListNode(
pos,
stats = [Nodes.SingleAssignmentNode(pos, lhs=result_ref, rhs=ExprNodes.BoolNode(pos, value=1)),
Nodes.BreakStatNode(pos)])
if_node = Nodes.IfStatNode(
pos,
if_clauses=[Nodes.IfClauseNode(pos, condition=cmp_node, body=if_body)],
else_clause=None)
for_loop = UtilNodes.TempsBlockNode(
pos,
temps = [target_handle],
body = Nodes.ForInStatNode(
pos,
target=target,
iterator=ExprNodes.IteratorNode(node.operand2.pos, sequence=node.operand2),
body=if_node,
else_clause=Nodes.SingleAssignmentNode(pos, lhs=result_ref, rhs=ExprNodes.BoolNode(pos, value=0))))
for_loop = for_loop.analyse_expressions(self.current_env())
for_loop = self.visit(for_loop)
new_node = UtilNodes.TempResultFromStatNode(result_ref, for_loop)
if node.operator == 'not_in':
new_node = ExprNodes.NotNode(pos, operand=new_node)
return new_node
else:
self.visitchildren(node)
return node
def visit_ForInStatNode(self, node):
self.visitchildren(node)
return self._optimise_for_loop(node, node.iterator.sequence)
def _optimise_for_loop(self, node, iterator, reversed=False):
if iterator.type is Builtin.dict_type:
# like iterating over dict.keys()
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_dict_iteration(
node, dict_obj=iterator, method=None, keys=True, values=False)
# C array (slice) iteration?
if iterator.type.is_ptr or iterator.type.is_array:
return self._transform_carray_iteration(node, iterator, reversed=reversed)
if iterator.type is Builtin.bytes_type:
return self._transform_bytes_iteration(node, iterator, reversed=reversed)
if iterator.type is Builtin.unicode_type:
return self._transform_unicode_iteration(node, iterator, reversed=reversed)
# the rest is based on function calls
if not isinstance(iterator, ExprNodes.SimpleCallNode):
return node
if iterator.args is None:
arg_count = iterator.arg_tuple and len(iterator.arg_tuple.args) or 0
else:
arg_count = len(iterator.args)
if arg_count and iterator.self is not None:
arg_count -= 1
function = iterator.function
# dict iteration?
if function.is_attribute and not reversed and not arg_count:
base_obj = iterator.self or function.obj
method = function.attribute
# in Py3, items() is equivalent to Py2's iteritems()
is_safe_iter = self.global_scope().context.language_level >= 3
if not is_safe_iter and method in ('keys', 'values', 'items'):
# try to reduce this to the corresponding .iter*() methods
if isinstance(base_obj, ExprNodes.SimpleCallNode):
inner_function = base_obj.function
if (inner_function.is_name and inner_function.name == 'dict'
and inner_function.entry
and inner_function.entry.is_builtin):
# e.g. dict(something).items() => safe to use .iter*()
is_safe_iter = True
keys = values = False
if method == 'iterkeys' or (is_safe_iter and method == 'keys'):
keys = True
elif method == 'itervalues' or (is_safe_iter and method == 'values'):
values = True
elif method == 'iteritems' or (is_safe_iter and method == 'items'):
keys = values = True
if keys or values:
return self._transform_dict_iteration(
node, base_obj, method, keys, values)
# enumerate/reversed ?
if iterator.self is None and function.is_name and \
function.entry and function.entry.is_builtin:
if function.name == 'enumerate':
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_enumerate_iteration(node, iterator)
elif function.name == 'reversed':
if reversed:
# CPython raises an error here: not a sequence
return node
return self._transform_reversed_iteration(node, iterator)
# range() iteration?
if Options.convert_range and node.target.type.is_int:
if iterator.self is None and function.is_name and \
function.entry and function.entry.is_builtin and \
function.name in ('range', 'xrange'):
return self._transform_range_iteration(node, iterator, reversed=reversed)
return node
def _transform_reversed_iteration(self, node, reversed_function):
args = reversed_function.arg_tuple.args
if
|
bitmazk/django-event-rsvp
|
event_rsvp/views.py
|
Python
|
mit
| 8,144
| 0.000491
|
"""Views for the ``event_rsvp`` app."""
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponseRedirect
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.generic import (
CreateView,
DeleteView,
DetailView,
ListView,
UpdateView,
)
from .forms import EventForm, GuestForm
from .models import Event, Guest
from .signals import post_guest_create
#--------#
# Mixins #
#--------#
class StaffMixin(object):
"""Mixin to let only staff member pass."""
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
if not request.user.is_staff:
raise Http404
return super(StaffMixin, self).dispatch(request, *args, **kwargs)
class EventViewMixin(object):
"""Mixin to handle event-specific options."""
model = Event
form_class = EventForm
def get_form_kwargs(self):
kwargs = super(EventViewMixin, self).get_form_kwargs()
kwargs.update({'created_by': self.request.user})
return kwargs
def get_success_url(self):
return reverse('rsvp_event_staff')
class EventSecurityMixin(object):
"""Mixin to handle event-specific security options."""
def dispatch(self, request, *args, **kwargs):
self.kwargs = kwargs
self.object = self.get_object()
date = self.object.start
# Check the right starting date within the slug
if (date.year != int(kwargs.get('year'))
or date.month != int(kwargs.get('month'))
or date.day != int(kwargs.get('day'))):
redirect_url = getattr(self.object, 'get_{0}_url'.format(
self.url_mode))
return HttpResponseRedirect(redirect_url())
return super(EventSecurityMixin, self).dispatch(request, *args,
**kwargs)
class GuestViewMixin(object):
"""Mixin to handle guest-specific functions."""
model = Guest
form_class = GuestForm
def dispatch(self, request, *args, **kwargs):
try:
self.event = Event.objects.get(slug=kwargs.get('event_slug'))
except Event.DoesNotExist:
raise Http404
return super(GuestViewMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(GuestViewMixin, self).get_context_data(**kwargs)
context.update({'event': self.event, 'user': self.request.user})
if (self.request.user.is_authenticated()
or self.event.allow_anonymous_rsvp):
context.update({'permission_to_book': True})
return context
def get_form_kwargs(self):
kwargs = super(GuestViewMixin, self).get_form_kwargs()
kwargs.update({'event': self.event, 'user': self.request.user})
return kwargs
def get_success_url(self):
return self.event.get_absolute_url()
class GuestSecurityMixin(object):
"""Mixin to handle guest-specific security options."""
def get_object(self, *args, **kwargs):
obj = super(GuestSecurityMixin, self).get_object(*args, **kwargs)
if obj.event != self.event:
raise Http404
return obj
#--------#
# Views #
#--------#
class EventListView(ListView):
"""List view to display upcoming events."""
def get_queryset(self):
return Event.objects.filter(start__gt=timezone.now(),
is_published=True)
def get_context_data(self, **kwargs):
context = super(EventListView, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
context.update({
'my_participations': self.request.user.guest_set.all()})
return context
class EventDetailView(EventSecurityMixin, EventViewMixin, DetailView):
"""Detail view to display information of an event."""
url_mode = 'absolute'
def dispatch(self, request, *args, **kwargs):
|
self.kwargs = kwargs
self.object = self.get_object()
if not self.object.is_published and not request.user.is_staff:
raise Http404
return super(EventDetailView, self).dispatch(request, *args, **kwargs)
class EventCreateView(StaffMixin, EventViewMixin, CreateVi
|
ew):
"""Create view to handle information of an event."""
pass
class EventUpdateView(StaffMixin, EventSecurityMixin, EventViewMixin,
UpdateView):
"""Update view to handle information of an event."""
url_mode = 'update'
class EventDeleteView(StaffMixin, EventSecurityMixin, EventViewMixin,
DeleteView):
"""Delete view to remove the relevant event."""
url_mode = 'delete'
class EventCreateFromTemplateView(StaffMixin, EventViewMixin, CreateView):
"""Create view to create information of an event from a template."""
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
try:
# Check if it's really a template
self.template = Event.objects.get(pk=kwargs.get('pk'),
template_name__gt='')
except Event.DoesNotExist:
raise Http404
return super(EventCreateFromTemplateView, self).dispatch(
request, *args, **kwargs)
def get_form_kwargs(self):
kwargs = super(EventCreateFromTemplateView, self).get_form_kwargs()
kwargs.update({'instance': self.template,
'create_from_template': True})
return kwargs
class StaffDashboardView(StaffMixin, ListView):
"""View to display event related functions and lists."""
model = Event
template_name = 'event_rsvp/staff_dashboard.html'
def get_context_data(self, **kwargs):
context = super(StaffDashboardView, self).get_context_data(**kwargs)
templates = self.object_list.exclude(template_name__exact='')
self.object_list = self.object_list.filter(template_name__exact='')
context.update({
'upcoming': self.object_list.filter(start__gt=timezone.now()),
'current': self.object_list.filter(start__lte=timezone.now(),
end__gte=timezone.now()),
'past': self.object_list.filter(end__lt=timezone.now()),
'templates': templates,
})
return context
class GuestDetailView(StaffMixin, GuestSecurityMixin, GuestViewMixin,
DetailView):
"""View to display guest related functions and lists."""
pass
class GuestCreateView(GuestViewMixin, CreateView):
"""Create view to add a guest to an event."""
def form_valid(self, form):
resp = super(GuestCreateView, self).form_valid(form)
post_guest_create.send(
sender=self, request=self.request, user=form.user,
event=form.event)
return resp
def get_form_kwargs(self):
kwargs = super(GuestCreateView, self).get_form_kwargs()
if self.request.user.is_authenticated():
kwargs.update({'initial': {
'name': self.request.user.get_full_name(),
'email': self.request.user.email}})
return kwargs
class GuestUpdateView(GuestSecurityMixin, GuestViewMixin, UpdateView):
"""Update view to handle a guest."""
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
try:
self.event = Event.objects.get(slug=kwargs.get('event_slug'))
except Event.DoesNotExist:
raise Http404
self.kwargs = kwargs
self.object = self.get_object()
if (not request.user.is_staff and not self.object.user
and not self.object.user == request.user):
raise Http404
return super(GuestViewMixin, self).dispatch(request, *args, **kwargs)
class GuestDeleteView(StaffMixin, GuestViewMixin, GuestSecurityMixin,
DeleteView):
"""Delete view to remove the relevant guest."""
pass
|
elliotpeele/pyramid_oauth2_provider
|
pyramid_oauth2_provider/models.py
|
Python
|
mit
| 6,539
| 0
|
#
# Copyright (c) Elliot Peele <elliot@bentlogic.net>
#
# This program is distributed under the terms of the MIT License as found
# in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/mit-license.php.
#
# This program is distributed in the hope that it will be useful, but
# without any warrenty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the MIT License for full details.
#
import time
from datetime import datetime
from base64 import b64decode
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Binary
from sqlalchemy import String
from sqlalchemy import Integer
from sqlalchemy import Boolean
from sqlalchemy import DateTime
from sqlalchemy import Unicode
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import synonym
from zope.sqlalchemy import ZopeTransactionExtension
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from cryptography.hazmat.backends import default_backend
from .util import oauth2_settings
from .generators import gen_token
from .generators import gen_client_id
from .generators import gen_client_secret
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
backend = default_backend()
class Oauth2Client(Base):
__tablename__ = 'oauth2_provider_clients'
id = Column(Integer, primary_key=True)
client_id = Column(Unicode(64), unique=True, nullable=False)
_client_secret = Column(Binary(255), nullable=False)
revoked = Column(Boolean, default=False)
revocation_date = Column(DateTime)
_salt = None
def __init__(self, salt=None):
self._salt = salt
self.client_id = gen_client_id()
self.client_secret = gen_client_secret()
def new_client_secret(self):
secret = gen_client_secret()
self.client_secret = secret
return secret
def _get_client_secret(self):
return self._client_secret
def _set_client_secret(self, client_secret):
if self._salt:
salt = b64decode(self._salt.encode('utf-8'))
else:
try:
if not oauth2_settings('salt'):
raise ValueError(
'oauth2_provider.salt configuration required.'
)
salt = b64decode(oauth2_settings('salt').encode('utf-8'))
except AttributeError:
return
kdf = Scrypt(
salt=salt,
length=64,
n=2 ** 14,
r=8,
p=1,
backend=backend
)
try:
client_secret = bytes(client_secret, 'utf-8')
except Ty
|
peError:
pass
self._client_secret = kdf.derive(client_secret)
client_secret = synonym('_client_secret', descriptor=property(
_get_client_secret, _set_client_secret))
def revoke(self):
self.revoked = True
self.revocation_date = datetime.utcnow()
def isRevoked(self):
return self.revoked
class
|
Oauth2RedirectUri(Base):
__tablename__ = 'oauth2_provider_redirect_uris'
id = Column(Integer, primary_key=True)
uri = Column(Unicode(256), unique=True, nullable=False)
client_id = Column(Integer, ForeignKey(Oauth2Client.id))
client = relationship(Oauth2Client, backref=backref('redirect_uris'))
def __init__(self, client, uri):
self.client = client
self.uri = uri
class Oauth2Code(Base):
__tablename__ = 'oauth2_provider_codes'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, nullable=False)
authcode = Column(Unicode(64), unique=True, nullable=False)
expires_in = Column(Integer, nullable=False, default=10*60)
revoked = Column(Boolean, default=False)
revocation_date = Column(DateTime)
creation_date = Column(DateTime, default=datetime.utcnow)
client_id = Column(Integer, ForeignKey(Oauth2Client.id))
client = relationship(Oauth2Client, backref=backref('authcode'))
def __init__(self, client, user_id):
self.client = client
self.user_id = user_id
self.authcode = gen_token(self.client)
def revoke(self):
self.revoked = True
self.revocation_date = datetime.utcnow()
def isRevoked(self):
expiry = time.mktime(self.create_date.timetuple()) + self.expires_in
if datetime.frometimestamp(expiry) < datetime.utcnow():
self.revoke()
return self.revoked
class Oauth2Token(Base):
__tablename__ = 'oauth2_provider_tokens'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, nullable=False)
access_token = Column(Unicode(64), unique=True, nullable=False)
refresh_token = Column(Unicode(64), unique=True, nullable=False)
expires_in = Column(Integer, nullable=False, default=60*60)
revoked = Column(Boolean, default=False)
revocation_date = Column(DateTime)
creation_date = Column(DateTime, default=datetime.utcnow)
client_id = Column(Integer, ForeignKey(Oauth2Client.id))
client = relationship(Oauth2Client, backref=backref('tokens'))
def __init__(self, client, user_id):
self.client = client
self.user_id = user_id
self.access_token = gen_token(self.client)
self.refresh_token = gen_token(self.client)
def revoke(self):
self.revoked = True
self.revocation_date = datetime.utcnow()
def isRevoked(self):
expiry = time.mktime(self.creation_date.timetuple()) + self.expires_in
if datetime.fromtimestamp(expiry) < datetime.utcnow():
self.revoke()
return self.revoked
def refresh(self):
"""
Generate a new token for this client.
"""
cls = self.__class__
self.revoke()
return cls(self.client, self.user_id)
def asJSON(self, **kwargs):
token = {
'access_token': self.access_token,
'refresh_token': self.refresh_token,
'user_id': self.user_id,
'expires_in': self.expires_in,
}
kwargs.update(token)
return kwargs
def initialize_sql(engine, settings):
DBSession.configure(bind=engine)
Base.metadata.bind = engine
Base.metadata.create_all(engine)
|
dustinvtran/bayesrl
|
bayesrl/environments/chainworld.py
|
Python
|
mit
| 1,441
| 0.002082
|
import numpy as np
from ..utils import check_random_state
class ChainWorld(object):
def __init__(self, left_length, left_reward, right_length, right_reward, on_chain_reward, p_return_to_start, random_state=None):
self.left_length = left_length
self.left_reward = left_reward
self.right_length = right_length
self.right_rewar
|
d = right_reward
self.on_chain_reward = on_chain_reward
self.p_return_to_start = p_return_to_start
self.num_states = self.left_length + self.right_length + 1
self.num_actions = 2
self.random_state = check_random_state(random_state)
self.reset()
def reset(self):
self.state = self.left_length
def observe(self):
return self.state
|
def is_terminal(self, state):
return state == 0 or state == self.num_states - 1
def perform_action(self, action):
if self.p_return_to_start and self.random_state.rand() < self.p_return_to_start:
self.reset()
elif action == 0:
self.state -= 1
else:
self.state += 1
if self.state == 0:
reward = self.left_reward
elif self.state == self.num_states - 1:
reward = self.right_reward
else:
reward = self.on_chain_reward
return self.observe(), reward
def get_max_reward(self):
return max(self.left_reward, self.right_reward)
|
obi-two/Rebelion
|
data/scripts/templates/object/building/poi/shared_tatooine_hutt_assassin_camp_large1.py
|
Python
|
mit
| 465
| 0.047312
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE L
|
OST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Building()
result.template = "object/building/poi/shared_tatooine_hutt_assassin_camp_large1.iff"
result.attribute_template_id = -1
result.stfName("poi_n","base_poi_building")
#### BEGIN MODIFICATIONS ####
#### END
|
MODIFICATIONS ####
return result
|
wmvanvliet/mne-python
|
mne/preprocessing/__init__.py
|
Python
|
bsd-3-clause
| 1,572
| 0
|
"""Preprocessing with artifact detection, SSP, and ICA."""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Matti Hämäläinen <msh@nmr.mgh.harvard.edu>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD-3-Clause
from .annotate_amplitude import annotate_amplitude
from .flat import annotate_flat
from .maxfilter import apply_maxfilter
from .ssp import compute_proj_ecg, compute_proj_eog
from .eog import find_eog_events, create_eog_epochs
from .ecg import find_ecg_events, create_ecg_epochs
from .ica import (ICA, ica_find_eog_events, ica_find_ecg_events,
get_score_funcs, read_ica, c
|
orrmap, read_ica_eeglab)
from .otp import oversampled_temp
|
oral_projection
from ._peak_finder import peak_finder
from .infomax_ import infomax
from .stim import fix_stim_artifact
from .maxwell import (maxwell_filter, find_bad_channels_maxwell,
compute_maxwell_basis)
from .realign import realign_raw
from .xdawn import Xdawn
from ._csd import compute_current_source_density
from . import nirs
from .artifact_detection import (annotate_movement, compute_average_dev_head_t,
annotate_muscle_zscore, annotate_break)
from ._regress import regress_artifact
from ._fine_cal import (compute_fine_calibration, read_fine_calibration,
write_fine_calibration)
from .annotate_nan import annotate_nan
from .interpolate import equalize_bads
from . import ieeg
from ._css import cortical_signal_suppression
|
fancl20/pili-python
|
pili/client.py
|
Python
|
mit
| 163
| 0.006135
|
from .hub import H
|
ub
class Client(object):
|
def __init__(self, mac):
self.__mac__ = mac
def hub(self, hub):
return Hub(self.__mac__, hub)
|
Sanji-IO/sanji-firmware
|
tests/test_e2e/view_firmware.py
|
Python
|
gpl-2.0
| 4,216
| 0
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import logging
from time import sleep
from sanji.core import Sanji
from sanji.connection.mqtt import Mqtt
REQ_RESOURCE = "/system/firmware"
class View(Sanji):
# This function will be executed after registered.
def run(self):
for count in xrange(0, 100, 1):
# Normal CRUD Operation
# self.publish.[get, put, delete, post](...)
# One-to-One Messaging
# self.publish.direct.[get, put, delete, post](...)
# (if block=True return Message, else return mqtt mid number)
# Agruments
# (resource[, data=None, block=True, timeout=60])
# case 1: test GET
print "GET %s" % REQ_RESOURCE
res = self.publish.get(REQ_RESOURCE)
if res.code != 200:
print "GET is supported, code 200 is expected"
print res.to_json()
self.stop()
# case 2: test PUT with no data
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE, None)
if res.code != 400:
print "data is required, code 400 is expected"
print res.to_json()
self.stop()
# case 3: test PUT with empty data (no required attributes)
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE, data={})
if res.code != 400:
print "data.reset, data.server, or data.upgrade is required," \
" code 400 is expected"
print res.to_json()
self.stop()
# case 4: test PUT with reset=0
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE, data={"reset": 0})
if res.code != 200:
print "data.reset=0 should reply code 200"
print res.to_json()
self.stop()
# case 5: test PUT with reset=1 (setdef)
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE, data={"reset": 1})
if res.code != 200:
print "data.reset=1 should reply code 200 and cause setdef"
print res.to_json()
# case 6: test PUT with server="something"
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE,
|
data={"server": "test.server"})
if res.code != 200:
print "data.reset=0 should reply code 200"
print res.to_json()
self.stop()
print "GET %s" % REQ_RESOURCE
res = self.publish.get(REQ_RESOURCE)
if res.code != 200:
print "GET is supported, code 200 is expected"
print res.to_json()
|
self.stop()
elif "test.server" != res.data["server"]:
print "PUT failed, server (%s) should be \"test.server\"" \
% res.data["server"]
self.stop()
# case 7: test PUT with upgrade=0
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE, data={"upgrade": 0})
if res.code != 200:
print "data.upgrade=0 should reply code 200"
print res.to_json()
self.stop()
# case 8: test PUT with upgrade=1 (upgradehfm)
sleep(2)
print "PUT %s" % REQ_RESOURCE
res = self.publish.put(REQ_RESOURCE, data={"upgrade": 1})
if res.code != 200:
print "data.upgrade=1 should reply code 200 and cause" \
"upgradehfm"
print res.to_json()
# stop the test view
self.stop()
if __name__ == "__main__":
FORMAT = "%(asctime)s - %(levelname)s - %(lineno)s - %(message)s"
logging.basicConfig(level=0, format=FORMAT)
logger = logging.getLogger("Firmware")
view = View(connection=Mqtt())
view.start()
|
chemelnucfin/tensorflow
|
tensorflow/python/autograph/impl/api_test.py
|
Python
|
apache-2.0
| 30,339
| 0.008141
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for api module.""
|
"
from __future__ import absolute_import
from __future__ import division
from __future_
|
_ import print_function
import collections
import functools
import gc
import imp
import os
import re
import textwrap
import types
import numpy as np
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.util import function_utils
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
tf = utils.fake_tf()
global_n = 2
class TestResource(object):
def __init__(self):
self.x = 3
class ApiTest(test.TestCase):
@test_util.run_deprecated_v1
def test_decorator_recursive(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_decorator_not_recursive(self):
class TestClass(object):
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=False)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
@test_util.run_deprecated_v1
def test_convert_then_do_not_convert(self):
class TestClass(object):
@api.do_not_convert
def called_member(self, a):
return tf.negative(a)
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant((2, 4)), constant_op.constant(1),
constant_op.constant(-2))
self.assertAllEqual((0, 1), self.evaluate(x))
@test_util.run_deprecated_v1
def test_decorator_calls_decorated(self):
class TestClass(object):
@api.convert()
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= self.called_member(a)
return x
tc = TestClass()
with self.cached_session() as sess:
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_decorator_preserves_argspec(self):
class TestClass(object):
def test_method(self, a):
if a < 0:
a = -a
return a
test_method_converted = api.convert()(test_method)
tc = TestClass()
self.assertListEqual(
list(tf_inspect.getfullargspec(tc.test_method)),
list(tf_inspect.getfullargspec(tc.test_method_converted)))
def test_do_not_convert_argspec(self):
class TestClass(object):
def test_method(self, x, y):
z = x + y
return z
test_method_whitelisted = api.do_not_convert(test_method)
tc = TestClass()
self.assertTrue(tf_inspect.ismethod(tc.test_method_whitelisted))
# Because the wrapped function is not generated, we can't preserve its
# arg spec.
self.assertEqual((),
tuple(function_utils.fn_args(tc.test_method_whitelisted)))
def test_do_not_convert_callable_object(self):
class TestClass(object):
def __call__(self):
return 1
tc = TestClass()
self.assertEqual(1, api.do_not_convert(tc)())
@test_util.run_deprecated_v1
def test_convert_call_site_decorator(self):
class TestClass(object):
def called_member(self, a):
if a < 0:
a = -a
return a
@api.convert(recursive=True)
def test_method(self, x, s, a):
while tf.reduce_sum(x) > s:
x //= api.converted_call(self.called_member,
converter.ConversionOptions(recursive=True),
(a,), {})
return x
tc = TestClass()
x = tc.test_method(
constant_op.constant([2, 4]), constant_op.constant(1),
constant_op.constant(-2))
self.assertListEqual([0, 1], self.evaluate(x).tolist())
def test_converted_call_builtin(self):
x = api.converted_call(range, converter.ConversionOptions(recursive=True),
(3,), {})
self.assertEqual((0, 1, 2), tuple(x))
x = api.converted_call(re.compile,
converter.ConversionOptions(recursive=True),
('mnas_v4_a.*\\/.*(weights|kernel):0$',), {})
self.assertIsNotNone(x.match('mnas_v4_a/weights:0'))
def test_converted_call_function(self):
def test_fn(x):
if x < 0:
return -x
return x
x = api.converted_call(test_fn, converter.ConversionOptions(recursive=True),
(constant_op.constant(-1),), {})
self.assertEqual(1, self.evaluate(x))
@test_util.run_v1_only('b/120545219')
def test_converted_call_functools_partial(self):
def test_fn(x, y, z):
if x < 0:
return -x, -y, -z
return x, y, z
x = api.converted_call(
functools.partial(test_fn, constant_op.constant(-1), z=-3),
converter.ConversionOptions(recursive=True),
(constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
x = api.converted_call(
functools.partial(
functools.partial(test_fn, constant_op.constant(-1)), z=-3),
converter.ConversionOptions(recursive=True),
(constant_op.constant(-2),), {})
self.assertEqual((1, 2, 3), self.evaluate(x))
def test_converted_call_method(self):
class TestClass(object):
def __init__(self, x):
self.x = x
def test_method(self):
if self.x < 0:
return -self.x
return self.x
tc = TestClass(constant_op.constant(-1))
x = api.converted_call(tc.test_method,
converter.ConversionOptions(recursive=True), (), {})
self.assertEqual(1, self.evaluate(x))
def test_converted_call_synthetic_method(self):
class TestClass(object):
def __init__(self, x):
|
conan-io/conan
|
conans/test/unittests/tools/gnu/autotools_test.py
|
Python
|
mit
| 857
| 0.002334
|
import os
from conan.tools.files.files import save_toolchain_args
from conan.tools.gnu import Autotools
from conans.test.utils.mocks import ConanFileMock
from conans.test.utils.test_files import temp_folder
def test_source_folder_works():
folder = temp_folder()
os.chdir(folder)
save_toolchain_args({
"configure_args": "-foo bar",
"make_args": ""}
)
conanfile = ConanFileMock()
conanfile.folders.set_base_install(folder)
sources = "/path/to/sources"
conanfile.folders.set_base_source(sources)
autotools = Autot
|
ools(conanfile)
autotools.configure(build_script_folder="subfolder")
assert conanfile.com
|
mand.replace("\\", "/") == '"/path/to/sources/subfolder/configure" -foo bar'
autotools.configure()
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/configure" -foo bar'
|
remybaranx/qtaste
|
Testbeds/ControlScripts/do_nothing.py
|
Python
|
gpl-3.0
| 507
| 0.027613
|
from controlscript import *
print "This is a simple control script. It just does nothing and exits successfully."
print "Start parameter is %s, additional parameters are %s" % (star
|
t, arguments)
class DoNothing(ControlAction):
""" Control script action for exiting with error 1 on stop """
def __init__(self):
ControlAction.__init__(self, "Do nothing")
def start(self):
print "Do nothing on start"
print
|
def stop(self):
print "Do nothing on stop"
print
ControlScript([
DoNothing()
])
|
kuzeko/Twitter-Importer
|
twitter_helper/twitter_data.py
|
Python
|
mit
| 8,560
| 0.002921
|
import re
import Queue
import HTMLParser
import dateutil.parser as parser
class TwitterData:
#Twitter Datum properties
tweet_fields_list = ['id', 'user_id', 'in_reply_to_status_id', 'in_reply_to_user_id', 'favorited', 'retweeted', 'retweet_count', 'lang', 'created_at']
tweet_text_fields_list = ['tweet_id', 'user_id', 'text', 'geo_lat', 'geo_long', 'place_full_name', 'place_id']
tweet_url_fields_list = ['tweet_id', 'user_id', 'progressive', 'url']
tweet_hashtag_fields_list = ['tweet_id', 'user_id', 'hashtag_id']
user_fields_list = ['id', 'screen_name', 'name', 'verified', 'protected', 'followers_count', 'friends_count', 'statuses_count', 'favourites_count', 'location', 'utc_offset', 'time_zone', 'geo_enabled', 'lang', 'description', 'url', 'created_at']
#Utils for cleaning
highpoints = re.compile(u'[\U00010000-\U0010ffff]')
alphanum = re.compile(u'^[\w]+$')
def __init__(self, buffer_size):
#Queue of Twitter data records
self.tweets_queue = Queue.Queue(buffer_size)
self.tweet_texts_queue = Queue.Queue(buffer_size)
self.users_queue = Queue.Queue(buffer_size)
self.hashtags_queue = Queue.Queue()
self.urls_queue = Queue.Queue()
def contains_fields(self, data_array, fields_list, skip_list=[]):
for field in fields_list:
if not field in data_array and not field in skip_list:
return False
return True
def parse_tweet_basic_infos(self, tweet, tweet_fields_list):
tweet_record = []
user_data = tweet['user']
user_id = user_data['id']
for field in tweet_fields_list:
if field == 'user_id':
tweet_record.append(user_id)
elif field == 'created_at':
datetime = parser.parse(tweet['created_at'])
datetime = datetime.isoformat(' ')[:-6]
tweet_record.append(datetime)
elif field in tweet:
if not tweet[field]:
value = 0
else:
value = tweet[field]
tweet_record.append(value)
return tweet_record
def parse_tweet_text_infos(self, tweet, tweet_text_fields_list):
tweet_text_record = []
user_data = tweet['user']
user_id = user_data['id']
html_parser = HTMLParser.HTMLParser()
for field in tweet_text_fields_list:
if field == 'tweet_id':
tweet_text_record.append(tweet['id'])
elif field == 'user_id':
tweet_text_record.append(user_id)
elif field == 'text':
if not tweet['text']:
value = ''
else:
value = tweet['text'].strip()
value = self.highpoints.sub(u'', value)
value = html_parser.unescape(value)
tweet_text_record.append(value)
elif field == 'geo_lat':
if not tweet['coordinates']:
tweet_text_record.append(0)
else:
tweet_text_record.append(tweet['coordinates']['coordinates'][0])
elif field == 'geo_long':
if not tweet['coordinates']:
tweet_text_record.append(0)
else:
tweet_text_record.append(t
|
weet['coordinates']['coordinates'][1])
elif field == 'place_full_name':
if not tweet['place']:
value = ''
else:
value = tweet['place']['full_name'].strip()
value = self.highpoints.sub(u'', value)
value = html_parser.unescape(value)
|
tweet_text_record.append(value)
elif field == 'place_id':
# http://api.twitter.com/1/geo/id/6b9ed4869788d40e.json
if not tweet['place']:
tweet_text_record.append('')
else:
tweet_text_record.append(tweet['place']['id'])
elif field in tweet:
if not tweet[field]:
value = 0
else:
value = tweet[field]
tweet_text_record.append(value)
return tweet_text_record
def parse_user_infos(self, user_data, user_fields_list):
user_record = []
#user_id = user_data['id']
html_parser = HTMLParser.HTMLParser()
for field in user_fields_list:
if field == 'created_at':
datetime = parser.parse(user_data['created_at'])
datetime = datetime.isoformat(' ')[:-6]
user_record.append(datetime)
elif field == 'lang':
if not user_data['lang']:
value = 'NN'
else:
#TODO: lang codes are longer than 2
value = user_data['lang'][:2]
user_record.append(value)
elif field == 'utc_offset':
if not user_data['utc_offset']:
user_record.append(0)
else:
user_record.append(user_data['utc_offset'])
elif field == 'url':
if not user_data['url']:
user_record.append('')
else:
value = user_data['url'][:159]
user_record.append(value)
elif field in ['description', 'name', 'location']:
if not user_data[field]:
value = ''
else:
value = user_data[field].strip()
value = self.highpoints.sub(u'', value)
value = html_parser.unescape(value)
user_record.append(value)
elif field in ['followers_count', 'friends_count', 'statuses_count', 'favourites_count']:
value = user_data[field]
if value is None or value < 0:
return None
user_record.append(value)
elif field in ['verified', 'protected', 'geo_enabled']:
user_record.append(user_data[field])
elif field in user_data:
if not user_data[field]:
value = ''
else:
value = user_data[field]
user_record.append(value)
return user_record
def enqueue_tweet_data(self, tweet):
tweet_record = []
tweet_text_record = []
user_record = []
user_data = tweet['user']
user_id = user_data['id']
tweet_record = self.parse_tweet_basic_infos(tweet, self.tweet_fields_list)
if tweet_record is None:
#logger.error("Problem parsing tweet {0} ".format(tweet['id']))
return False
tweet_text_record = self.parse_tweet_text_infos(tweet, self.tweet_text_fields_list)
if tweet_text_record is None:
#logger.error("Problem parsing text for tweet {0} ".format(tweet['id']))
return False
user_record = self.parse_user_infos(user_data, self.user_fields_list)
if user_record is None:
# logger.info("Problem parsing user {0} for tweet {1} ".format(user_id, tweet['id']))
return False
#Enqueue
self.tweets_queue.put(tweet_record)
self.tweet_texts_queue.put(tweet_text_record)
self.users_queue.put(user_record)
#To avoid duplicates
tweet_inserted_hashtags = []
if len(tweet['entities']) > 0:
if len(tweet['entities']['urls']) > 0:
url_count = 0
for url in tweet['entities']['urls']:
url_count = url_count + 1
self.urls_queue.put([tweet['id'], user_id, url_count, url['expanded_url']])
if len(tweet['entities']['hashtags']) > 0:
for hash in tweet['entities']['hashtags']:
hash_text = self.highpoints.sub(u'', hash['text'])
hash_text = hash_text.lower()
|
pydicom/sendit
|
sendit/celery.py
|
Python
|
mit
| 512
| 0.001953
|
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab
from sendit.settings import (
INSTALLED_APPS,
BROKER_URL
)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sendit.settings')
app =
|
Celery('sendit',broker=BROKER_URL)
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: I
|
NSTALLED_APPS)
|
jammon/gemeinde
|
gottesdienste/models.py
|
Python
|
mit
| 2,564
| 0.007419
|
# encoding: utf-8
from django.db import models
from gemeinde import settings
from mezzanine.generic.fields import KeywordsField
from mezzanine.pages.models import Page
from pytz import timezone
# from django.utils.timezone import activate
# activate()
class Gottesdienst(models.Model):
u"""Repräsentiert einen Gottesdienst an einem Tag
Besonderheiten:
Abendmahl, Taufen, Kirchencafe, Glaubensgespräch
"""
datum = models.DateTimeField()
dauer = models.IntegerField('Dauer in Minuten', default=60)
titel = models.CharField(max_length=200, blank=True)
prediger = models.CharField(max_length=50, blank=True,
help_text='Kann leer bleiben. '
'Wenn Prediger-Auswahl leer bleibt, hier Prediger eintragen.')
prediger_key = models.ForeignKey('Prediger', blank=True, null=True,
verbose_name="Prediger-Auswahl")
keywords = KeywordsField(verbose_name=u"Stichwörter")
freitext = models.TextField(null=True, blank=True)
predigttext_stelle = models.CharField("Bibelstelle des Predigttexts",
max_length=50, blank=True)
predigttext = models.TextField(null=True, blank=True)
ort = models.CharField('Ort (wenn nicht Thomaskirche)', max_length=50,
blank=True)
class Meta:
verbose_name = u'Gottesdienst'
verbose_name_plural = u'Gottesdienste'
get_latest_by = 'datum'
def __unicode__(self):
return (u'{} am {:%d.%m.%Y um %H:%M} Uhr'
.format(self.titel or u'Gottesdiens
|
t',
self.datum.astimezone(timezone(settings.TIME_ZONE))))
def save(self, *args, **kwargs):
if self.prediger_key:
self.prediger = unicode(self.prediger_key)
super(Gottesdienst, self).save(*args, **kwargs)
def prediger_name(self):
return (self.prediger_key.nachname if self.prediger_
|
key
else self.prediger)
class Prediger(models.Model):
"""Ein Prediger, der Gottesdienste feiert"""
nachname = models.CharField(max_length=50)
vorname = models.CharField(max_length=50, blank=True)
titel = models.CharField(max_length=10, blank=True)
class Meta:
verbose_name = u'Prediger'
verbose_name_plural = u'Prediger'
def __unicode__(self):
return " ".join([p for p in
(self.titel, self.vorname, self.nachname) if p])
class Gottesdienste(Page):
class Meta:
verbose_name = 'Gottesdienst-Seite'
verbose_name_plural = 'Gottesdienst-Seiten'
# def __unicode__(self):
# pass
|
bfirsh/pytest_django
|
tests/views.py
|
Python
|
bsd-3-clause
| 270
| 0.003704
|
from django.http import HttpResponse
from django.template import Template
d
|
ef admin_required_view(request):
if request.user.is_staff:
return HttpResponse(Template('You are an admin').render({}))
return HttpResponse(Template('Access denied').render(
|
{}))
|
NeoBelerophon/Arietta.GPIO
|
test/test.py
|
Python
|
mit
| 18,250
| 0.003671
|
#!/usr/bin/env python
"""
Copyright (c) 2013-2014 Ben Croston
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""This test suite assumes the following circuit is connected:
GND_PIN = 6
LED_PIN = 12 (with resistor to 0v)
SWITCH_PIN = 18 (with 0.1 uF capacitor around switch) to 0v
LOOP_IN = 16 connected with 1K resistor to LOOP_OUT
LOOP_OUT = 22
"""
import sys
import warnings
import time
from threading import Timer
import RPi.GPIO as GPIO
if sys.version[:3] == '2.6':
import unittest2 as unittest
else:
import unittest
GND_PIN = 6
LED_PIN = 12
LED_PIN_BCM = 18
SWITCH_PIN = 18
LOOP_IN = 16
LOOP_OUT = 22
# Test starts with 'AAA' so that it is run first
class TestAAASetup(unittest.TestCase):
def runTest(self):
# Test mode not set (BOARD or BCM) exception
with self.assertRaises(RuntimeError) as e:
GPIO.setup(LED_PIN, GPIO.OUT)
self.assertEqual(str(e.exception), 'Please set pin numbering mode using GPIO.setmode(GPIO.BOARD) or GPIO.setmode(GPIO.BCM)')
GPIO.setmode(GPIO.BOARD)
# Test not set as OUTPUT message
with self.assertRaises(RuntimeError) as e:
GPIO.output(LED_PIN, GPIO.HIGH)
self.assertEqual(str(e.exception), 'The GPIO channel has not been set up as an OUTPUT')
GPIO.
|
setup(LED_PIN, GPIO.IN)
# Test setup(..., pull_up_down=GPIO.HIGH) raises exception
with self.assertRaises(ValueError):
GPIO.setup(LED_PIN, GPIO.IN, pull_up_down=GPIO.HIGH)
# Test 'already in use' warning
GPIO.cleanup()
with open('/sys/class/gpio/export','wb') as f:
f.write(str(LED_PIN_BCM).encode())
|
with open('/sys/class/gpio/gpio%s/direction'%LED_PIN_BCM,'wb') as f:
f.write(b'out')
with open('/sys/class/gpio/gpio%s/value'%LED_PIN_BCM,'wb') as f:
f.write(b'1')
with warnings.catch_warnings(record=True) as w:
GPIO.setup(LED_PIN, GPIO.OUT) # generate 'already in use' warning
self.assertEqual(w[0].category, RuntimeWarning)
with open('/sys/class/gpio/unexport','wb') as f:
f.write(str(LED_PIN_BCM).encode())
GPIO.cleanup()
# test initial value of high reads back as high
GPIO.setup(LED_PIN, GPIO.OUT, initial=GPIO.HIGH)
self.assertEqual(GPIO.input(LED_PIN), GPIO.HIGH)
GPIO.cleanup()
# test initial value of low reads back as low
GPIO.setup(LED_PIN, GPIO.OUT, initial=GPIO.LOW)
self.assertEqual(GPIO.input(LED_PIN), GPIO.LOW)
GPIO.cleanup()
class TestInputOutput(unittest.TestCase):
def test_outputread(self):
"""Test that an output() can be input()"""
GPIO.setup(LED_PIN, GPIO.OUT)
GPIO.output(LED_PIN, GPIO.HIGH)
self.assertEqual(GPIO.input(LED_PIN), GPIO.HIGH)
GPIO.output(LED_PIN, GPIO.LOW)
self.assertEqual(GPIO.input(LED_PIN), GPIO.LOW)
GPIO.cleanup()
def test_loopback(self):
"""Test output loops back to another input"""
GPIO.setup(LOOP_IN, GPIO.IN, pull_up_down=GPIO.PUD_OFF)
GPIO.setup(LOOP_OUT, GPIO.OUT, initial=GPIO.LOW)
self.assertEqual(GPIO.input(LOOP_IN), GPIO.LOW)
GPIO.output(LOOP_OUT, GPIO.HIGH)
self.assertEqual(GPIO.input(LOOP_IN), GPIO.HIGH)
GPIO.cleanup()
def test_output_on_input(self):
"""Test output() can not be done on input"""
GPIO.setup(SWITCH_PIN, GPIO.IN)
with self.assertRaises(RuntimeError):
GPIO.output(SWITCH_PIN, GPIO.LOW)
GPIO.cleanup()
class TestSoftPWM(unittest.TestCase):
def runTest(self):
GPIO.setup(LED_PIN, GPIO.OUT)
pwm = GPIO.PWM(LED_PIN, 50)
pwm.start(100)
print "\nPWM tests"
response = raw_input('Is the LED on (y/n) ? ').upper()
self.assertEqual(response,'Y')
pwm.start(0)
response = raw_input('Is the LED off (y/n) ? ').upper()
self.assertEqual(response,'Y')
print "LED Brighten/fade test..."
for i in range(0,3):
for x in range(0,101,5):
pwm.ChangeDutyCycle(x)
time.sleep(0.1)
for x in range(100,-1,-5):
pwm.ChangeDutyCycle(x)
time.sleep(0.1)
pwm.stop()
response = raw_input('Did it work (y/n) ? ').upper()
self.assertEqual(response,'Y')
GPIO.cleanup()
class TestSetWarnings(unittest.TestCase):
def test_alreadyinuse(self):
"""Test 'already in use' warning"""
GPIO.setwarnings(False)
with open('/sys/class/gpio/export','wb') as f:
f.write(str(LED_PIN_BCM).encode())
with open('/sys/class/gpio/gpio%s/direction'%LED_PIN_BCM,'wb') as f:
f.write(b'out')
with open('/sys/class/gpio/gpio%s/value'%LED_PIN_BCM,'wb') as f:
f.write(b'1')
with warnings.catch_warnings(record=True) as w:
GPIO.setup(LED_PIN, GPIO.OUT) # generate 'already in use' warning
self.assertEqual(len(w),0) # should be no warnings
with open('/sys/class/gpio/unexport','wb') as f:
f.write(str(LED_PIN_BCM).encode())
GPIO.cleanup()
GPIO.setwarnings(True)
with open('/sys/class/gpio/export','wb') as f:
f.write(str(LED_PIN_BCM).encode())
with open('/sys/class/gpio/gpio%s/direction'%LED_PIN_BCM,'wb') as f:
f.write(b'out')
with open('/sys/class/gpio/gpio%s/value'%LED_PIN_BCM,'wb') as f:
f.write(b'1')
with warnings.catch_warnings(record=True) as w:
GPIO.setup(LED_PIN, GPIO.OUT) # generate 'already in use' warning
self.assertEqual(w[0].category, RuntimeWarning)
with open('/sys/class/gpio/unexport','wb') as f:
f.write(str(LED_PIN_BCM).encode())
GPIO.cleanup()
def test_cleanupwarning(self):
"""Test initial GPIO.cleanup() produces warning"""
GPIO.setwarnings(False)
GPIO.setup(SWITCH_PIN, GPIO.IN)
with warnings.catch_warnings(record=True) as w:
GPIO.cleanup()
self.assertEqual(len(w),0) # no warnings
GPIO.cleanup()
self.assertEqual(len(w),0) # no warnings
GPIO.setwarnings(True)
GPIO.setup(SWITCH_PIN, GPIO.IN)
with warnings.catch_warnings(record=True) as w:
GPIO.cleanup()
self.assertEqual(len(w),0) # no warnings
GPIO.cleanup()
self.assertEqual(w[0].category, RuntimeWarning) # a warning
class TestVersions(unittest.TestCase):
def test_rpi_revision(self):
if GPIO.RPI_REVISION == 0:
revision = 'Compute Module'
elif GPIO.RPI_REVISION == 1:
revision = 'revision 1'
elif GPIO.RPI_REVISION == 2:
revision = 'revision 2'
elif GPIO.RPI_REVISION == 3:
revision = 'Model B+'
else:
revision = '**undetected**'
response = raw_input('\nThis board appears to be a %s - is this correct (y/n) ? '%revision).upper()
self.assertEqual(response, 'Y')
def test_gpio_version(self):
response = raw_input('\nRPi.GPI
|
mjumbewu/django-model-blocks
|
setup.py
|
Python
|
bsd-3-clause
| 1,497
| 0.008684
|
from setuptools import setup, find_packages
model_blocks = __import__('model_blocks')
readme_file = 'README.rst'
try:
long_description = open(readme_file).read()
except IOError, err:
sys.stderr.write("[ERROR] Cannot find file specified as "
"``long_description`` (%s)\n" % readme_file)
sys.exit(1)
setup(name='django-model-blocks',
version='0.8.9',
description=('Simple filters and tags for generic Django '
'model template partials'),
long_description=long_description,
zip_safe=False,
author='Mjumbe Wawatu Ukweli',
author_email='mjumbewu@kwawatu.com',
url='https://github.com/mjumbewu/django-model-blocks/',
download_url='https://github.com/mjumbewu/django-model-blocks/downloads',
packages = find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
install_requires = [
'Django>=1.2.1',
],
obsoletes = [
'model_filters',
],
provides = [
'model_blocks',
],
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
|
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD
|
License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
)
|
svenfraeys/sftoolbox
|
sftoolboxqt/editor.py
|
Python
|
mit
| 4,569
| 0
|
"""gui systems to manage actions
"""
import os
from sftoolbox.content import ActionContent, PanelContent
from sftoolboxqt import qtgui, qtcore
from sftoolboxqt.tree import PanelsModel, PanelsTreeWidget
class ActionsTreeWidget(qtgui.QTreeWidget):
"""tree widget holding actions
"""
def startDrag(self, dropAction):
# create mime data object
mime = qtcore.QMimeData()
mime.setData('application/x-item', '???')
# start drag
drag = qtgui.QDrag(self)
drag.setMimeData(mime)
# drag.start(qtcore.Qt.CopyAction)
# drag.start(qtcore.Qt.CopyAction)
drag.exec_(dropAction, qtcore.Qt.MoveAction)
class PanelsWidget(qtgui.QWidget):
"""browser for panels
"""
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(PanelsWidget, self).__init__(parent=parent)
self.setWindowTitle('Panels Browser')
self._project = project
self._tree_model = PanelsModel(project)
self._tree = self._create_panels_tree_widget(self._tree_model)
layout = qtgui.QVBoxLayout()
layout.addWidget(self._tree)
self.setLayout(layout)
def _create_panels_tree_widget(self, model):
"""return tree widget that will contain the actions
"""
tree = PanelsTreeWidget()
tree.setModel(model)
tree.setSortingEnabled(True)
tree.setDragEnabled(True)
tree.setAcceptDrops(True)
return tree
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._tree_model.project = value
class ActionsWidget(qtgui.QWidget):
"""browser system for browsing trough the actions
"""
def _create_actions_tree_widget(self):
"""return tree widget that will contain the actions
"""
tree = ActionsTreeWidget()
tree.setHeaderLabels(['Action', 'IDName', 'Tags'])
tree.setSortingEnabled(True)
tree.setDragEnabled(True)
return tree
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(ActionsWidget, self).__init__(parent=parent)
self.setWindowTitle('Actions Browser')
self._project = project
self._tree_widget = self._create_actions_tree_widget()
layout = qtgui.QVBoxLayout()
layout.addWidget(self._tree_widget)
self.setLayout(layout)
layout.addWidget(self._tree_widget)
self._refresh_content()
@property
def project(self):
return self._project
@project.setter
def project(self, value):
|
self._project = value
self._refresh_content()
def _handle_item_double_clicked(self, item):
"""handle doubleclicking item
"""
item.action.run()
def _refresh_content(self):
"""refresh the content
"""
|
self._tree_widget.clear()
self._tree_widget.itemDoubleClicked.connect(
self._handle_item_double_clicked)
if not self.project:
return
for action in self.project.actions:
item = qtgui.QTreeWidgetItem()
icon_filepath = action.absolute_icon_filepath
if icon_filepath and os.path.exists(icon_filepath):
item.setIcon(0, qtgui.QIcon(icon_filepath))
item.setText(0, action.human_label)
item.setText(1, action.idname)
item.setText(2, ', '.join(map(str, action.tags)))
item.action = action
self._tree_widget.addTopLevelItem(item)
class EditorWidget(qtgui.QWidget):
""""""
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(EditorWidget, self).__init__(parent=parent)
self.setWindowTitle('Editor')
self._actions_widget = ActionsWidget(project)
self._panels_widget = PanelsWidget(project)
layout = qtgui.QHBoxLayout()
splitter = qtgui.QSplitter(qtcore.Qt.Horizontal)
splitter.addWidget(self._panels_widget)
splitter.addWidget(self._actions_widget)
layout.addWidget(splitter)
self.setLayout(layout)
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._actions_widget.project = value
self._panels_widget.project = value
def sizeHint(self):
return qtcore.QSize(900, 800)
|
OniOni/ril
|
wsgi.py
|
Python
|
apache-2.0
| 37
| 0
|
from
|
app im
|
port setup
app = setup()
|
kkris/wheatley
|
strategies.py
|
Python
|
bsd-3-clause
| 10,513
| 0.003044
|
"""
strategies
~~~~~~~~~~
Various strategies.
:copyright: (c) 2013 by Matthias Hummel and Kristoffer Kleine.
:license: BSD, see LICENSE for more details.
"""
from graph import (State, make_walkable, split_into_extended_islands,
split_into_subgraphs, make_flooded)
def get_direction(current, target):
x = target.x - current.x
y = target.y - current.y
if x == -1:
return 'WEST'
elif x == 1:
return 'EAST'
elif y == -1:
return 'NORTH'
elif y == 1:
return 'SOUTH'
else:
return 'CURRENT'
class Strategy(object):
def __init__(self, debug=False, round_=0):
self.actions = []
self.position = (0, 0)
self.floodlevel = 0
self.extended_islands = None
self.debug = debug
self.round_ = round_
def do(self, cmd, direction, x=None, y=None):
self.actions.append((cmd + ' ' + direction, x, y))
if cmd == 'GO':
self.position = (x, y)
def commit(self):
actions = self.actions
self.actions = []
return actions
def get_actions(self, graph, position):
raise NotImplementedError()
def split_graph_into_extended_islands(self, graph):
self.extended_islands = split_into_extended_islands(graph)
def dry_one_if_possible(self, graph):
def _set_dry_state(node):
graph.get_node(node.x, node.y).state = State.redry
for island in self.extended_islands:
n = island.get_node(node.x, node.y)
if n is not None:
n.state = State.redry
current_node = graph.get_node(*self.position)
if current_node.state == State.flooded:
self.do('DRY', 'CURRENT', current_node.x, current_node.y)
_set_dry_state(current_node)
return True
for node in current_node.neighbors:
if node.state == State.flooded:
direction = get_direction(current_node, node)
self.do('DRY', direction, node.x, node.y)
_set_dry_state(node)
return True
return False
def go(self, graph, start, target):
next_node = graph.get_next_node_on_path_to(start, target)
if next_node is not None:
direction = get_direction(start, next_node)
self.do('GO', direction, next_node.x, next_node.y)
def find_target(self, graph):
current_node = graph.get_node(*self.position)
target_island = None
for island in self.extended_islands:
other_node = graph.get_node(island.nodes[0].x, island.nodes[0].y)
if not graph.is_reachable(current_node, other_node):
continue
if target_island is None:
target_island = island
else:
if island.calculate_island_value() > target_island.calculate_island_value():
target_island = island
target = target_island.get_middle()
return graph.get_node(target.x, target.y)
class DryMaxStrategy(Strategy):
"""
Dries as many fields as possible
"""
def get_actions(self, graph, position):
self.position = position
current_node = graph.get_node(*self.position)
while len(self.actions) < 3 and self.dry_one_if_possible(graph):
pass
while len(self.actions) < 3:
if len(current_node.neighbors) == 0:
self.do('GO', 'CURRENT')
|
continue
next_node = min(current_node.neighbors, key=lambda n: n.distance_to_flooded)
direction = get_direction(current_node, next_node)
self.do('GO', direction)
current_node = next_node
return self.commit()
class MetaStrategy(Strategy):
"""
Eval
|
uates the current situation and chooses the right strategy
"""
def evaluate_mode(self, walkable, position):
current_node = walkable.get_node(*position)
target_island = None
for island in self.extended_islands:
other_node = walkable.get_node(island.nodes[0].x, island.nodes[0].y)
if not walkable.is_reachable(current_node, other_node): continue
if target_island is None:
target_island = island
elif island.calculate_island_value() > target_island.calculate_island_value():
target_island = island
if target_island is None:
return 'DRYMAX'
if target_island.get_node(*position) is not None:
return 'FARMING'
return 'MOVING'
def get_actions(self, graph, position):
walkable = make_walkable(graph)
self.split_graph_into_extended_islands(walkable)
mode = self.evaluate_mode(walkable, position)
if mode == 'MOVING':
strategy = MovingStrategy(self.debug, self.round_)
elif mode == 'FARMING':
strategy = FarmingStrategy(self.debug, self.round_)
elif mode == 'DRYMAX':
strategy = DryMaxStrategy(self.debug, self.round_)
strategy.extended_islands = self.extended_islands
return strategy.get_actions(walkable, position), mode
class MovingStrategy(Strategy):
"""
This Strategy moves the bot towards more safe places on the island while
drying fields on its way there.
"""
def get_actions(self, graph, position):
self.position = position
moved = False
while len(self.actions) < 2 or (moved and len(self.actions) < 3):
if not self.dry_one_if_possible(graph):
moved = True
current_node = graph.get_node(*self.position)
target = self.find_target(graph)
self.go(graph, current_node, target)
if len(self.actions) < 3:
current_node = graph.get_node(*self.position)
target = self.find_target(graph)
if target == current_node: # we already are at our destination, go towards water
graph.calculate_distance_to_flooded()
target = min(current_node.neighbors, key=lambda n: n.distance_to_flooded)
self.go(graph, current_node, target)
else:
self.go(graph, current_node, target)
return self.commit()
class FarmingStrategy(Strategy):
"""
This Strategy aims to dry as many fields as possible.
"""
def get_nearest_node(self, walkable, island, current_node):
"""
Returns the node of the island that is nearest to our current position
"""
x, y = current_node.x, current_node.y
nearest_node = (None, -1)
for node in island.nodes:
if abs(node.x - x) + abs(node.y - y) > 10:
continue
node = walkable.get_node(node.x, node.y)
if nearest_node[0] is None:
distance = walkable.get_distance_between(current_node, node)
nearest_node = (node, distance)
else:
distance = walkable.get_distance_between(current_node, node)
if distance < nearest_node[1]:
nearest_node = (node, distance)
return nearest_node
def calculate_distance_value(self, walkable, island, current_node):
"""
Calculates a value of an island consisting of the normal island value
minus the distance times 4. This way islands which are near by get a
better overall value
"""
nearest_node, distance = self.get_nearest_node(walkable, island, current_node)
self.nearest_nodes[island] = nearest_node
if nearest_node is None:
distance = 1000
return island.calculate_island_value() - (distance*4)
def get_best_island(self, walkable, current_node):
"""
Returns a flooded island (which is not really an island) which has the
most fields that can be dried, but is not too far away.
"""
best_island = (None, -1)
for island in self.flooded_islands:
node = walkable.get_node
|
jrcapriles/armSimulator
|
ButtonTest.py
|
Python
|
mit
| 1,443
| 0.028413
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 26 23:01:44 2014
@author: Jose Capriles
"""
import pygame, Buttons
from pygame.locals import *
#Initialize pygame
pygame.init()
class Button_Test:
def __init__(self):
self.loopFlag = True
self.main()
#Create a display
def display(self):
self.screen = pygame.display.set_mode((650,370),0,32)
pygame.display.set_caption("Button.py TEST")
#Update the display and show the button
def update_display(self):
self.screen.fill((30,144,255))
self.Button1.update()
pygame.display.flip()
|
#Run the loop
def main(self):
self.display()
self.Button1 = Buttons.Button(self.screen, color = (107,142,35), x = 225, y = 135, length = 200, height = 100, width = 0, text ="Button Test", t
|
ext_color = (255,255,255), font_size=25, fade_on = False)
while self.loopFlag:
self.update_display()
for event in pygame.event.get():
if event.type == pygame.QUIT:
self.loopFlag = False
if event.type == KEYDOWN:
self.loopFlag=False
if event.type == MOUSEBUTTONDOWN:
if self.Button1.pressed(pygame.mouse.get_pos()):
print "Test Passed!"
if __name__ == '__main__':
obj = Button_Test()
|
MOA-2011/enigma2-plugin-extensions-openwebif
|
plugin/controllers/models/config.py
|
Python
|
gpl-2.0
| 7,243
| 0.034102
|
# -*- coding: utf-8 -*-
from enigma import eEnv
from Components.SystemInfo import SystemInfo
from Components.config import config
from os import path, listdir
import xml.etree.cElementTree
from Plugins.Extensions.OpenWebif.__init__ import _
def addCollapsedMenu(name):
tags = config.OpenWebif.webcache.collapsedmenus.value.split("|")
if name not in tags:
tags.append(name)
config.OpenWebif.webcache.collapsedmenus.value = "|".join(tags).strip("|")
config.OpenWebif.webcache.collapsedmenus.save()
return {
"result": True
}
def removeCollapsedMenu(name):
tags = config.OpenWebif.webcache.collapsedmenus.value.split("|")
if name in tags:
tags.remove(name)
config.OpenWebif.webcache.collapsedmenus.value = "|".join(tags).strip("|")
config.OpenWebif.webcache.collapsedmenus.save()
return {
"result": True
}
def getCollapsedMenus():
return {
"result": True,
"collapsed": config.OpenWebif.webcache.collapsedmenus.value.split("|")
}
def setRemoteGrabScreenshot(value):
config.OpenWebif.web
|
cache.remotegrabscreenshot.value = value
config.OpenWebif.webcache.remotegrabscreenshot.save()
return {
"result": True
}
def getRemoteGrabScreenshot():
return {
"result": True,
"remotegrabscreenshot": config.OpenWebif.webcache.remotegrabscreenshot.value
}
def setZapStream(value):
config.OpenWebif.webcache.zapstr
|
eam.value = value
config.OpenWebif.webcache.zapstream.save()
return {
"result": True
}
def getZapStream():
return {
"result": True,
"zapstream": config.OpenWebif.webcache.zapstream.value
}
def getJsonFromConfig(cnf):
if cnf.__class__.__name__ == "ConfigSelection" or cnf.__class__.__name__ == "ConfigSelectionNumber":
if type(cnf.choices.choices) == dict:
choices = []
for choice in cnf.choices.choices:
choices.append((choice, _(cnf.choices.choices[choice])))
elif type(cnf.choices.choices[0]) == tuple:
choices = []
for choice_tuple in cnf.choices.choices:
choices.append((choice_tuple[0], _(choice_tuple[1])))
else:
choices = []
for choice in cnf.choices.choices:
choices.append((choice, _(choice)))
return {
"result": True,
"type": "select",
"choices": choices,
"current": cnf.value
}
elif cnf.__class__.__name__ == "ConfigBoolean" or cnf.__class__.__name__ == "ConfigEnableDisable" or cnf.__class__.__name__ == "ConfigYesNo":
return {
"result": True,
"type": "checkbox",
"current": cnf.value
}
elif cnf.__class__.__name__ == "ConfigSet":
return {
"result": True,
"type": "multicheckbox",
"choices": cnf.choices.choices,
"current": cnf.value
}
elif cnf.__class__.__name__ == "ConfigNumber":
return {
"result": True,
"type": "number",
"current": cnf.value
}
elif cnf.__class__.__name__ == "ConfigInteger":
return {
"result": True,
"type": "number",
"current": cnf.value,
"limits": (cnf.limits[0][0], cnf.limits[0][1])
}
print "[OpenWebif] Unknown class ", cnf.__class__.__name__
return {
"result": False,
"type": "unknown"
}
def saveConfig(path, value):
try:
cnf = eval(path)
if cnf.__class__.__name__ == "ConfigBoolean" or cnf.__class__.__name__ == "ConfigEnableDisable" or cnf.__class__.__name__ == "ConfigYesNo":
cnf.value = value == "true"
elif cnf.__class__.__name__ == "ConfigSet":
values = cnf.value
if int(value) in values:
values.remove(int(value))
else:
values.append(int(value))
cnf.value = values
elif cnf.__class__.__name__ == "ConfigNumber":
cnf.value = int(value)
elif cnf.__class__.__name__ == "ConfigInteger":
cnf_min = int(cnf.limits[0][0])
cnf_max = int(cnf.limits[0][1])
cnf_value = int(value)
if cnf_value < cnf_min:
cnf_value = cnf_min
elif cnf_value > cnf_max:
cnf_value = cnf_max
cnf.value = cnf_value
else:
cnf.value = value
cnf.save()
except Exception, e:
print "[OpenWebif] ", e
return {
"result": False
}
return {
"result": True
}
def getConfigs(key):
configs = []
title = None
if not len(configfiles.sections):
configfiles.getConfigs()
if key in configfiles.section_config:
config_entries = configfiles.section_config[key][1]
title = configfiles.section_config[key][0]
if config_entries:
for entry in config_entries:
try:
data = getJsonFromConfig(eval(entry.text or ""))
text = _(entry.get("text", ""))
if "limits" in data:
text = "%s (%d - %d)" % (text, data["limits"][0], data["limits"][1])
configs.append({
"description": text,
"path": entry.text or "",
"data": data
})
except Exception, e:
pass
return {
"result": True,
"configs": configs,
"title": title
}
def getConfigsSections():
if not len(configfiles.sections):
configfiles.parseConfigFiles()
return {
"result": True,
"sections": configfiles.sections
}
def privSettingValues(prefix, top, result):
for (key, val) in top.items():
name = prefix + "." + key
if isinstance(val, dict):
privSettingValues(name, val, result)
elif isinstance(val, tuple):
result.append((name, val[0]))
else:
result.append((name, val))
def getSettings():
configkeyval = []
privSettingValues("config", config.saved_value, configkeyval)
return {
"result": True,
"settings": configkeyval
}
class ConfigFiles:
def __init__(self):
self.setupfiles = []
self.sections = []
self.section_config = {}
self.allowedsections = ["usage", "recording", "subtitlesetup", "autolanguagesetup", "avsetup", "harddisk", "keyboard", "timezone"]
self.getConfigFiles()
def getConfigFiles(self):
setupfiles = [eEnv.resolve('${datadir}/enigma2/setup.xml')]
locations = ('SystemPlugins', 'Extensions')
libdir = eEnv.resolve('${libdir}')
for location in locations:
plugins = listdir(('%s/enigma2/python/Plugins/%s' % (libdir,location)))
for plugin in plugins:
setupfiles.append(('%s/enigma2/python/Plugins/%s/%s/setup.xml' % (libdir, location, plugin)))
for setupfile in setupfiles:
if path.exists(setupfile):
self.setupfiles.append(setupfile)
def parseConfigFiles(self):
sections = []
for setupfile in self.setupfiles:
# print "[OpenWebif] loading configuration file :", setupfile
setupfile = file(setupfile, 'r')
setupdom = xml.etree.cElementTree.parse(setupfile)
setupfile.close()
xmldata = setupdom.getroot()
for section in xmldata.findall("setup"):
configs = []
key = section.get("key")
if key not in self.allowedsections:
showOpenWebIF = section.get("showOpenWebIF")
if showOpenWebIF == "1":
self.allowedsections.append(key)
else:
continue
# print "[OpenWebif] loading configuration section :", key
for entry in section:
if entry.tag == "item":
requires = entry.get("requires")
if requires and not SystemInfo.get(requires, False):
continue;
if int(entry.get("level", 0)) > config.usage.setup_level.index:
continue
configs.append(entry)
if len(configs):
sections.append({
"key": key,
"description": _(section.get("title"))
})
title = _(section.get("title", ""))
self.section_config[key] = (title, configs)
sections = sorted(sections, key=lambda k: k['description'])
self.sections = sections
configfiles = ConfigFiles()
|
Zex0n/django-simple-cms
|
news/urls.py
|
Python
|
mit
| 286
| 0
|
from django.conf.urls import url
from . import views
urlpat
|
terns = [
# url(r'^news/([\w-]+)$', views.ListView.as_view(), name='list'),
url(r'^news/$', views.ListV
|
iew.as_view(), name='list_all'),
url(r'^news/(?P<pk>[0-9]+)/$', views.DetailView.as_view(), name='detail'),
]
|
bwhite/picarus
|
server/jobs.py
|
Python
|
apache-2.0
| 9,515
| 0.002207
|
#!/usr/bin/env python
if __name__ == '__main__':
from gevent import monkey
monkey.patch_all()
import redis
import json
import pprint
import argparse
import mturk_vision
import base64
import uuid
import pickle
import time
import databases
from hadoop_parse import scrape_hadoop_jobs
class UnauthorizedException(Exception):
"""User is not authorized to make this call"""
class NotFoundException(Exception):
"""Task was not found"""
class Jobs(object):
def __init__(self, host, port, db, annotation_redis_host, annotation_redis_port):
self.args = (host, port, db, annotation_redis_host, annotation_redis_port)
self.redis_host = host
self.redis_port = port
self.db = redis.StrictRedis(host=host, port=port, db=db)
self._owner_prefix = 'owner:'
self._task_prefix = 'task:'
self._lock_prefix = 'lock:'
self.annotation_redis_host = annotation_redis_host
self.annotation_redis_port = annotation_redis_port
self.hadoop_completed_jobs_cache = set()
def __reduce__(self):
return (Jobs, self.args)
def add_task(self, type, owner, params, secret_params):
task = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]
data = {'owner': owner, '_params': json.dumps(secret_params),
'params': json.dumps(params), 'type': type, 'startTime': str(time.time())}
if not self.db.set(self._lock_prefix + task, '', nx=True):
raise UnauthorizedException
# TODO: Do these atomically
self.db.hmset(self._task_prefix + task, data)
self.db.sadd(self._owner_prefix + owner, task)
return task
def _check_owner(self, task, owner):
if self.db.hget(self._task_prefix + task, 'owner') != owner:
raise UnauthorizedException
def _get_task_type(self, task):
out = self.db.hgetall(self._task_prefix + task)
out = self.db.hget(self._task_prefix + task, 'type')
if out is None:
raise NotFoundException
return out
def _check_type(self, task, type):
if self._get_task_type(task) != type:
raise NotFoundException
def _exists(self, task):
if not self.db.exists(self._lock_prefix + task):
raise NotFoundException
def get_task(self, task, owner):
self._exists(task)
self._check_owner(task, owner)
out = self.db.hgetall(self._task_prefix + task)
out = dict((k, v) for k, v in out.items() if not k.startswith('_'))
return out
def get_task_secret(self, task, owner):
self._exists(task)
self._check_owner(task, owner)
return json.loads(self.db.hget(self._task_prefix + task, '_params'))
def delete_task(self, task, owner, **kw):
self._exists(task)
self._check_owner(task, owner)
task_type = self._get_task_type(task)
if task_type == 'annotation':
manager = self.get_annotation_manager(task, data_connection=kw['data_connection'])
# TODO: Do these atomically
self.db.delete(self._task_prefix + task, self._lock_prefix + task)
self.db.srem(self._owner_prefix + owner, task)
if task_type == 'annotation':
manager.destroy() # TODO: MTurk specific
# TODO: For Hadoop jobs kill the task if it is running
# TODO: For worker/crawl/model jobs kill the worker process or send it a signal
def update_task(self, row, columns):
self.db.hmset(self._task_prefix + row, columns)
def update_hadoop_jobs(self, hadoop_jobtracker):
for row, columns in scrape_hadoop_jobs(hadoop_jobtracker, self.hadoop_completed_jobs_cache).items():
# NOTE: We do this at this point as a job may not exist but is finished completed/failed in hadoop
if columns.get('status', '') in ('completed', 'failed'):
self.hadoop_completed_jobs_cache.add(row)
try:
self._exists(row)
self._check_type(row, 'process')
except NotFoundException:
continue
# TODO: Need to do this atomically with the exists check
self.update_task(row, columns)
def get_tasks(self, owner):
outs = {}
for job_key in self.db.smembers(self._owner_prefix + owner):
# TODO: Error check if something gets removed while we are accumulating
task = self._task_prefix + job_key
if self.db.hget(task, 'owner') == owner:
out = self.db.hgetall(task)
out = dict((k, v) for k, v in out.items() if not k.startswith('_'))
outs[task.split(':', 1)[1]] = out
return outs
def get_annotation_manager(self, task, data_connection, sync=False):
self._exists(task)
self._check_type(task, 'annotation')
data = self.db.hgetall(self._task_prefix + task)
p = json.loads(data['params'])
ps = json.loads(data['_params'])
p['sync'] = sync
p['secret'] = str(ps['secret'])
p['redis_address'] = self.annotation_redis_host
p['redis_port'] = int(self.annotation_redis_port)
p['task_key'] = task
# TODO: Currently only compatible with thrift based datastores
if data_connection:
data_connection = data_connection._thrift
return mturk_vision.manager(data=str(ps['data']), data_connection=data_connection, **p)
def get_annotation_manager_check(self, task, owner, data_connection):
self._exists(task)
self._check_type(task, 'annotation')
self._check_owner(task, owner)
return self.get_annotation_manager(task, data_connection)
def add_work(self, front, queue, **kw):
push = self.db.lpush if front else self.db.rpush
push('queue:' + queue, pickle.dumps(kw, -1))
def get_work(self, queues, timeout=0):
out = self.db.brpop(['queue:' + x for x in queues], timeout=timeout)
if not out:
return
queue = out[0][:len('queue:')]
data = pickle.loads(out[1])
print('Processing job from [%s][%s]' % (queue, data['func']))
pprint.pprint(data['method_args'])
return queue, data
def main():
def _get_all_tasks(jobs):
outs = []
for job_key in jobs.db.keys('task:*'):
out = jobs.db.hgetall(job_key)
outs.append(out)
return outs
def _info(args, jobs):
pprint.pprint(_get_all_tasks(jobs))
def _destroy(args, jobs):
jobs.db.flushall()
def job_worker(db, func, method_args, method_kwargs):
getattr(db, func)(*method_args, **method_kwargs)
def _work(args, jobs):
if args.raven:
import raven
RAVEN = raven.Client(args.raven)
else:
RAVEN = None
import gevent_inotifyx as inotifyx
fd = inotifyx.init()
# NOTE: .git/logs/HEAD is the last thing updated after a git pull/merge
inotifyx.add_watch(fd, '../.git/logs/HEAD', inotifyx.IN_MODIFY)
inotifyx.add_watch(fd, '.reloader', inotifyx.IN_MODIFY | inotifyx.IN_ATTRIB)
db = THRIFT_CONSTRUCTOR()
while 1:
try:
work = jobs.get_work(args.queues, timeout=5)
if work:
jobs.add_work(True, 'old' + work[0], **work[1])
job_worker(db=db, **work[1])
if inotifyx.get_events(fd, 0):
print('Shutting down due to new update')
break
except:
if RAVEN:
|
RAVEN.captureException()
raise
parser = argparse.ArgumentParser(description='
|
Picarus job operations')
parser.add_argument('--redis_host', help='Redis Host', default='localhost')
parser.add_argument('--redis_port', type=int, help='Redis Port', default=6379)
parser.add_argument('--raven', help='URL to the Raven/Sentry logging server')
parser.add_argument('--annotations_redis_host', help='Annotations Host', default='localhost')
parser.add_argument('--annotations_redis_port', type=int, help='Annotations Port', def
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/shapely/geometry/point.py
|
Python
|
agpl-3.0
| 6,390
| 0.002817
|
"""Points and related utilities
"""
from ctypes import c_double
from ctypes import cast, POINTER
from shapely.coords import required
from shapely.geos import lgeos, DimensionError
from
|
shapely.geometry.base import BaseGeometry
from shapely.geometry.proxy import CachingGeometryProxy
__all__ = ['Point', 'asPoint']
class Point(BaseGeometry):
"""
A zero dimensional feature
A point has zero length and zero area.
Attributes
----------
x, y
|
, z : float
Coordinate values
Example
-------
>>> p = Point(1.0, -1.0)
>>> print p
POINT (1.0000000000000000 -1.0000000000000000)
>>> p.y
-1.0
>>> p.x
1.0
"""
def __init__(self, *args):
"""
Parameters
----------
There are 2 cases:
1) 1 parameter: this must satisfy the numpy array protocol.
2) 2 or more parameters: x, y, z : float
Easting, northing, and elevation.
"""
BaseGeometry.__init__(self)
if len(args) > 0:
self._set_coords(*args)
# Coordinate getters and setters
@property
def x(self):
"""Return x coordinate."""
return self.coords[0][0]
@property
def y(self):
"""Return y coordinate."""
return self.coords[0][1]
@property
def z(self):
"""Return z coordinate."""
if self._ndim != 3:
raise DimensionError("This point has no z coordinate.")
return self.coords[0][2]
@property
def __geo_interface__(self):
return {
'type': 'Point',
'coordinates': self.coords[0]
}
@property
def ctypes(self):
if not self._ctypes_data:
array_type = c_double * self._ndim
array = array_type()
xy = self.coords[0]
array[0] = xy[0]
array[1] = xy[1]
if self._ndim == 3:
array[2] = xy[2]
self._ctypes_data = array
return self._ctypes_data
def array_interface(self):
"""Provide the Numpy array protocol."""
ai = self.array_interface_base
ai.update({'shape': (self._ndim,)})
return ai
__array_interface__ = property(array_interface)
@property
def bounds(self):
xy = self.coords[0]
return (xy[0], xy[1], xy[0], xy[1])
# Coordinate access
def _set_coords(self, *args):
self.empty()
if len(args) == 1:
self._geom, self._ndim = geos_point_from_py(args[0])
else:
self._geom, self._ndim = geos_point_from_py(tuple(args))
coords = property(BaseGeometry._get_coords, _set_coords)
@property
def xy(self):
"""Separate arrays of X and Y coordinate values
Example:
>>> x, y = Point(0, 0).xy
>>> list(x)
[0.0]
>>> list(y)
[0.0]
"""
return self.coords.xy
class PointAdapter(CachingGeometryProxy, Point):
_owned = False
def __init__(self, context):
self.context = context
self.factory = geos_point_from_py
@property
def _ndim(self):
try:
# From array protocol
array = self.context.__array_interface__
n = array['shape'][0]
assert n == 2 or n == 3
return n
except AttributeError:
# Fall back on list
return len(self.context)
@property
def __array_interface__(self):
"""Provide the Numpy array protocol."""
try:
return self.context.__array_interface__
except AttributeError:
return self.array_interface()
_get_coords = BaseGeometry._get_coords
def _set_coords(self, ob):
raise NotImplementedError("Adapters can not modify their sources")
coords = property(_get_coords)
def asPoint(context):
"""Adapt an object to the Point interface"""
return PointAdapter(context)
def geos_point_from_py(ob, update_geom=None, update_ndim=0):
"""Create a GEOS geom from an object that is a coordinate sequence
or that provides the array interface.
Returns the GEOS geometry and the number of its dimensions.
"""
# If numpy is present, we use numpy.require to ensure that we have a
# C-continguous array that owns its data. View data will be copied.
ob = required(ob)
try:
# From array protocol
array = ob.__array_interface__
assert len(array['shape']) == 1
n = array['shape'][0]
assert n == 2 or n == 3
dz = None
da = array['data']
if type(da) == type((0,)):
cdata = da[0]
# If we had numpy, we would do
# from numpy.ctypeslib import as_ctypes
# cp = as_ctypes(ob) - check that code?
cp = cast(cdata, POINTER(c_double))
dx = c_double(cp[0])
dy = c_double(cp[1])
if n == 3:
dz = c_double(cp[2])
ndim = 3
else:
dx, dy = da[0:2]
if n == 3:
dz = da[2]
ndim = 3
except AttributeError:
# Fall back on the case of Python sequence data
# Accept either (x, y) or [(x, y)]
if type(ob[0]) == type(tuple()):
coords = ob[0]
else:
coords = ob
n = len(coords)
dx = c_double(coords[0])
dy = c_double(coords[1])
dz = None
if n == 3:
dz = c_double(coords[2])
if update_geom:
cs = lgeos.GEOSGeom_getCoordSeq(update_geom)
if n != update_ndim:
raise ValueError(
"Wrong coordinate dimensions; this geometry has dimensions: %d" \
% update_ndim)
else:
cs = lgeos.GEOSCoordSeq_create(1, n)
# Because of a bug in the GEOS C API, always set X before Y
lgeos.GEOSCoordSeq_setX(cs, 0, dx)
lgeos.GEOSCoordSeq_setY(cs, 0, dy)
if n == 3:
lgeos.GEOSCoordSeq_setZ(cs, 0, dz)
if update_geom:
return None
else:
return lgeos.GEOSGeom_createPoint(cs), n
def update_point_from_py(geom, ob):
geos_point_from_py(ob, geom._geom, geom._ndim)
# Test runner
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
|
blab/nextstrain-db
|
download_all.py
|
Python
|
agpl-3.0
| 7,407
| 0.006615
|
# Simple script to run required operations to
# 1. Download FASTAs from database
# 2. Copy FASTAs to nextflu directory
# 3. Download titer tables from database
# 4. Copy titer tables to nextflu directory
# Run from base fauna directory with python flu/download_all.py
# Assumes that nextflu/, nextflu-cdc/ and nextflu-cdc-fra/ are
# sister directories to fauna/
import os, subprocess
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--virus', default="flu", help="virus to download; default is flu")
parser.add_argument('--flu_lineages', default=["h3n2", "h1n1pdm", "vic", "yam"], nargs='+', type
|
= str, help ="seasonal flu lineages to download, options are h3n2, h1n1pdm, vic and yam")
parser.add_argument('--segments', type=str, default=['ha', 'na'], nargs='+', help="specify segment(s) to download")
parser.add_argument('--sequences', default=False, action="store_true", help="download sequences from vdb")
parser.add_argument('--titers', default=False, action="store_true", help="download titers from tdb")
parser.add_argument('--titers_s
|
ources', default=["base", "crick", "cdc", "niid", "vidrl"], nargs='+', type = str, help ="titer sources to download, options are base, cdc, crick, niid and vidrl")
parser.add_argument('--titers_passages', default=["egg", "cell"], nargs='+', type = str, help ="titer passage types to download, options are egg and cell")
def concatenate_titers(params, passage, assay):
for lineage in params.flu_lineages:
out = 'data/%s_who_%s_%s_titers.tsv'%(lineage, assay, passage)
hi_titers = []
for source in params.titers_sources:
hi_titers_file = 'data/%s_%s_%s_%s_titers.tsv'%(lineage, source, assay, passage)
if os.path.isfile(hi_titers_file):
hi_titers.append(hi_titers_file)
if len(hi_titers) > 0:
with open(out, 'w+') as f:
call = ['cat'] + hi_titers
print call
subprocess.call(call, stdout=f)
for lineage in params.flu_lineages:
out = 'data/%s_public_%s_%s_titers.tsv'%(lineage, assay, passage)
hi_titers = []
for source in ["base", "cdc"]:
hi_titers_file = 'data/%s_%s_%s_%s_titers.tsv'%(lineage, source, assay, passage)
if os.path.isfile(hi_titers_file):
hi_titers.append(hi_titers_file)
if len(hi_titers) > 0:
with open(out, 'w+') as f:
call = ['cat'] + hi_titers
print call
subprocess.call(call, stdout=f)
if __name__=="__main__":
params = parser.parse_args()
if params.virus == "flu":
# Download FASTAs from database
if params.sequences:
segments = params.segments
for segment in segments:
for lineage in params.flu_lineages:
call = "python vdb/flu_download.py -db vdb -v flu --select locus:%s lineage:seasonal_%s --fstem %s_%s --resolve_method split_passage"%(segment.upper(), lineage, lineage, segment)
print(call)
os.system(call)
if params.titers:
# download titers
for source in params.titers_sources:
if source == "base":
for lineage in params.flu_lineages:
call = "python tdb/download.py -db tdb -v flu --subtype %s --select assay_type:hi --fstem %s_base_hi_cell"%(lineage, lineage)
print(call)
os.system(call)
if source in ["cdc", "crick", "niid", "vidrl"]:
for passage in params.titers_passages:
for lineage in params.flu_lineages:
call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:hi serum_passage_category:%s --fstem %s_%s_hi_%s"%(source, lineage, passage, lineage, source, passage)
print(call)
os.system(call)
lineage = 'h3n2'
call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:fra serum_passage_category:%s --fstem %s_%s_fra_%s"%(source, lineage, passage, lineage, source, passage)
print(call)
os.system(call)
if source == "cdc":
for lineage in params.flu_lineages:
call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:hi serum_host:human --fstem %s_%s_hi_%s_human"%(source, lineage, lineage, source, passage)
print(call)
os.system(call)
lineage = 'h3n2'
call = "python tdb/download.py -db %s_tdb -v flu --subtype %s --select assay_type:fra serum_host:human --fstem %s_%s_fra_%s_human"%(source, lineage, lineage, source, passage)
print(call)
os.system(call)
# concatenate to create default HI strain TSVs for each subtype
concatenate_titers(params, "cell", "hi")
concatenate_titers(params, "cell", "fra")
concatenate_titers(params, "egg", "hi")
concatenate_titers(params, "egg", "fra")
elif params.virus == "ebola":
call = "python vdb/ebola_download.py -db vdb -v ebola --fstem ebola"
print(call)
os.system(call)
elif params.virus == "dengue":
# Download all serotypes together.
call = "python vdb/dengue_download.py"
print(call)
os.system(call)
# Download individual serotypes.
serotypes = [1, 2, 3, 4]
for serotype in serotypes:
call = "python vdb/dengue_download.py --select serotype:%i" % serotype
print(call)
os.system(call)
# Download titers.
if params.titers:
call = "python tdb/download.py -db tdb -v dengue --fstem dengue"
print(call)
os.system(call)
elif params.virus == "zika":
call = "python vdb/zika_download.py -db vdb -v zika --fstem zika"
print(call)
os.system(call)
elif params.virus == "mumps":
call = "python vdb/mumps_download.py -db vdb -v mumps --fstem mumps --resolve_method choose_genbank"
print(call)
os.system(call)
elif params.virus == "h7n9" or params.virus == "avian":
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:PB2 --fstem h7n9_pb2")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:PB1 --fstem h7n9_pb1")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:PA --fstem h7n9_pa")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:HA --fstem h7n9_ha")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:NP --fstem h7n9_np")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:NA --fstem h7n9_na")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:MP --fstem h7n9_mp")
os.system("python vdb/h7n9_download.py -db vdb -v h7n9 --select locus:NS --fstem h7n9_ns")
else:
print("%s is an invalid virus type.\nValid viruses are flu, ebola, dengue, zika, mumps, h7n9, and avian."%(params.virus))
sys.exit(2)
|
tweemeterjop/thug
|
thug/ActiveX/modules/Kingsoft.py
|
Python
|
gpl-2.0
| 350
| 0
|
# Kingsoft An
|
tivirus
# CVE-NOMATCH
import logging
log = logging.getLogger("Thug")
def SetUninstallName(self, arg):
if len(arg) > 900:
log.ThugLogging.log_exploit_event(self._window.url,
"Kingsoft AntiVirus ActiveX",
"SetUninstallName Heap Ov
|
erflow")
|
suhe/odoo
|
addons/payroll_cancel/controllers/controllers.py
|
Python
|
gpl-3.0
| 806
| 0.003722
|
# -*- coding: utf-8 -*-
from openerp import http
# class PayrollCancel(http.Controller):
# @http.route('/payroll_cancel/payroll_cancel/', auth='publ
|
ic')
# def index(self, **kw):
# return "Hello, world"
# @http.route('/payroll_cancel/payroll_cancel/objects/', auth='public')
# def list(self, **kw)
|
:
# return http.request.render('payroll_cancel.listing', {
# 'root': '/payroll_cancel/payroll_cancel',
# 'objects': http.request.env['payroll_cancel.payroll_cancel'].search([]),
# })
# @http.route('/payroll_cancel/payroll_cancel/objects/<model("payroll_cancel.payroll_cancel"):obj>/', auth='public')
# def object(self, obj, **kw):
# return http.request.render('payroll_cancel.object', {
# 'object': obj
# })
|
diogocs1/comps
|
web/addons/note/note.py
|
Python
|
apache-2.0
| 8,900
| 0.01
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
from openerp.tools import html2plaintext
class note_stage(osv.osv):
""" Category of Note """
_name = "note.stage"
_description = "Note Stage"
_columns = {
'name': fields.char('Stage Name', translate=True, required=True),
'sequence': fields.integer('Sequence', help="Used to order the note stages"),
'user_id': fields.many2one('res.users', 'Owner', help="Owner of the note stage.", required=True, ondelete='cascade'),
'fold': fields.boolean('Folded by Default'),
}
_order = 'sequence asc'
_defaults = {
'fold': 0,
'user_id': lambda self, cr, uid, ctx: uid,
'sequence' : 1,
}
class note_tag(osv.osv):
_name = "note.tag"
_description = "Note Tag"
_columns = {
'name' : fields.char('Tag Name', required=True),
}
class note_note(osv.osv):
""" Note """
_name = 'note.note'
_inherit = ['mail.thread']
_description = "Note"
#writing method (no modification of values)
def name_create(self, cr, uid, name, context=None):
rec_id = self.create(cr, uid, {'memo': name}, context=context)
return self.name_get(cr, uid, [rec_id], context)[0]
#read the first line (convert hml into text)
def _get_note_first_line(self, cr, uid, ids, name="", args={}, context=None):
res = {}
for note in self.browse(cr, uid, ids, context=context):
res[note.id] = (note.memo and html2plaintext(note.memo) or "").strip().replace('*','').split("\n")[0]
return res
def onclick_note_is_done(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'open': False, 'date_done': fields.date.today()}, context=context)
def onclick_note_not_done(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'open': True}, context=context)
#return the default stage for the uid user
def _get_default_stage_id(self,cr,uid,context=None):
ids = self.pool.get('note.stage').search(cr,uid,[('user_id','=',uid)], context=context)
return ids and ids[0] or False
def _set_stage_per_user(self, cr, uid, id, name, value, args=None, context=None):
note = self.browse(cr, uid, id, context=context)
if not value: return False
stage_ids = [value] + [stage.id for stage in note.stage_ids if stage.user_id.id != uid ]
return self.write(cr, uid, [id], {'stage_ids': [(6, 0, set(stage_ids))]}, context=context)
def _get_stage_per_user(self, cr, uid, ids, name, args, context=None):
result = dict.fromkeys(ids, False)
for record in self.browse(cr, uid, ids, context=context):
for stage in record.stage_ids:
if stage.user_id.id == uid:
result[record.id] = stage.id
return result
_columns = {
'name': fields.function(_get_note_first_line,
string='Note Summary',
type='text', store=True),
'user_id': fields.many2one('res.users', 'Owner'),
'memo': fields.html('Note Content'),
'sequence': fields.integer('Sequence'),
'stage_id': fields.function(_get_stage_per_user,
fnct_inv=_set_stage_per_user,
string='Stage',
type='many2one',
relation='note.stage'),
'stage_ids': fields.many2many('note.stage','note_stage_rel','note_id','stage_id','Stages of Users'),
'open': fields.boolean('Active', track_visibility='onchange'),
'date_done': fields.date('Date done'),
'color': fields.integer('Color Index'),
'tag_ids' : fields.many2many('note.tag','note_tags_rel','note_id','tag_id','Tags'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx=None: uid,
'open' : 1,
'stage_id' : _get_default_stage_id,
}
_order = 'sequence'
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
if groupby and groupby[0]=="stage_id":
#search all stages
current_stage_ids = self.pool.get('note.stage').search(cr,uid,[('user_id','=',uid)], context=context)
if current_stage_ids: #if the user have some stages
stages = self.pool['note.stage'].browse(cr, uid, current_stage_ids, context=context)
result = [{ #notes by stage for stages user
'__context': {'group_by': groupby[1:]},
'__domain': domain + [('stage_ids.id', '=', stage.id)],
'stage_id': (stage.id, stage.name),
'stage_id_count': self.search(cr,uid, domain+[('stage_ids', '=', stage.id)], context=context, count=True),
'__fold': stage.fold,
} for stage in stages]
#note without user's stage
nb_notes_ws = self.search(cr,uid, domain+[('stage_ids', 'not in', current_stage_ids)], context=context, count=True)
if nb_notes_ws:
# add note to the first column if it's the first stage
dom_not_in = ('stage_ids', 'not in', current_stage_ids)
if result and result[0]['stage_id'][0] == current_stage_ids[0]:
dom_in = result[0]['__domain'].pop()
result[0]['__domain'] = domain + ['|', dom_in, dom_not_in]
result[0]['stage_id_count'] += nb_notes_ws
else:
# add the first stage column
result = [{
'__context': {'group_by': groupby[1:]},
'__domain': domain + [dom_not_in],
'stage_id': (stages[0].id, stages[0].name),
'stage_id_count':nb_notes_ws,
'__fold': stages[0].name,
}] + result
else: # if stage_ids is empty
#note without user's stage
nb_notes_ws = self.search(cr,uid, domain, context=context, count=True)
if nb_notes_ws:
result = [{ #notes for unknown stage
'__context': {'group_by': groupby[1:]},
'__domain': domain,
'stage_id': False,
'stage_id_count':nb_notes_ws
}]
else:
result = []
return result
else:
return super(note_note, self).read_group(self, cr, uid, domain, fields, groupby,
offset=offset, limit=limit, context=context, orderby=orderby,lazy=l
|
azy)
#upgrade config setting page to configure pad, fancy and tags mode
class note_base_config_settings(osv.osv_memory):
_inherit = 'base.config.settings'
_columns = {
|
'module_note_pad': fields.boolean('Use collaborative pads (etherpad)'),
'group_note_fancy': fields.boolean('Use fancy layouts for notes', implied_group='note.group_note_fancy'),
}
class res_users(osv.Model):
_name = 'res.users'
_inherit = ['
|
macarthur-lab/seqr
|
reference_data/migrations/0008_geneexpression.py
|
Python
|
agpl-3.0
| 724
| 0.002762
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-09-17 15:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.M
|
igration):
dependencies = [
('reference_data', '0007_auto_20180809_2053'),
]
operations = [
migrations.CreateModel(
name='GeneExpression',
fields=[
('id', models.AutoField(aut
|
o_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('expression_values', models.TextField(blank=True, null=True)),
('gene', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reference_data.GeneInfo')),
],
),
]
|
enthought/traitsgui
|
enthought/pyface/dock/dock_sizer.py
|
Python
|
bsd-3-clause
| 155,245
| 0.020265
|
#-------------------------------------------------------------------------------
#
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 10/18/2005
#
#-------------------------------------------------------------------------------
""" Pyface 'DockSizer' support.
This package implements the sizer associated with a Pyface DockWindow
component. The sizer manages the layout of the DockWindow child controls
and the notebook tabs and dragbars associated with the DockWindow.
"""
#
|
-------------------------------------------------------------------------------
# Imports:
|
#-------------------------------------------------------------------------------
import wx, sys
from enthought.traits.api \
import HasPrivateTraits, Instance, Str, Int, List, Enum, Tuple, Any, \
Range, Property, Callable, Constant, Event, Undefined, Bool, \
cached_property
from enthought.traits.ui.dock_window_theme \
import dock_window_theme
from enthought.traits.ui.wx.helper \
import BufferDC
from enthought.pyface.api import SystemMetrics
from enthought.pyface.image_resource \
import ImageResource
from enthought.util.wx.drag_and_drop \
import PythonDropSource
from enthought.pyface.timer.api \
import do_later, do_after
from idockable \
import IDockable
from ifeature_tool \
import IFeatureTool
# Define version dependent values:
wx_26 = (wx.__version__[:3] == '2.6')
is_mac = (sys.platform == 'darwin')
#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------
# Standard font text height:
text_dy = 13
# Maximum allowed length of a tab label:
MaxTabLength = 30
# Size of a drag bar (in pixels):
DragBarSize = 14
# Images sizes (in pixels):
CloseTabSize = 10
CloseDragSize = 7
# Tab drawing states:
TabInactive = 0
TabActive = 1
TabHover = 2
NormalStates = ( TabInactive, TabActive )
NotActiveStates = ( TabInactive, TabHover )
# Feature overlay colors:
FeatureBrushColor = ( 255, 255, 255 )
FeaturePenColor = ( 92, 92, 92 )
# Color used to update the screen while dragging a splitter bar:
DragColor = ( 96, 96, 96 )
# Color used to update the screen while showing a docking operation in progress:
DockColorBrush = ( 255, 0, 0, 96 )
# Drop Info kinds:
DOCK_TOP = 0
DOCK_BOTTOM = 1
DOCK_LEFT = 2
DOCK_RIGHT = 3
DOCK_TAB = 4
DOCK_TABADD = 5
DOCK_BAR = 6
DOCK_NONE = 7
DOCK_SPLITTER = 8
DOCK_EXPORT = 9
# Splitter states:
SPLIT_VLEFT = 0
SPLIT_VMIDDLE = 1
SPLIT_VRIGHT = 2
SPLIT_HTOP = 3
SPLIT_HMIDDLE = 4
SPLIT_HBOTTOM = 5
# Empty clipping area:
no_clip = ( 0, 0, 0, 0 )
# Valid sequence types:
SequenceType = ( list, tuple )
# Tab scrolling directions:
SCROLL_LEFT = 1
SCROLL_RIGHT = 2
SCROLL_TO = 3
# Feature modes:
FEATURE_NONE = -1 # Has no features
FEATURE_NORMAL = 0 # Has normal features
FEATURE_CHANGED = 1 # Has changed or new features
FEATURE_DROP = 2 # Has drag data compatible drop features
FEATURE_DISABLED = 3 # Has feature icon, but is currently disabled
FEATURE_VISIBLE = 4 # Has visible features (mouseover mode)
FEATURE_DROP_VISIBLE = 5 # Has visible drop features (mouseover mode)
FEATURE_PRE_NORMAL = 6 # Has normal features (but has not been drawn yet)
FEATURE_EXTERNAL_DRAG = 256 # A drag started in another DockWindow is active
# Feature sets:
NO_FEATURE_ICON = ( FEATURE_NONE, FEATURE_DISABLED, FEATURE_VISIBLE,
FEATURE_DROP_VISIBLE )
FEATURES_VISIBLE = ( FEATURE_VISIBLE, FEATURE_DROP_VISIBLE )
FEATURE_END_DROP = ( FEATURE_DROP, FEATURE_VISIBLE, FEATURE_DROP_VISIBLE )
NORMAL_FEATURES = ( FEATURE_NORMAL, FEATURE_DISABLED )
#-------------------------------------------------------------------------------
# Global data:
#-------------------------------------------------------------------------------
# Standard font used by the DockWindow:
standard_font = None
# The list of available DockWindowFeatures:
features = []
#-------------------------------------------------------------------------------
# Trait definitions:
#-------------------------------------------------------------------------------
# Bounds (i.e. x, y, dx, dy):
Bounds = Tuple( Int, Int, Int, Int )
# Docking drag bar style:
DockStyle = Enum( 'horizontal', 'vertical', 'tab', 'fixed' )
#-------------------------------------------------------------------------------
# Adds a new DockWindowFeature class to the list of available features:
#-------------------------------------------------------------------------------
def add_feature ( feature_class ):
""" Adds a new DockWindowFeature class to the list of available features.
"""
global features
result = (feature_class not in features)
if result:
features.append( feature_class )
# Mark the feature class as having been installed:
if feature_class.state == 0:
feature_class.state = 1
return result
#-------------------------------------------------------------------------------
# Sets the standard font to use for a specified device context:
#-------------------------------------------------------------------------------
def set_standard_font ( dc ):
""" Sets the standard font to use for a specified device context.
"""
global standard_font
if standard_font is None:
standard_font = wx.SystemSettings_GetFont( wx.SYS_DEFAULT_GUI_FONT )
dc.SetFont( standard_font )
return dc
#-------------------------------------------------------------------------------
# Clears a window to the standard background color:
#-------------------------------------------------------------------------------
def clear_window ( window ):
""" Clears a window to the standard background color.
"""
bg_color = SystemMetrics().dialog_background_color
bg_color = wx.Colour(bg_color[0]*255, bg_color[1]*255, bg_color[2]*255)
dx, dy = window.GetSizeTuple()
dc = wx.PaintDC( window )
dc.SetBrush( wx.Brush( bg_color, wx.SOLID ) )
dc.SetPen( wx.TRANSPARENT_PEN )
dc.DrawRectangle( 0, 0, dx, dy )
#-------------------------------------------------------------------------------
# Gets a temporary device context for a specified window to draw in:
#-------------------------------------------------------------------------------
def get_dc ( window ):
""" Gets a temporary device context for a specified window to draw in.
"""
if is_mac:
dc = wx.ClientDC( window )
x, y = window.GetPositionTuple()
dx, dy = window.GetSizeTuple()
while True:
window = window.GetParent()
if window is None:
break
xw, yw = window.GetPositionTuple()
dxw, dyw = window.GetSizeTuple()
dx, dy = min( dx, dxw - x ), min( dy, dyw - y )
x += xw
y += yw
dc.SetClippingRegion( 0, 0, dx, dy )
return ( dc, 0, 0 )
x, y = window.ClientToScreenXY( 0, 0 )
return ( wx.ScreenDC(), x, y )
#-------------------------------------------------------------------------------
# 'DockImages' class:
#-------------------------------------------------------------------------------
class DockImages ( HasPrivateTraits ):
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Image for closing a tab:
close_tab = Instance( ImageResource, ImageResource( 'close_tab' ) )
# Image for closing a drag bar:
close_drag = Instance( ImageResource, ImageResource( 'close_drag' ) )
#
|
Uberlearner/uberlearner
|
uberlearner/courses/admin.py
|
Python
|
mit
| 848
| 0.008255
|
from django.contrib import admin
from courses.models import Course, Instructor, Page, Enrollment
class CourseAdmin(admin.ModelAdmin):
list_display = ['title', 'instructor', 'language', 'popularity', 'is_public', 'deleted']
prepopulated_fields = {
'slug': ('title', )
}
def queryset(self, request):
qs = self.model.all_objects.get_query_set()
|
# the following is needed from the superclass
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
class InstructorAdmin(admin.ModelAdmin):
list_display = ['user', 'popularity']
class PageAdmin(admin.ModelAdmin):
pass
admin.site.register(Course, CourseAdmin)
admin.site.register(Instructor, InstructorAdmin)
admin.site.register(Page, PageAdmin)
admin.site.register(Enrollm
|
ent)
|
hamperbot/factoids2
|
hamper_factoids/parser.py
|
Python
|
mpl-2.0
| 1,049
| 0.000953
|
import parsley
_grammar = r"""
parse = pair:head (' '+ pair)*:tail -> dict([head] + tail)
pair = ident:i '=' value:v -> (i, v)
ident = <letter letterOrDigit*>
value = string | regex | number | word
string = '"' (escapedChar | ~'"' anything)*:c '"' -> ''.join(c)
| "'" (escapedChar | ~"'" anything)*:c "'" -> ''.join(c)
regex = '/' (escapedChar | ~'/' anything)*:c '/'
|
-> '/' + ''.join(c) + '/'
word = <(~' ' anything)+>
# A number is optionally a negative sign, followed by an intPart, and then
# maybe a floatPart.
number = ('-' | -> ''):sign
( (intPart:i floatPart:f -> float(sign + i + f ))
| (intPart:i -> int(sign + i))
| (floatPart:f -> float(sign + '0' + f)))
digit = :x ?(x in '0123456789') -> x
digit1_9 = :x ?(x in '123456789') -> x
intPart = (digit1_9:first <digit+>:rest ->
|
first + rest)
| digit
floatPart = <'.' digit+>
# This matches a *single* backslash, followed by something else, which it returns.
escapedChar = "\\\\" anything
"""
learn_grammar = parsley.makeGrammar(_grammar, {})
|
mattew/mattew.github.io-src
|
publishconf.py
|
Python
|
mit
| 531
| 0.00565
|
#!/usr/bin/en
|
v python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'http://mpdev.mattew.se'
RELATIVE_URLS = False
FEED_ALL_ATO
|
M = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = False
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
|
cs-au-dk/Artemis
|
WebKit/Tools/Scripts/webkitpy/layout_tests/layout_package/json_layout_results_generator.py
|
Python
|
gpl-3.0
| 7,573
| 0.002377
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIM
|
ITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTA
|
L,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
class JSONLayoutResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
"""A JSON results generator for layout tests."""
LAYOUT_TESTS_PATH = "LayoutTests"
# Additional JSON fields.
WONTFIX = "wontfixCounts"
FAILURE_TO_CHAR = {test_expectations.PASS: json_results_generator.JSONResultsGeneratorBase.PASS_RESULT,
test_expectations.SKIP: json_results_generator.JSONResultsGeneratorBase.SKIP_RESULT,
test_expectations.FAIL: "Y",
test_expectations.CRASH: "C",
test_expectations.TIMEOUT: "T",
test_expectations.IMAGE: "I",
test_expectations.TEXT: "F",
test_expectations.MISSING: "O",
test_expectations.AUDIO: "A",
test_expectations.IMAGE_PLUS_TEXT: "Z"}
def __init__(self, port, builder_name, build_name, build_number,
results_file_base_path, builder_base_url,
test_timings, expectations, result_summary, all_tests,
test_results_server=None, test_type="", master_name=""):
"""Modifies the results.json file. Grabs it off the archive directory
if it is not found locally.
Args:
result_summary: ResultsSummary object storing the summary of the test
results.
"""
super(JSONLayoutResultsGenerator, self).__init__(
port, builder_name, build_name, build_number, results_file_base_path,
builder_base_url, {}, port.repository_paths(),
test_results_server, test_type, master_name)
self._expectations = expectations
self._result_summary = result_summary
self._failures = dict((test_name, result_summary.results[test_name].type) for test_name in result_summary.failures)
self._all_tests = all_tests
self._test_timings = dict((test_tuple.test_name, test_tuple.test_run_time) for test_tuple in test_timings)
self.generate_json_output()
def _get_path_relative_to_layout_test_root(self, test):
"""Returns the path of the test relative to the layout test root.
For example, for:
src/third_party/WebKit/LayoutTests/fast/forms/foo.html
We would return
fast/forms/foo.html
"""
index = test.find(self.LAYOUT_TESTS_PATH)
if index is not -1:
index += len(self.LAYOUT_TESTS_PATH)
if index is -1:
# Already a relative path.
relativePath = test
else:
relativePath = test[index + 1:]
# Make sure all paths are unix-style.
return relativePath.replace('\\', '/')
# override
def _get_test_timing(self, test_name):
if test_name in self._test_timings:
# Floor for now to get time in seconds.
return int(self._test_timings[test_name])
return 0
# override
def _get_failed_test_names(self):
return set(self._failures.keys())
# override
def _get_modifier_char(self, test_name):
if test_name not in self._all_tests:
return self.NO_DATA_RESULT
if test_name in self._failures:
return self.FAILURE_TO_CHAR[self._failures[test_name]]
return self.PASS_RESULT
# override
def _get_result_char(self, test_name):
return self._get_modifier_char(test_name)
# override
def _insert_failure_summaries(self, results_for_builder):
summary = self._result_summary
self._insert_item_into_raw_list(results_for_builder,
len((set(summary.failures.keys()) |
summary.tests_by_expectation[test_expectations.SKIP]) &
summary.tests_by_timeline[test_expectations.NOW]),
self.FIXABLE_COUNT)
self._insert_item_into_raw_list(results_for_builder,
self._get_failure_summary_entry(test_expectations.NOW),
self.FIXABLE)
self._insert_item_into_raw_list(results_for_builder,
len(self._expectations.get_tests_with_timeline(
test_expectations.NOW)), self.ALL_FIXABLE_COUNT)
self._insert_item_into_raw_list(results_for_builder,
self._get_failure_summary_entry(test_expectations.WONTFIX),
self.WONTFIX)
# override
def _normalize_results_json(self, test, test_name, tests):
super(JSONLayoutResultsGenerator, self)._normalize_results_json(
test, test_name, tests)
# Remove tests that don't exist anymore.
full_path = self._filesystem.join(self._port.layout_tests_dir(), test_name)
full_path = self._filesystem.normpath(full_path)
if not self._filesystem.exists(full_path):
del tests[test_name]
def _get_failure_summary_entry(self, timeline):
"""Creates a summary object to insert into the JSON.
Args:
summary ResultSummary object with test results
timeline current test_expectations timeline to build entry for
(e.g., test_expectations.NOW, etc.)
"""
entry = {}
summary = self._result_summary
timeline_tests = summary.tests_by_timeline[timeline]
entry[self.SKIP_RESULT] = len(
summary.tests_by_expectation[test_expectations.SKIP] &
timeline_tests)
entry[self.PASS_RESULT] = len(
summary.tests_by_expectation[test_expectations.PASS] &
timeline_tests)
for failure_type in summary.tests_by_expectation.keys():
if failure_type not in self.FAILURE_TO_CHAR:
continue
count = len(summary.tests_by_expectation[failure_type] &
timeline_tests)
entry[self.FAILURE_TO_CHAR[failure_type]] = count
return entry
|
antoinecarme/pyaf
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_LinearTrend_Seasonal_DayOfMonth_SVR.py
|
Python
|
bsd-3-clause
| 160
| 0.05
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Logit'] , ['LinearTrend'
|
] , ['Seasonal_DayOfMonth']
|
, ['SVR'] );
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.