Each row of this dataset holds one prefix/middle/suffix (fill-in-the-middle) example taken from a Python source file, plus its repository metadata. The column schema:

| column | type | range |
|---|---|---|
| repo_name | string | 5–100 chars |
| path | string | 4–231 chars |
| language | string | 1 class (Python) |
| license | string | 15 classes |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | 0–8.16k chars |
| middle | string | 3–512 chars |
| suffix | string | 0–8.17k chars |

Each example below appears as a metadata row (repo_name, path, language, license, size, score) followed by the file text: the prefix, middle, and suffix cells in order.
| oomlout/oomlout-OOMP | old/OOMPpart_HESH_03_L_STAN_01.py | Python | cc0-1.0 | 241 | 0 |
import OOMP
newPart = OOMP.oompItem(9019)
newPart.addTag("oompType", "HESH")
newPart.addTag("oompSize", "03")
newPart.addTag("oompColor", "L")
newPart.addTag("oompDesc", "STAN")
newPart.addTag("oompIndex", "01")
OOMP.parts.append(newPart)
| braingineer/baal | baal/structures/gist_trees.py | Python | mit | 54,149 | 0.005522 |
"""
Derivation and Elementary Trees live here.
"""
from __future__ import print_function
from baal.structures import Entry, ConstituencyTree, consts
from baal.semantics import Predicate, Expression
from collections import deque
from copy import copy, deepcopy
from math import floor, ceil
try:
input = raw_input
except:
pass
def prn_pairs(phead, thead):
pairs = [("-LRB-", "-RRB-"), ("-RSB-", "-RSB-"), ("-LCB-", "-RCB-"),
("--", "--"), (",", ",")]
return any([left.lower()==phead.lower() and right.lower()==thead.lower() for left,right in pairs])
class AttachmentPoint(object):
def __init__(self, free, pos_symbol, gorn, type, seq_index):
self.free = free
self.pos_symbol = pos_symbol
self.gorn = gorn
self.type = type
self.seq_index = seq_index
self.hlf_symbol = None
self.frontier_increment = 0.01
self.frontier = (-1,0)
def __repr__(self):
return "{}@{}".format(self.pos_symbol,self.gorn)
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
setattr(result, k, deepcopy(v, memo))
return result
@classmethod
def from_tree(cls, tree, address, seq_index, tree_type):
new_point = cls(True, tree.symbol, address, tree_type, seq_index)
if tree.spine_index >= 0:
new_point.frontier = (tree.spine_index, tree.spine_index)
return new_point
@property
def left_frontier(self):
l, r = self.frontier
self.frontier = (l-self.frontier_increment, r)
assert self.frontier[0] > floor(self.frontier[0])
return self.frontier[0]
@property
def right_frontier(self):
l, r = self.frontier
self.frontier = (l, r+self.frontier_increment)
assert self.frontier[1] < ceil(self.frontier[1])
return self.frontier[1]
def sibling_increment(self, left=True):
l, r = self.frontier
if left:
self.frontier = (ceil(l) - 1.0, r)
else:
self.frontier = (l, floor(r) + 1.0)
def match(self, op):
pos_match = self.pos_symbol == op.target['pos_symbol']
gorn_match = ((self.gorn == op.target['target_gorn'])
or op.target['target_gorn'] is None)
hlf_match = self.hlf_symbol == op.target['target_hlf']
type_match = self.type == op.type
fail = []
if not pos_match:
f = "failure because pos:"
f += "self: {}; op: {}".format(str(self.pos_symbol),
str(op.target['pos_symbol']))
fail.append(f)
if not gorn_match:
f = "failure because gorn:"
f += "self: {}; op: {}".format(str(self.gorn),
str(op.target['target_gorn']))
fail.append(f)
if not hlf_match:
f = "failure because hlf:"
f += "self: {}; op: {}".format(str(self.hlf_symbol),
str(op.target['target_hlf']))
fail.append(f)
#if len(fail) > 0:
# print(" & \n".join(fail))
#else:
# print("Success!")
return self.free and pos_match and gorn_match and hlf_match and type_match
def set_path_features(self, hlf_symbol):
self.hlf_symbol = hlf_symbol
def clone(self):
ret = AttachmentPoint(self.free, self.pos_symbol, self.gorn,
self.type, self.seq_index)
ret.hlf_symbol = self.hlf_symbol
ret.frontier = self.frontier
return ret
class AttachmentOperation(object):
"""Represents an elementary tree operation
Used by DerivationTrees when trying to find where an elementary tree should attach
There are two modes to the operation:
1. Use it as a general attachment. In this case it needs to know
the permissable attachments via the pos_symbol (and direction if insertion)
2. Use it in specific attachment. In this case it needs to know
identifying information about the tree it should be attaching to.
Current ideas: hlf_symbol, tree_id, argument_number, gorn_address
Thoughts: gorn_address won't work (for obvious reasons as the tree grows)
tree_id won't work because there might be duplicates
hlf_symbol could work, as long as this semantic form remains
argument_number requires planning, which CSG and others might handle
"""
def __init__(self, target, type):
"""Pass in the already made parameters to make the operation.
Args:
target: dict with keys 'pos_symbol' and 'parameter'
'pos_symbol' is the part of speech this operation looks for
'parameter' is direction for insertions, and argument number
for substitutions
type: the type of operation this is: consts.INSERTION or consts.SUBSTITUTION
Notes:
insertion direction: left means it inserts on the left side
e.g. (NP* (DT a)) inserts left.
the asterisk denotes the attachment point
right means it inserts on the right side
e.g. (*S (. .)) inserts right
the asterisk denotes the attachment point
"""
self.target = target
self.type = type
@property
def is_insertion(self):
return self.type == consts.INSERTION
@property
def direction(self):
if not self.is_insertion:
raise Exception("Not an insertion tree")
else:
return self.target['attach_direction']
def clone(self):
return AttachmentOperation(self.target, self.type)
def set_path_features(self, target_gorn, target_hlf):
if target_hlf is not None:
self.target['target_hlf'] = target_hlf
if target_gorn is not None:
self.target['target_gorn'] = tuple(target_gorn)
@classmethod
def from_tree(cls, tree):
"""Calculate the parameters for the operation from a parse tree
Args:
tree: A ConstituencyParse instance
"""
if tree.adjunct:
target = {'pos_symbol': tree.symbol, 'attach_direction': tree.direction,
'target_gorn': None, 'target_hlf': None}
type = consts.INSERTION
else:
target = {'pos_symbol': tree.symbol, 'attach_direction': "up",
'target_gorn': None, 'target_hlf': None}
type = consts.SUBSTITUTION
return cls(target, type)
return cls(root_op, "", (0,), None, "(ROOT)",
[root_subpoint], [], hlf_symbol="g-1")
class ElementaryTree(object):
"""represent a tree fragment, its operations, and its internal addresses
"""
def __init__(self, op, head, head_address, head_symbol, bracketed_string,
substitution_points, insertion_points,
hlf_symbol=None, tree_id=None, last_type=None, last_index=-1):
self.tree_operation = op
self.head = head
self.head_address = head_address
self.substitution_points = substitution_points
self.insertion_points = insertion_points
self.address = (0,)
self.last_type = last_type
self.last_index = last_index
self.hlf_symbol = hlf_symbol
self.bracketed_string = bracketed_string
self.tree_id = tree_id
self.head_symbol = head_symbol
@classmethod
def from_full_parse_tree(cls, parse_tree):
if parse_tree.symbol == "" and len(parse_tree.children) == 1:
parse_tree.symbol = "ROOT"
_, addressbook = parse_tree.clone()
@classmethod
def from_single_parse_tree(cls, parse_tree):
if parse_tree.save_str().upper() == "(ROOT ROOT)":
return cls.root_tree()
| qe-team/marmot | marmot/representations/pos_representation_generator.py | Python | isc | 2,447 | 0.002452 |
from subprocess import Popen
import os
import time
from marmot.representations.representation_generator import RepresentationGenerator
from marmot.experiment.import_utils import mk_tmp_dir
class POSRepresentationGenerator(RepresentationGenerator):
def _get_random_name(self, suffix=''):
return 'tmp_'+suffix+str(time.time())
def _get_pos_tagging(self, src, tagger, par_file, tmp_dir):
# tokenize and add the sentence end marker
# tokenization is done with nltk
tmp_tokenized_name = os.path.join(tmp_dir, self._get_random_name('tok'))
tmp_tok = open(tmp_tokenized_name, 'wr+')
for words in src:
tmp_tok.write('%s\nSentenceEndMarker\n' % '\n'.join([w.encode('utf-8') for w in words]))
tmp_tok.seek(0)
# pass to tree-tagger
tmp_tagged_name = os.path.join(tmp_dir, self._get_random_name('tag'))
tmp_tagged = open(tmp_tagged_name, 'wr+')
tagger_call = Popen([tagger, '-token', par_file], stdin=tmp_tok, stdout=tmp_tagged)
tagger_call.wait()
tmp_tagged.seek(0)
# remove sentence markers, restore sentence structure
output = []
cur_sentence = []
for line in tmp_tagged:
word_tag = line[:-1].decode('utf-8').strip().split('\t')
# each string has to be <word>\t<tag>
# TODO: if it's not of this format, it could be the end of sequence (empty string) or an error
if len(word_tag) != 2:
continue
if word_tag[0] == 'SentenceEndMarker':
output.append(cur_sentence)
cur_sentence = []
else:
cur_sentence.append(word_tag[1])
tmp_tok.close()
tmp_tagged.close()
# delete all temporary files
os.remove(tmp_tokenized_name)
os.remove(tmp_tagged_name)
return output
# <tagger> -- path to tree-tagger
# <parameters> -- parameters of tree-tagger
# <data_label> -- which data should be tagged ('source' or 'target')
def __init__(self, tagger, parameters, data_label, tmp_dir=None):
self.tmp_dir = mk_tmp_dir(tmp_dir)
self.tagger = tagger
self.parameters = parameters
self.data = data_label
def generate(self, data_obj):
data_obj[self.data+'_pos'] = self._get_pos_tagging(data_obj[self.data], self.tagger, self.parameters, self.tmp_dir)
return data_obj
| fanout/pysmartfeed | test.py | Python | mit | 542 | 0.01845 |
import sys
import json
import smartfeed.django
base = sys.argv[1]
command = sys.argv[2]
db = smartfeed.django.get_default_model()
if command == 'add':
data = json.loads(sys.argv[3])
id = sys.argv[4] if len(sys.argv) >= 5 else None
db.add(base, data, id=id)
elif command == 'del':
id = sys.argv[3]
db.delete(base, id)
elif command == 'exp':
ttl = int(sys.argv[3])
db.clear_expired(base, ttl)
elif command == 'expany':
ttl = int(sys.argv[3])
db.clear_expired(base, ttl, deleted=False)
else:
raise ValueError('unsupported command')
| duyuan11/glumpy | examples/transform-pan-zoom.py | Python | bsd-3-clause | 1,497 | 0.007348 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
""" This example shows reactive pan-zoom transform (2D). """
import numpy as np
from PIL import Image
from glumpy import app, gl, glm, gloo, data
from glumpy.transforms import PanZoom, Position
vertex = """
attribute vec2 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main()
{
gl_Position = <transform>;
v_texcoord = texcoord;
}
"""
fragment = """
uniform sampler2D texture;
varying vec2 v_texcoord;
void main()
{
gl_FragColor = texture2D(texture, v_texcoord);
// gl_FragColor = <interpolation>;
}
"""
window = app.Window(width=800, height=800)
@window.event
def on_draw(dt):
window.clear()
program.draw(gl.GL_TRIANGLE_STRIP)
@window.event
def on_key_press(key, modifiers):
if key == app.window.key.SPACE:
transform.reset()
program = gloo.Program(vertex, fragment, count=4)
program['position'] = [(-1,-1), (-1,1), (1,-1), (1,1)]
program['texcoord'] = [( 0, 1), ( 0, 0), ( 1, 1), ( 1, 0)]
program['texture'] = data.get("lena.png")
transform = PanZoom(Position("position"), aspect=1)
program['transform'] = transform
window.attach(transform)
app.run()
| btian/market_correlator | extract_symbols.py | Python | bsd-3-clause | 2,097 | 0.013829 |
#!/usr/bin/env python
# Copyright (c) 2014, Bo Tian <tianbo@gmail.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may
# be used to endorse or promote products derived from this software without specific
# prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
def main():
args = sys.argv
print str(args)
extract_symbols(args[1], args[2])
def extract_symbols(input_file, output_file):
fin = open(input_file, 'r')
fout = open(output_file, 'w')
for line in fin:
if '|' in line:
cols = line.split('|')
if not '$' in cols[1]: # Skip preferred shares, warrant etc.
symbol = cols[1].replace('.', '-') # e.g., BRK.B -> BRK-B for Yahoo finance.
fout.write(symbol + '\n')
fin.close()
fout.close()
if __name__ == "__main__":
main()
| steventimberman/masterDebater | venv/lib/python2.7/site-packages/haystack/admin.py | Python | mit | 6,567 | 0.002284 |
# encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from django.contrib.admin.options import ModelAdmin, csrf_protect_m
from django.contrib.admin.views.main import SEARCH_VAR, ChangeList
from django.core.exceptions import PermissionDenied
from django.core.paginator import InvalidPage, Paginator
from django.shortcuts import render
from django.utils.encoding import force_text
from django.utils.translation import ungettext
from haystack import connections
from haystack.query import SearchQuerySet
from haystack.utils import get_model_ct_tuple
def list_max_show_all(changelist):
"""
Returns the maximum amount of results a changelist can have for the
"Show all" link to be displayed in a manner compatible with both Django
1.4 and 1.3. See Django ticket #15997 for details.
"""
try:
# This import is available in Django 1.3 and below
from django.contrib.admin.views.main import MAX_SHOW_ALL_ALLOWED
return MAX_SHOW_ALL_ALLOWED
except ImportError:
return changelist.list_max_show_all
class SearchChangeList(ChangeList):
def __init__(self, **kwargs):
self.haystack_connection = kwargs.pop('haystack_connection', 'default')
super(SearchChangeList, self).__init__(**kwargs)
def get_results(self, request):
if not SEARCH_VAR in request.GET:
return super(SearchChangeList, self).get_results(request)
# Note that pagination is 0-based, not 1-based.
sqs = SearchQuerySet(self.haystack_connection).models(self.model).auto_query(request.GET[SEARCH_VAR]).load_all()
paginator = Paginator(sqs, self.list_per_page)
# Get the number of objects, with admin filters applied.
result_count = paginator.count
full_result_count = SearchQuerySet(self.haystack_connection).models(self.model).all().count()
can_show_all = result_count <= list_max_show_all(self)
multi_page = result_count > self.list_per_page
# Get the list of objects to display on this page.
try:
result_list = paginator.page(self.page_num + 1).object_list
# Grab just the Django models, since that's what everything else is
# expecting.
result_list = [result.object for result in result_list]
except InvalidPage:
result_list = ()
self.result_count = result_count
self.full_result_count = full_result_count
self.result_list = result_list
self.can_show_all = can_show_all
self.multi_page = multi_page
self.paginator = paginator
class SearchModelAdminMixin(object):
# haystack connection to use for searching
haystack_connection = 'default'
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
if not self.has_change_permission(request, None):
raise PermissionDenied
if not SEARCH_VAR in request.GET:
# Do the usual song and dance.
return super(SearchModelAdminMixin, self).changelist_view(request, extra_context)
# Do a search of just this model and populate a Changelist with the
# returned bits.
if not self.model in connections[self.haystack_connection].get_unified_index().get_indexed_models():
# Oops. That model isn't being indexed. Return the usual
# behavior instead.
return super(SearchModelAdminMixin, self).changelist_view(request, extra_context)
# So. Much. Boilerplate.
# Why copy-paste a few lines when you can copy-paste TONS of lines?
list_display = list(self.list_display)
kwargs = {
'haystack_connection': self.haystack_connection,
'request': request,
'model': self.model,
'list_display': list_display,
'list_display_links': self.list_display_links,
'list_filter': self.list_filter,
'date_hierarchy': self.date_hierarchy,
'search_fields': self.search_fields,
'list_select_related': self.list_select_related,
'list_per_page': self.list_per_page,
'list_editable': self.list_editable,
'model_admin': self
}
# Django 1.4 compatibility.
if hasattr(self, 'list_max_show_all'):
kwargs['list_max_show_all'] = self.list_max_show_all
changelist = SearchChangeList(**kwargs)
formset = changelist.formset = None
media = self.media
# Build the action form and populate it with available actions.
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note = ungettext('0 of %(count)d selected',
'of %(count)d selected', len(changelist.result_list))
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', changelist.result_count)
context = {
'module_name': force_text(self.model._meta.verbose_name_plural),
'selection_note': selection_note % {'count': len(changelist.result_list)},
'selection_note_all': selection_note_all % {'total_count': changelist.result_count},
'title': changelist.title,
'is_popup': changelist.is_popup,
'cl': changelist,
'media': media,
'has_add_permission': self.has_add_permission(request),
# More Django 1.4 compatibility
'root_path': getattr(self.admin_site, 'root_path', None),
'app_label': self.model._meta.app_label,
'action_form': action_form,
'actions_on_top': self.actions_on_top,
'actions_on_bottom': self.actions_on_bottom,
'actions_selection_counter': getattr(self, 'actions_selection_counter', 0),
}
context.update(extra_context or {})
request.current_app = self.admin_site.name
app_name, model_name = get_model_ct_tuple(self.model)
return render(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_name, model_name),
'admin/%s/change_list.html' % app_name,
'admin/change_list.html'
], context)
class SearchModelAdmin(SearchModelAdminMixin, ModelAdmin):
pass
| yelongyu/chihu | app/main/__init__.py | Python | gpl-3.0 | 338 | 0.002959 |
# -*- coding: utf-8 -*-
from flask import Blueprint
from ..models import Permission
main = Blueprint('main', __name__)
from . import views, errors, my_test
# app_context_processor
# lets the variable Permission be accessed by all templates
@main.app_context_processor
def inject_permissions():
return dict(Permission=Permission)
| sileht/python-gnocchiclient | gnocchiclient/osc.py | Python | apache-2.0 | 1,874 | 0 |
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from osc_lib import utils
DEFAULT_METRICS_API_VERSION = '1'
API_VERSION_OPTION = 'os_metrics_api_version'
API_NAME = "metric"
API_VERSIONS = {
"1": "gnocchiclient.v1.client.Client",
}
def make_client(instance):
"""Returns a metrics service client."""
version = instance._api_version[API_NAME]
try:
version = int(version)
except ValueError:
version = float(version)
gnocchi_client = utils.get_client_class(
API_NAME,
version,
API_VERSIONS)
# NOTE(sileht): ensure setup of the session is done
instance.setup_auth()
return gnocchi_client(session=instance.session,
adapter_options={
'interface': instance.interface,
'region_name': instance.region_name
})
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument(
'--os-metrics-api-version',
metavar='<metrics-api-version>',
default=utils.env(
'OS_METRICS_API_VERSION',
default=DEFAULT_METRICS_API_VERSION),
help=('Metrics API version, default=' +
DEFAULT_METRICS_API_VERSION +
' (Env: OS_METRICS_API_VERSION)'))
return parser
| georgecpr/openthread | tests/scripts/thread-cert/Cert_9_2_13_EnergyScan.py | Python | bsd-3-clause | 4,676 | 0.001283 |
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import config
import node
COMMISSIONER = 1
LEADER = 2
ROUTER1 = 3
ED1 = 4
class Cert_9_2_13_EnergyScan(unittest.TestCase):
def setUp(self):
self.simulator = config.create_default_simulator()
self.nodes = {}
for i in range(1,5):
self.nodes[i] = node.Node(i, (i == ED1), simulator=self.simulator)
self.nodes[COMMISSIONER].set_panid(0xface)
self.nodes[COMMISSIONER].set_mode('rsdn')
self.nodes[COMMISSIONER].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[COMMISSIONER].enable_whitelist()
self.nodes[COMMISSIONER].set_router_selection_jitter(1)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[COMMISSIONER].get_addr64())
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER1].set_panid(0xface)
self.nodes[ROUTER1].set_mode('rsdn')
self.nodes[ROUTER1].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER1].add_whitelist(self.nodes[ED1].get_addr64())
self.nodes[ROUTER1].enable_whitelist()
self.nodes[ROUTER1].set_router_selection_jitter(1)
self.nodes[ED1].set_panid(0xface)
self.nodes[ED1].set_mode('rs')
self.nodes[ED1].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[ED1].enable_whitelist()
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
del self.simulator
def test(self):
self.nodes[LEADER].start()
self.simulator.go(5)
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[COMMISSIONER].start()
self.simulator.go(5)
self.assertEqual(self.nodes[COMMISSIONER].get_state(), 'router')
self.nodes[COMMISSIONER].commissioner_start()
self.simulator.go(3)
self.nodes[ROUTER1].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER1].get_state(), 'router')
self.nodes[ED1].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ED1].get_state(), 'child')
ipaddrs = self.nodes[ROUTER1].get_addrs()
for ipaddr in ipaddrs:
if ipaddr[0:4] != 'fe80':
break
self.assertTrue(self.nodes[COMMISSIONER].ping(ipaddr))
self.nodes[COMMISSIONER].energy_scan(0x50000, 0x02, 0x20, 0x3e8, ipaddr)
ipaddrs = self.nodes[ED1].get_addrs()
for ipaddr in ipaddrs:
if ipaddr[0:4] != 'fe80':
break
self.assertTrue(self.nodes[COMMISSIONER].ping(ipaddr))
self.nodes[COMMISSIONER].energy_scan(0x50000, 0x02, 0x20, 0x3e8, ipaddr)
self.nodes[COMMISSIONER].energy_scan(0x50000, 0x02, 0x20, 0x3e8, 'ff33:0040:fdde:ad00:beef:0:0:1')
self.assertTrue(self.nodes[COMMISSIONER].ping(ipaddr))
if __name__ == '__main__':
unittest.main()
| eri-trabiccolo/exaile | plugins/screensaverpause/__init__.py | Python | gpl-2.0 | 3,410 | 0.005279 |
# screensaverpause - pauses Exaile playback on screensaver activation
# Copyright (C) 2009-2011 Johannes Sasongko <sasongko@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
import dbus, gtk
from xl import event, player, settings
SERVICES = [
dict( # GNOME
bus_name='org.gnome.ScreenSaver',
path='/org/gnome/ScreenSaver',
dbus_interface='org.gnome.ScreenSaver',
),
dict( # KDE
bus_name='org.freedesktop.ScreenSaver',
path='/',
dbus_interface='org.freedesktop.ScreenSaver',
),
]
import prefs
def get_preferences_pane():
return prefs
matches = set()
bus = None
was_playing = None
def screensaver_active_changed(is_active):
global was_playing
if is_active:
was_playing = player.PLAYER.is_playing()
player.PLAYER.pause()
elif was_playing and settings.get_option("screensaverpause/unpause", 0):
player.PLAYER.unpause()
def enable(exaile):
if exaile.loading:
event.add_callback(_enable, 'exaile_loaded')
else:
_enable()
def _enable(*a):
global bus
bus = dbus.SessionBus()
for service in SERVICES:
matches.add(bus.add_signal_receiver(screensaver_active_changed,
signal_name='ActiveChanged', **service))
def disable(exaile):
if bus is None: return
for match in frozenset(matches):
match.remove()
matches.remove(match)
def test():
import glib, gobject
gobject.threads_init()
import dbus.mainloop.glib as dbgl
dbgl.DBusGMainLoop(set_as_default=True)
global bus
bus = dbus.SessionBus()
for service in SERVICES:
try:
proxy = bus.get_object(service['bus_name'], service['path'],
follow_name_owner_changes=True)
except dbus.DBusException:
continue
break
else:
return None
assert proxy
interface = dbus.Interface(proxy, service['dbus_interface'])
mainloop = glib.MainLoop()
def active_changed(new_value):
if not new_value:
mainloop.quit()
interface.connect_to_signal('ActiveChanged', screensaver_active_changed)
# For some reason Lock never returns.
interface.Lock(ignore_reply=True)
mainloop.run()
if __name__ == '__main__':
test()
# vi: et sts=4 sw=4 tw=80
| DavidLi2010/ramcloud | scripts/objectsize_scale.py | Python | isc | 2,086 | 0 |
#!/usr/bin/env python
# Copyright (c) 2011 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Generates data for a recovery performance graph.
Measures recovery time as a function of partition size for a
single recovery Master and 3 different object sizes.
"""
from __future__ import division, print_function
from common import *
import config
import recovery
import subprocess
dat = open('%s/recovery/objectsize_scale.data' % top_path, 'w', 1)
numBackups = len(config.hosts)
for objectSize in [128, 256, 1024]:
print('# objectSize:', objectSize, file=dat)
print('# Data sourced by %d backups' % objectSize, file=dat)
for partitionSize in range(1, 1050, 100):
args = {}
args['num_servers'] = numBackups
args['backups_per_server'] = 1
args['num_partitions'] = 1
args['object_size'] = objectSize
args['replicas'] = 3
args['master_ram'] = 8000
numObjectsPerMb = 2**20 / (objectSize + 38)
args['num_objects'] = int(numObjectsPerMb * partitionSize)
print('Running with %d backups' % numBackups)
print('Running with objects of size %d for a %d MB partition' %
(objectSize, partitionSize))
r = recovery.insist(**args)
print('->', r['ns'] / 1e6, 'ms', '(run %s)' % r['run'])
print(partitionSize, r['ns'] / 1e6, file=dat)
print(file=dat)
print(file=dat)
| XDSETeamA/XD_SE_TeamA | team9/1/Sharing/manage.py | Python | mit | 250 | 0 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Sharing.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| cloudbase/maas | src/maasserver/tests/test_preseed.py | Python | agpl-3.0 | 25,940 | 0.000578 |
# Copyright 2012, 2013 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test `maasserver.preseed` and related bits and bobs."""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
str = None
__metaclass__ = type
__all__ = []
import httplib
import os
from pipes import quote
from urlparse import urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from maasserver.enum import (
ARCHITECTURE,
DISTRO_SERIES,
NODE_STATUS,
NODEGROUPINTERFACE_MANAGEMENT,
PRESEED_TYPE,
)
from maasserver.models import Config
from maasserver.preseed import (
compose_enlistment_preseed_url,
compose_preseed_url,
GENERIC_FILENAME,
get_curtin_config,
get_curtin_context,
get_curtin_installer_url,
get_curtin_userdata,
get_enlist_preseed,
get_hostname_and_path,
get_node_preseed_context,
get_preseed,
get_preseed_context,
get_preseed_filenames,
get_preseed_template,
get_preseed_type_for,
load_preseed_template,
PreseedTemplate,
render_enlistment_preseed,
render_preseed,
split_subarch,
TemplateNotFoundError,
)
from maasserver.testing.factory import factory
from maasserver.testing.testcase import MAASServerTestCase
from maasserver.utils import map_enum
from maastesting.matchers import ContainsAll
from testtools.matchers import (
AllMatch,
Contains,
IsInstance,
MatchesAll,
Not,
StartsWith,
)
import yaml
class TestSplitSubArch(MAASServerTestCase):
"""Tests for `split_subarch`."""
def test_split_subarch_returns_list(self):
self.assertEqual(['amd64'], split_subarch('amd64'))
def test_split_subarch_splits_sub_architecture(self):
self.assertEqual(['amd64', 'test'], split_subarch('amd64/test'))
class TestGetHostnameAndPath(MAASServerTestCase):
"""Tests for `get_hostname_and_path`."""
def test_get_hostname_and_path(self):
input_and_results = [
('http://name.domain/my/path', ('name.domain', '/my/path')),
('https://domain/path', ('domain', '/path')),
('http://domain/', ('domain', '/')),
('http://domain', ('domain', '')),
]
inputs = [input for input, _ in input_and_results]
results = [result for _, result in input_and_results]
self.assertEqual(results, map(get_hostname_and_path, inputs))
class TestGetPreseedFilenames(MAASServerTestCase):
"""Tests for `get_preseed_filenames`."""
def test_get_preseed_filenames_returns_filenames(self):
hostname = factory.getRandomString()
prefix = factory.getRandomString()
release = factory.getRandomString()
node = factory.make_node(hostname=hostname)
arch, subarch = node.architecture.split('/')
self.assertSequenceEqual(
[
'%s_%s_%s_%s_%s' % (prefix, arch, subarch, release, hostname),
'%s_%s_%s_%s' % (prefix, arch, subarch, release),
'%s_%s_%s' % (prefix, arch, subarch),
'%s_%s' % (prefix, arch),
'%s' % prefix,
'generic',
],
list(get_preseed_filenames(node, prefix, release, default=True)))
def test_get_preseed_filenames_if_node_is_None(self):
release = factory.getRandomString()
prefix = factory.getRandomString()
self.assertSequenceEqual(
[
'%s_%s' % (prefix, release),
'%s' % prefix,
],
list(get_preseed_filenames(None, prefix, release)))
def test_get_preseed_filenames_supports_empty_prefix(self):
hostname = factory.getRandomString()
release = factory.getRandomString()
node = factory.make_node(hostname=hostname)
arch, subarch = node.architecture.split('/')
self.assertSequenceEqual(
[
'%s_%s_%s_%s' % (arch, subarch, release, hostname),
'%s_%s_%s' % (arch, subarch, release),
'%s_%s' % (arch, subarch),
'%s' % arch,
],
list(get_preseed_filenames(node, '', release)))
def test_get_preseed_filenames_returns_list_without_default(self):
# If default=False is passed to get_preseed_filenames, the
# returned list won't include the default template name as a
# last resort template.
hostname = factory.getRandomString()
prefix = factory.getRandomString()
release = factory.getRandomString()
node = factory.make_node(hostname=hostname)
self.assertSequenceEqual(
'generic',
list(get_preseed_filenames(
node, prefix, release, default=True))[-1])
def test_get_preseed_filenames_returns_list_with_default(self):
# If default=True is passed to get_preseed_filenames, the
# returned list will include the default template name as a
# last resort template.
hostname = factory.getRandomString()
prefix = factory.getRandomString()
release = factory.getRandomString()
node = factory.make_node(hostname=hostname)
self.assertSequenceEqual(
prefix,
list(get_preseed_filenames(
node, prefix, release, default=False))[-1])
class TestConfiguration(MAASServerTestCase):
"""Test for correct configuration of the preseed component."""
def test_setting_defined(self):
self.assertThat(
settings.PRESEED_TEMPLATE_LOCATIONS,
AllMatch(IsInstance(unicode)))
class TestGetPreseedTemplate(MAASServerTestCase):
"""Tests for `get_preseed_template`."""
def test_get_preseed_template_returns_None_if_no_template_locations(self):
# get_preseed_template() returns None when no template locations are
# defined.
self.patch(settings, "PRESEED_TEMPLATE_LOCATIONS", [])
self.assertEqual(
(None, None),
get_preseed_template(
(factory.getRandomString(), factory.getRandomString())))
def test_get_preseed_template_returns_None_when_no_filenames(self):
# get_preseed_template() returns None when no filenames are passed in.
self.patch(settings, "PRESEED_TEMPLATE_LOCATIONS", [self.make_dir()])
self.assertEqual((None, None), get_preseed_template(()))
def test_get_preseed_template_find_template_in_first_location(self):
template_content = factory.getRandomString()
template_path = self.make_file(contents=template_content)
template_filename = os.path.basename(template_path)
locations = [
os.path.dirname(template_path),
self.make_dir(),
]
self.patch(settings, "PRESEED_TEMPLATE_LOCATIONS", locations)
self.assertEqual(
(template_path, template_content),
get_preseed_template([template_filename]))
def test_get_preseed_template_find_template_in_last_location(self):
template_content = factory.getRandomString()
template_path = self.make_file(contents=template_content)
template_filename = os.path.basename(template_path)
locations = [
self.make_dir(),
os.path.dirname(template_path),
]
self.patch(settings, "PRESEED_TEMPLATE_LOCATIONS", locations)
self.assertEqual(
(template_path, template_content),
get_preseed_template([template_filename]))
class TestLoadPreseedTemplate(MAASServerTestCase):
"""Tests for `load_preseed_template`."""
def setUp(self):
super(TestLoadPreseedTemplate, self).setUp()
self.location = self.make_dir()
self.patch(
settings, "PRESEED_TEMPLATE_LOCATIONS", [self.location])
def create_template(self, location, name, content=None):
# Create a tempita template in the given `self.location` with the
# given `name`. If content is not provided, a random content
# will be put inside the template.
path = os.path.join(sel
| eggplantbren/Oscillations | StateSpace/Python/Oscillator.py | Python | gpl-3.0 | 2,171 | 0.03731 |
import numpy as np
import numpy.random as rng
class Oscillator:
"""
A point in phase space for an oscillator.
"""
def __init__(self, state, omega=1., tau=1., beta=1.):
"""
Constructor: takes initial yition
and velocity as argument. Sets the time to zero
"""
self.state = state
self.omega, self.tau, self.beta = omega, tau, beta
self.time = 0.
def deriv(self, time, state, dt):
"""
Compute the derivatives from the given state
(not necessarily self.state, you need to pass that in
if that's what you want!)
"""
a = np.empty(2)
a[0] = state[1]
a[1] = -self.omega**2*state[0] - state[1]/self.tau\
+ self.beta*rng.randn()/np.sqrt(dt)
return a
def update(self, dt):
"""
Take a step using RK4
"""
f1 = self.deriv(self.time, self.state, dt)
f2 = self.deriv(self.time + 0.5*dt, self.state + 0.5*dt*f1, dt)
f3 = self.deriv(self.time + 0.5*dt, self.state + 0.5*dt*f2, dt)
f4 = self.deriv(self.time + dt, self.state + dt*f3, dt)
self.state += dt/6.*(f1 + 2*f2 + 2*f3 + f4)
self.time += dt
if __name__ == '__main__':
import matplotlib.pyplot as plt
# Initial conditions
oscillator = Oscillator(np.array([0., 0.]))
# Timestep
dt = 0.01
steps = 100000 # Take this many steps
skip = 100 # Store and plot results this often
keep = np.empty((steps/skip, 3)) # Store results in here
# Columns: time, yition, vocity
plt.ion() # Turn "interactive mode" for plotting on, so plots
# can update without the user having to close the window
plt.hold(False) # Clear the plot every time we plot something new
# Main loop
for i in xrange(0, steps):
# Saving and plotting
if i%skip == 0:
index = i/skip
# Save state to keep array
keep[index, :] = \
np.array([oscillator.time, oscillator.state[0],\
oscillator.state[1]])
# Plot yition vs time
plt.plot(keep[0:(index+1), 0], keep[0:(index+1), 1], 'b')
plt.xlabel('Time')
plt.ylabel('y')
plt.title('Stdev = %.03f'%keep[0:(index+1), 1].std())
plt.draw() # Refresh the plot
# Update the oscillator
oscillator.update(dt)
# At end of run, leave the last plot showing until the user closes it
plt.ioff()
plt.show()
| bearstech/ansible | lib/ansible/modules/cloud/rackspace/rax_mon_notification.py | Python | gpl-3.0 | 5,164 | 0.001356 |
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rax_mon_notification
short_description: Create or delete a Rackspace Cloud Monitoring notification.
description:
- Create or delete a Rackspace Cloud Monitoring notification that specifies a
channel that can be used to communicate alarms, such as email, webhooks, or
PagerDuty. Rackspace monitoring module flow | rax_mon_entity -> rax_mon_check ->
*rax_mon_notification* -> rax_mon_notification_plan -> rax_mon_alarm
version_added: "2.0"
options:
state:
description:
- Ensure that the notification with this C(label) exists or does not exist.
choices: ['present', 'absent']
label:
description:
- Defines a friendly name for this notification. String between 1 and 255
characters long.
required: true
notification_type:
description:
- A supported notification type.
choices: ["webhook", "email", "pagerduty"]
required: true
details:
description:
- Dictionary of key-value pairs used to initialize the notification.
Required keys and meanings vary with notification type. See
http://docs.rackspace.com/cm/api/v1.0/cm-devguide/content/
service-notification-types-crud.html for details.
required: true
author: Ash Wilson
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Monitoring notification example
gather_facts: False
hosts: local
connection: local
tasks:
- name: Email me when something goes wrong.
rax_mon_entity:
credentials: ~/.rax_pub
label: omg
type: email
details:
address: me@mailhost.com
register: the_notification
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.rax import rax_argument_spec, rax_required_together, setup_rax_module
def notification(module, state, label, notification_type, details):
if len(label) < 1 or len(label) > 255:
module.fail_json(msg='label must be between 1 and 255 characters long')
changed = False
notification = None
cm = pyrax.cloud_monitoring
if not cm:
module.fail_json(msg='Failed to instantiate client. This typically '
'indicates an invalid region or an incorrectly '
'capitalized region name.')
existing = []
for n in cm.list_notifications():
if n.label == label:
existing.append(n)
if existing:
notification = existing[0]
if state == 'present':
should_update = False
should_delete = False
should_create = False
if len(existing) > 1:
module.fail_json(msg='%s existing notifications are labelled %s.' %
(len(existing), label))
if notification:
should_delete = (notification_type != notification.type)
should_update = (details != notification.details)
if should_update and not should_delete:
notification.update(details=notification.details)
changed = True
if should_delete:
notification.delete()
else:
should_create = True
if should_create:
notification = cm.create_notification(notification_type,
label=label, details=details)
changed = True
else:
for n in existing:
n.delete()
changed = True
if notification:
notification_dict = {
"id": notification.id,
"type": notification.type,
"label": notification.label,
"details": notification.details
}
module.exit_json(changed=changed, notification=notification_dict)
else:
module.exit_json(changed=changed)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
state=dict(default='present', choices=['present', 'absent']),
label=dict(required=True),
notification_type=dict(required=True, choices=['webhook', 'email', 'pagerduty']),
details=dict(required=True, type='dict')
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together()
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
state = module.params.get('state')
label = module.params.get('label')
notification_type = module.params.get('notification_type')
details = module.params.get('details')
setup_rax_module(module, pyrax)
notification(module, state, label, notification_type, details)
if __name__ == '__main__':
main()
| LLNL/spack | var/spack/repos/builtin/packages/r-circstats/package.py | Python | lgpl-2.1 | 898 | 0.002227 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RCircstats(RPackage):
"""Circular Statistics, from "Topics in Circular Statistics" (2001)
Circular Statistics, from "Topics in Circular Statistics" (2001) S.
Rao Jammalamadaka and A. SenGupta, World Scientific."""
homepage = "https://cloud.r-project.org/package=CircStats"
url = "https://cloud.r-project.org/src/contrib/CircStats_0.2-6.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/CircStats"
maintainers = ['dorton21']
version('0.2-6', sha256='8efed93b75b314577341effea214e3dd6e0a515cfe1212eb051047a1f3276f1d')
depends_on('r-mass', type=('build', 'run'))
depends_on('r-boot', type=('build', 'run'))
| ibc/MediaSoup | worker/deps/gyp/test/variables/filelist/gyptest-filelist.py | Python | isc | 697 | 0.004304 |
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Test variable expansion of '<|(list.txt ...)' syntax commands.
"""
import os
import sys
import TestGyp
test = TestGyp.TestGyp()
CHDIR = 'src'
test.run_gyp('filelist2.gyp', chdir=CHDIR)
test.build('filelist2.gyp', 'foo', chdir=CHDIR)
contents = test.read('src/dummy_foo').replace('\r', '')
expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
if not test.match(contents, expect):
print("Unexpected contents of `src/dummy_foo'")
test.diff(expect, contents, 'src/dummy_foo')
test.fail_test()
test.pass_test()
| kytos/kytos | kytos/core/napps/base.py | Python | mit | 9,959 | 0 |
"""Kytos Napps Module."""
import json
import os
import re
import sys
import tarfile
import urllib
from abc import ABCMeta, abstractmethod
from pathlib import Path
from random import randint
from threading import Event, Thread
from kytos.core.events import KytosEvent
from kytos.core.logs import NAppLog
__all__ = ('KytosNApp',)
LOG = NAppLog()
class NApp:
"""Class to represent a NApp."""
# pylint: disable=too-many-arguments
def __init__(self, username=None, name=None, version=None,
repository=None, meta=False):
self.username = username
self.name = name
self.version = version if version else 'latest'
self.repository = repository
self.meta = meta
self.description = None
self.tags = []
self.enabled = False
self.napp_dependencies = []
def __str__(self):
return "{}/{}".format(self.username, self.name)
def __repr__(self):
return f"NApp({self.username}/{self.name})"
def __hash__(self):
return hash(self.id)
def __eq__(self, other):
"""Compare username/name strings."""
return isinstance(other, self.__class__) and self.id == other.id
@property
def id(self): # pylint: disable=invalid-name
"""username/name string."""
return str(self)
@property
def uri(self):
"""Return a unique identifier of this NApp."""
version = self.version if self.version else 'latest'
if not self._has_valid_repository():
return ""
# Use the next line after Diraol fix redirect using ":" for version
# return "{}/{}:{}".format(self.repository, self.id, version)
return "{}/{}-{}".format(self.repository, self.id, version)
@property
def package_url(self):
"""Return a fully qualified URL for a NApp package."""
if not self.uri:
return ""
return "{}.napp".format(self.uri)
@classmethod
def create_from_uri(cls, uri):
"""Return a new NApp instance from an unique identifier."""
regex = r'^(((https?://|file://)(.+))/)?(.+?)/(.+?)/?(:(.+))?$'
match = re.match(regex, uri)
if not match:
return None
return cls(username=match.groups()[4],
name=match.groups()[5],
version=match.groups()[7],
repository=match.groups()[1])
@classmethod
def create_from_json(cls, filename):
"""Return a new NApp instance from a metadata file."""
with open(filename, encoding='utf-8') as data_file:
data = json.loads(data_file.read())
return cls.create_from_dict(data)
@classmethod
def create_from_dict(cls, data):
"""Return a new NApp instance from metadata."""
napp = cls()
for attribute, value in data.items():
setattr(napp, attribute, value)
return napp
def as_json(self):
"""Dump all NApp attributes on a json format."""
return json.dumps(self.__dict__)
def match(self, pattern):
"""Whether a pattern is present on NApp id, description and tags."""
try:
pattern = '.*{}.*'.format(pattern)
pattern = re.compile(pattern, re.IGNORECASE)
strings = [self.id, self.description] + self.tags
return any(pattern.match(string) for string in strings)
except TypeError:
return False
def download(self):
"""Download NApp package from his repository.
Raises:
urllib.error.HTTPError: If download is not successful.
Returns:
str: Downloaded temp filename.
"""
if not self.package_url:
return None
package_filename = urllib.request.urlretrieve(self.package_url)[0]
extracted = self._extract(package_filename)
Path(package_filename).unlink()
self._update_repo_file(extracted)
return extracted
@staticmethod
def _extract(filename):
"""Extract NApp package to a temporary folder.
Return:
pathlib.Path: Temp dir with package contents.
"""
random_string = '{:0d}'.format(randint(0, 10**6))
tmp = '/tmp/kytos-napp-' + Path(filename).stem + '-' + random_string
os.mkdir(tmp)
with tarfile.open(filename, 'r:xz') as tar:
tar.extractall(tmp)
return Path(tmp)
def _has_valid_repository(self):
"""Whether this NApp has a valid repository or not."""
return all([self.username, self.name, self.repository])
def _update_repo_file(self, destination=None):
"""Create or update the file '.repo' inside NApp package."""
with open("{}/.repo".format(destination), 'w') as repo_file:
repo_file.write(self.repository + '\n')
class KytosNApp(Thread, metaclass=ABCMeta):
"""Base class for any KytosNApp to be developed."""
def __init__(self, controller, **kwargs):
"""Contructor of KytosNapps.
Go through all of the instance methods and selects those that have
the events attribute, then creates a dict containing the event_name
and the list of methods that are responsible for handling such event.
At the end, the setup method is called as a complement of the init
process.
"""
Thread.__init__(self, daemon=False)
self.controller = controller
self.username = None # loaded from json
self.name = None # loaded from json
self.meta = False # loaded from json
self._load_json()
# Force a listener with a private method.
self._listeners = {
'kytos/core.shutdown': [self._shutdown_handler],
'kytos/core.shutdown.' + self.napp_id: [self._shutdown_handler]}
self.__event = Event()
#: int: Seconds to sleep before next call to :meth:`execute`. If
#: negative, run :meth:`execute` only once.
self.__interval = -1
self.setup()
#: Add non-private methods that listen to events.
handler_methods = [getattr(self, method_name) for method_name in
dir(self) if method_name[0] != '_' and
callable(getattr(self, method_name)) and
hasattr(getattr(self, method_name), 'events')]
# Building the listeners dictionary
for method in handler_methods:
for event_name in method.events:
if event_name not in self._listeners:
self._listeners[event_name] = []
self._listeners[event_name].append(method)
@property
def napp_id(self):
"""username/name string."""
return "{}/{}".format(self.username, self.name)
def listeners(self):
"""Return all listeners registered."""
return list(self._listeners.keys())
def _load_json(self):
"""Update object attributes based on kytos.json."""
current_file = sys.modules[self.__class__.__module__].__file__
json_path = os.path.join(os.path.dirname(current_file), 'kytos.json')
with open(json_path, encoding='utf-8') as data_file:
data = json.loads(data_file.read())
for attribute, value in data.items():
setattr(self, attribute, value)
def execute_as_loop(self, interval):
"""Run :meth:`execute` within a loop. Call this method during setup.
By calling this method, the application does not need to worry about
loop details such as sleeping and stopping the loop when
:meth:`shutdown` is called. Just call this method during :meth:`setup`
and implement :meth:`execute` as a single execution.
Args:
interval (int): Seconds between each call to :meth:`execute`.
"""
self.__interval = interval
def run(self):
"""Call the execute method, looping as needed.
It should not be overriden.
"""
self.notify_loaded()
LOG.info("Running NApp: %s", self)
self.execute()
while self.__interval > 0 and not self._
| IntegratedAlarmSystem-Group/ias-webserver | tickets/migrations/0009_auto_20181105_2039.py | Python | lgpl-3.0 | 383 | 0.002611 |
# Generated by Django 2.0.8 on 2018-11-05 20:39
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tickets', '0008_auto_20180730_2035'),
]
operations = [
migrations.AlterModelOptions(
name='ticket',
options={'default_permissions': ('add', 'change', 'delete', 'view')},
),
]
| kura-pl/fast_polls | polls/urls.py | Python | mit | 583 | 0.001715 |
from django.conf.urls import url
from . import views
app_name = 'polls'
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^(?P<question_id>[0-9]+)$', views.vote, name='vote'),
url(r'^(?P<question_id>[0-9]+)/results$', views.results, name='results'),
url(r'^add$', views.add, name='add'),
url(r'^check$', views.check, name='check'),
url(r'^search$', views.search, name='search'),
url(r'^searched$', views.searched, name='searched'),
url(r'^random$', views.get_random, name='get_random'),
url(r'^faq$', views.get_faq, name='get_faq'),
]
| tonyseek/python-stdnum | stdnum/hr/__init__.py | Python | lgpl-2.1 | 870 | 0 |
# __init__.py - collection of Croatian numbers
# coding: utf-8
#
# Copyright (C) 2012 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of Croatian numbers."""
| wiheto/teneto | test/plot/test_plot.py | Python | gpl-3.0 | 640 | 0 |
import teneto
import matplotlib.pyplot as plt
def test_sliceplot():
G = teneto.generatenetwork.rand_binomial([4, 2], 0.5, 'graphlet', 'wu')
fig, ax = plt.subplots(1)
ax = teneto.plot.slice_plot(G, ax)
plt.close(fig)
def test_circleplot():
G = teneto.generatenetwork.rand_binomial([4, 2], 0.5, 'graphlet', 'wd')
fig, ax = plt.subplots(1)
ax = teneto.plot.circle_plot(G.mean(axis=-1), ax)
plt.close(fig)
def test_stackplot():
G = teneto.generatenetwork.rand_binomial([4, 2], 0.5, 'contact', 'wd')
fig, ax = plt.subplots(1)
ax = teneto.plot.graphlet_stack_plot(G, ax, q=1)
plt.close(fig)
| kbase/narrative | kbase-extension/ipython/profile_default/ipython_config.py | Python | mit | 20,674 | 0.000532 |
# Configuration file for ipython.
c = get_config() # noqa: F821
c.Completer.use_jedi = False
# ------------------------------------------------------------------------------
# InteractiveShellApp configuration
# ------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.InteractiveShellApp.exec_PYTHONSTARTUP = True
# lines of code to run at IPython startup.
c.InteractiveShellApp.exec_lines = [
"import biokbase.narrative.magics",
"from biokbase.narrative.services import *",
"from biokbase.narrative.widgetmanager import WidgetManager",
"from biokbase.narrative.jobs import *",
]
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.InteractiveShellApp.gui = None
# Reraise exceptions encountered loading IPython extensions?
# c.InteractiveShellApp.reraise_ipython_extension_failures = False
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = ''
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.InteractiveShellApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
# ------------------------------------------------------------------------------
# TerminalIPythonApp configuration
# ------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.TerminalIPythonApp.exec_PYTHONSTARTUP = True
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = u''
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.TerminalIPythonApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = u'default'
# Configur
|
e matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
# c.TerminalIPythonApp.force_interact = False
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names int
|
o the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.TerminalIPythonApp.ipython_dir = u''
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'osx',
# 'pyglet', 'qt', 'qt5', 'tk', 'wx').
# c.TerminalIPythonApp.gui = None
# Reraise exceptions encountered loading IPython extensions?
# c.TerminalIPythonApp.reraise_ipython_extension_failures = False
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# ------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
# ------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.TerminalInteractiveShell.colors = 'LightBG'
# If True, anything that would be passed to the pager will be displayed as
# regular output instead.
# c.TerminalInteractiveShell.display_page = False
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.TerminalI
|
hatbot-team/hatbot_resources
|
preparation/lang_utils/__init__.py
|
Python
|
mit
| 74
| 0.013514
|
__author__ = 'moskupols'
|
__all__ = ['morphology', 'cognates', 'pymys
|
tem']
|
JianmingXia/StudyTest
|
KnowledgeQuizTool/MillionHeroes/baiduSearch/get.py
|
Python
|
mit
| 277
| 0
|
import requests
__url = 'http://www.baidu.com/s?wd='  # Baidu search request URL
def page(word):
r = requests.get(__url + word)
    if r.status_code == 200:  # handle request errors (not
|
 200)
return r.text
else:
print(r.status_code)
|
return False
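# Example usage (illustrative; performs a live HTTP request):
#   html = page('python')  # result page HTML on success, False otherwise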
|
calandryll/transcriptome
|
scripts/old/quality_control3.py
|
Python
|
gpl-2.0
| 1,659
| 0.011453
|
#!/usr/bin/python -tt
# Updated version of the adapter-clipping script for sequence reads
# Uses cutadapt on combined sequences and removes the first 13 bases with fastx_clipper
# Website: https://code.google.com/p/cutadapt/
# Updated on: 09/26/2013
# Import OS features to run external programs
import os
import glob
# Directories for input and output
input_dir = "/home/chris/transcriptome/fastq/qc/fastx/"
output_dir = "/home/chris/transcriptome/fastq/qc/fastx/"
fastq_orig = sorted(glob.glob1(input_dir, "*.fastq"))
orig = len(list(fastq_orig))
print "Input Directory: %s" % (input_dir)
print "Output Directory: %s" % (output_dir)
print "Scanning Input Directory..."
print "Found %s fastq files..." % (orig)
print "Fastq files: %s" % (fastq_orig)
for files in range(orig):
print "Analyzing %s..." % (fastq_orig[files])
fastqfile_in = input_dir + fastq_orig[files]
sam
|
ple_name = os.path.splitext(os.path.basename(fastq_orig[files]))[0]
    # Keep reads with a quality score above 20 (the -q 20 filter below); the
    # earlier cutadapt step removed adapters and enforced a minimum length of 51.
    # Anything else is discarded. This may be revisited later to measure the
    # impact of removing lower-quality sequences.
fastq_tmp = output_dir + fastq_orig[files] + '_temp.fastq'
log_out = output_dir + "logs/" + fastq_orig[files] + '.log'
fastq_out = output_dir + sample_na
|
me + "_filtered.fastq"
print "Running quality filter of 20 score, 100%..."
os.system("fastq_quality_filter -v -Q 32 -q 20 -p 100 -i %s -o %s >> %s" % (fastqfile_in, fastq_tmp, log_out))
print "Removing artifacts..."
os.system("fastx_artifacts_filter -v -Q 32 -i %s -o %s >> %s" % (fastq_tmp, fastq_out, log_out))
os.system("rm %s" % (fastq_tmp))
|
VectorBlox/PYNQ
|
python/pynq/iop/tests/test__iop.py
|
Python
|
bsd-3-clause
| 3,522
| 0.012493
|
# Copyright (c) 2016, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED W
|
ARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = "Giuseppe Natale, Yun Rock Qu"
__copyright__ = "Copyright 2016, Xilinx"
__email__ = "pynq_support@xilinx.com"
import pytest
from pynq import Overlay
from pynq.iop import request_iop
global ol
ol = Overlay("base.bit")
@pytest.mark.run(order=11)
def test_request_iop():
"""Test for the _IOP class and the method request_iop().
Test whether the request_iop() can return an object without errors.
This is a test for case 1 (for more information, please see request_iop).
"""
fixed_id = 1
exception_raised = False
try:
request_iop(fixed_id,'mailbox.bin')
except LookupError:
exception_raised = True
assert not exception_raised, 'request_iop() should not raise exception.'
ol.reset()
@pytest.mark.run(order=12)
def test_request_iop_same():
"""Test for the _IOP class and the method request_iop().
The request_iop() should not raise any exception since the previous IOP
runs the same program.
This is a test for case 1 (for more information, please see request_iop).
"""
fixed_id = 1
exception_raised = False
request_iop(fixed_id,'mailbox.bin')
try:
request_iop(fixed_id,'mailbox.bin')
except LookupError:
exception_raised = True
assert not exception_raised, 'request_iop() should not raise exception.'
ol.reset()
@pytest.mark.run(order=13)
def test_request_iop_conflict():
"""Test for the _IOP class and the method request_iop().
Creates multiple IOP instances on the same fixed ID. Tests whether
request_iop() correctly raises a LookupError exception.
This is a test for case 2 (for more information, please see request_iop).
"""
fixed_id = 1
request_iop(fixed_id,'pmod_adc.bin')
pytest.raises(LookupError, request_iop, fixed_id, 'pmod_dac.bin')
ol.reset()
|
flacjacket/qtile
|
libqtile/scripts/qtile_cmd.py
|
Python
|
mit
| 6,464
| 0.000464
|
#!/usr/bin/env python
#
# Copyright (c) 2017, Piotr Przymus
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Command-line tool to expose qtile.command functionality to shell.
This can be used standalone or in other shell scripts.
"""
import pprint
import argparse
from libqtile.command import Client
from libqtile.command import CommandError, CommandException
def get_formated_info(obj, cmd, args=True, short=True):
"""
Get documentation for command/function and format it.
Returns:
* args=True, short=False - (function args) and a summary line.
* args=True, short=True - '*' if arguments are present and a summary line.
* args=False - a summary line.
    If the 'doc' function is not present on the object, or there is no doc string
    for the given cmd, an empty string is returned.
    The arguments are extracted from the doc[0] line; the summary is
    constructed from the doc[1] line.
"""
doc_func = obj.doc if hasattr(obj, "doc") else lambda x: ""
doc = doc_func(cmd).splitlines()
doc_args = ""
if doc:
short_description = doc[1] if len(doc) > 1 else ""
tdoc = doc[0]
doc_args = tdoc[tdoc.find("(") + 1:tdoc.find(")")].strip()
if doc_args: # return formatted args
doc_args = "({})".format(doc_args)
if args is False:
doc_args = ""
elif args and short:
doc_args = "*" if len(doc_args) > 1 else " "
return (doc_args + " " + short_description).rstrip()
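# Illustrative behaviour (hypothetical doc string "focus(n)\nFocus window n."):
#   get_formated_info(obj, 'focus')               -> '* Focus window n.'
#   get_formated_info(obj, 'focus', short=False)  -> '(n) Focus window n.'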
def print_commands(prefix, obj):
"Print available commands for given object."
prefix += " -f "
output = []
max_cmd = 0 # max len of cmd for formatting
try:
cmds = obj.commands()
except AttributeError:
print("error: Sorry no commands in ", prefix)
exit()
except CommandError:
print("error: Sorry no such object ", prefix)
|
exit()
for cmd in cmds:
doc_
|
args = get_formated_info(obj, cmd)
pcmd = prefix + cmd
max_cmd = max(len(pcmd), max_cmd)
output.append([pcmd, doc_args])
# Print formatted output
    formatting = "{:<%d}\t{}" % (max_cmd + 1)
    for line in output:
        print(formatting.format(line[0], line[1]))
def get_object(argv):
"""
Constructs a path to object and returns given object (if it exists).
"""
client = Client()
obj = client
if argv[0] == "cmd":
argv = argv[1:]
# Generate full obj specification
for arg in argv:
try:
obj = obj[arg] # check if it is an item
except KeyError:
try:
                obj = getattr(obj, arg)  # check if it is an attr
except AttributeError:
print("Specified object does not exist " + " ".join(argv))
exit()
return obj
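# e.g. get_object(['cmd', 'group', '3']) skips the leading 'cmd' and then
# resolves client['group']['3'], falling back to attribute access at each
# step. Illustrative path only -- the actual object names depend on the
# running qtile instance.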
def run_function(obj, funcname, args):
"Run command with specified args on given object."
try:
func = getattr(obj, funcname)
except AttributeError:
print("error: Sorry no function ", funcname)
exit()
try:
ret = func(*args)
except CommandError:
print("error: Sorry command '{}' cannot be found".format(funcname))
exit()
except CommandException:
print("error: Sorry cannot run function '{}' with arguments {}"
.format(funcname, args))
exit()
return ret
def print_base_objects():
"Prints access objects of Client, use cmd for commands."
actions = ["-o cmd", "-o window", "-o layout", "-o group", "-o bar"]
print("\n".join(actions))
def main():
"Runs tool according to specified arguments."
description = 'Simple tool to expose qtile.command functionality to shell.'
epilog = '''\
Examples:\n\
qtile-cmd\n\
qtile-cmd -o cmd\n\
qtile-cmd -o cmd -f prev_layout -i\n\
qtile-cmd -o cmd -f prev_layout -a 3 # prev_layout on group 3\n\
qtile-cmd -o group 3 -f focus_back\n
'''
fmt = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(description=description, epilog=epilog,
formatter_class=fmt)
parser.add_argument('--object', '-o', dest='obj_spec', nargs='+',
help='''Specify path to object (space separated).\
If no --function flag display available commands.''')
parser.add_argument('--function', '-f', dest='function', nargs=1,
default="help", help='Select function to execute.')
parser.add_argument('--args', '-a', dest='args', nargs='+',
default=[], help='Set arguments supplied to function.')
parser.add_argument('--info', '-i', dest='info', action='store_true',
help='''With both --object and --function args prints\
documentation for function.''')
args = parser.parse_args()
if args.obj_spec:
obj = get_object(args.obj_spec)
if args.function == "help":
print_commands("-o " + " ".join(args.obj_spec), obj)
elif args.info:
print(get_formated_info(obj, args.function[0],
args=True, short=False))
else:
ret = run_function(obj, args.function[0], args.args)
if ret is not None:
pprint.pprint(ret)
else:
print_commands("-o " + " ".join(args.obj_spec), obj)
else:
print_base_objects()
if __name__ == "__main__":
main()
|
daniestevez/gr-satellites
|
python/ccsds/telemetry_packet_reconstruction.py
|
Python
|
gpl-3.0
| 2,817
| 0.00284
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2019 Athanasios Theocharis <athatheoc@gmail.com>
# This was made under ESA Summer of Code in Space 2019
# by Athanasios Theocharis, mentored by Daniel Estevez
#
# This file is part of gr-satellites
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
from gnuradio import gr
import pmt
from . import telemetry
from . import space_packet
import array
class telemetry_packet_reconstruction(gr.basic_block):
"""
docstring for block telemetry_packet_reconstruction
"""
def __init__(self):
gr.basic_block.__init__(self,
name="telemetry_packet_reconstruction",
in_sig=[],
out_sig=[])
self.space_packet = []
self.length_of_space_packet = 0
##################################################
# Blocks
##################################################
self.message_port_register_in(pmt.intern('in'))
self.set_msg_handler(pmt.intern('in'), self.handle_msg)
self.message_port_register_out(pmt.intern('out'))
def handle_msg(self, msg_pmt):
msg = pmt.cdr
|
(msg_pmt)
if not pmt.is_u8vector(msg):
print("[ERROR] Received invalid message type. Expected u8vector")
return
packet = bytearray(pmt.u8vector_elements(msg))
size = len(packet) - 6
try:
header = telemetry.PrimaryHeader.parse(packet[:])
|
if header.ocf_flag == 1:
size -= 4
        except Exception:
print("Could not decode telemetry packet")
return
parsed = telemetry.FullPacket.parse(packet[:], size=size)
payload = parsed.payload
        # The number 6 is the length of the Space Packet primary header.
        # TODO: replace this literal with a named constant.
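        # Reassembly outline (added commentary): accumulate bytes until a
        # full 6-byte Space Packet primary header is available, read
        # data_length from it, then keep consuming payload until
        # 6 + data_length bytes are gathered and the packet is emitted
        # via sendPacket().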
while len(payload) != 0:
if len(self.space_packet) < 6:
left = 6 - len(self.space_packet)
self.space_packet.extend(payload[:left])
payload = payload[left:]
if len(self.space_packet) >= 6:
self.length_of_space_packet = space_packet.PrimaryHeader.parse(bytearray(self.space_packet)).data_length
left = self.length_of_space_packet + 6 - len(self.space_packet)
self.space_packet.extend(payload[:left])
payload = payload[left:]
if 6 + self.length_of_space_packet == len(self.space_packet):
self.sendPacket()
def sendPacket(self):
packet = self.space_packet
packet = array.array('B', packet[:])
packet = pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(packet), packet))
self.message_port_pub(pmt.intern('out'), packet)
self.length_of_space_packet = 0
self.space_packet = []
|
japsu/django-selectreverse
|
selectreverse/tests/models.py
|
Python
|
bsd-2-clause
| 1,338
| 0.008969
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from selectreverse.utils import ReverseManager
class Building(models.Model):
number = models.IntegerField()
owners = models.ManyToManyField('Owner')
objects = models.Manager()
reversemanager = ReverseManager({'apartments': 'apartment_set', 'parkings': 'parking_set', 'xowners': 'owners'})
class Apartment(models.Model):
number = models.IntegerField()
building = models.ForeignKey(Building)
def __unicode__(self):
return u'%s' % self.number
class Parking(models.Model):
number = models.IntegerField()
building = models.ForeignKey(Building)
class Owner(models.Model):
name = models.CharField(max_length = 50)
objects = models.Manager()
reversemanager = ReverseManager({'buildings': 'building_set'})
class TaggedItem(models.Model):
tag = models.SlugField()
content_type = models.Fo
|
reignKey(ContentType)
object_id =
|
models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
class Bookmark(models.Model):
url = models.URLField()
tags = generic.GenericRelation(TaggedItem)
objects = models.Manager()
reversemanager = ReverseManager({'gtags': 'tags'})
|
telefonicaid/fiware-sdc
|
test/acceptance/e2e/uninstall_product/feature/terrain.py
|
Python
|
apache-2.0
| 3,017
| 0.002653
|
# -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from lettuce import world, before, after
from commons.terrain_steps import setup_feature, setup_scenario, setup_outline, tear_down
from commons.provisioning_steps impor
|
t ProvisioningSteps
from commons.rest_utils import RestUtils
from commons.con
|
figuration import CONFIG_VM_HOSTNAME
from commons.fabric_utils import execute_chef_client, execute_puppet_agent, remove_chef_client_cert_file, \
execute_chef_client_stop, execute_puppet_agent_stop, remove_puppet_agent_cert_file, remove_all_generated_test_files, \
remove_puppet_agent_catalog
provisioning_steps = ProvisioningSteps()
rest_utils = RestUtils()
@before.each_feature
def before_each_feature(feature):
"""
Hook: Will be executed before each feature. Configures global vars and gets token from keystone.
Launch agents (puppet and chef) in the target VM
"""
setup_feature(feature)
@before.each_scenario
def before_each_scenario(scenario):
"""
Hook: Will be executed before each Scenario.
Setup Scenario: initialize World vars and launch agents (puppet and chef) in the target VM
"""
setup_scenario(scenario)
execute_chef_client()
execute_puppet_agent()
@before.outline
def before_outline(param1, param2, param3, param4):
""" Hook: Will be executed before each Scenario Outline. Same behaviour as 'before_each_scenario'"""
setup_outline(param1, param2, param3, param4)
remove_all_generated_test_files()
remove_puppet_agent_catalog()
@after.each_scenario
def after_each_scenario(scenario):
"""
Hook: Will be executed after all each scenario
Removes Feature data and cleans the system. Kills all agents running in the VM.
"""
execute_chef_client_stop()
execute_puppet_agent_stop()
remove_chef_client_cert_file()
remove_puppet_agent_cert_file()
remove_all_generated_test_files()
remove_puppet_agent_catalog()
rest_utils.delete_node(world.headers, world.tenant_id, CONFIG_VM_HOSTNAME)
@after.all
def after_all(scenario):
"""
Hook: Will be executed after all each scenario
Removes Feature data and cleans the system. Kills all agents running in the VM.
"""
after_each_scenario(scenario)
tear_down(scenario)
|
mediatum/mediatum
|
core/metatype.py
|
Python
|
gpl-3.0
| 13,830
| 0.000723
|
"""
mediatum - a multimedia content repository
Copyright (C) 2007 Arne Seifert <seiferta@in.tum.de>
Copyright (C) 2007 Matthias Kramm <kramm@in.tum.de>
Copyright (C) 2013 Iryna Feuerstein <feuersti@in.tum.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without
|
even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from warnings import warn
class Context(object):
def __init__(self, field, value="", width=400, name="", lock=0, language=None, collection=None, container=None,
|
user=None, ip=""):
if collection is not None:
warn("collections argument is deprecated, use container", DeprecationWarning)
if container is not None:
raise ValueError("container and collection cannot be used together")
container = collection
self.field = field
self.value = value
self.width = width
self.name = name
self.language = language
self.collection = container
self.container = container
self.ip = ip
self.user = user
self.lock = lock
class Metatype(object):
joiner = '\n'
@classmethod
def get_name4schema(cls):
name = cls.__name__
return name[2:] if name.startswith("m_") else name
def getEditorHTML(self, field, value="", width=400, lock=0, language=None, required=None):
return ""
    def getSearchHTML(self, context):
        return None
    def getFormattedValue(self, metafield, maskitem, mask, node, language, html):
        return None
def format_request_value_for_db(self, field, params, item, language=None):
"""Prepare value for the database from update request params.
:param field: associated field
:param params: dict which contains POST form values
:param item: field name prepended with language specifier. Is the same as field name for non-multilingual fields.
"""
        # just fetch the unmodified value from the params dict
return params.get(item)
def getMaskEditorHTML(self, field, metadatatype=None, language=None):
return ""
@classmethod
def isContainer(cls):
return False
def isFieldType(self):
return True
def getName(self):
return ""
def getInformation(self):
return {"moduleversion": "1.0"}
''' events '''
    def event_metafield_changed(self, node, field):
        return None
def get_input_pattern(self, field):
return ''
def get_input_title(self, field):
return ''
def get_input_placeholder(self, field):
return ''
def is_required(self, required):
"""
It's necessary to return different types in order for the template to render properly.
Since required='' or even required='False' is still interpreted as a required field,
it needs to be completely removed from the template where applicable. TAL attributes
are removed if they evaluate to None.
@param required: 0 or 1
        @return: the string 'True', or None
"""
if required:
return 'True'
else:
return None
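    # e.g. is_required(1) -> 'True' (the attribute is rendered), while
    # is_required(0) -> None, so TAL drops the 'required' attribute entirely.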
charmap = [
[' ', '160', 'no-break space'],
['&', '38', 'ampersand'],
['"', '34', 'quotation mark'],
# finance
['¢', '162', 'cent sign'],
['€', '8364', 'euro sign'],
['£', '163', 'pound sign'],
['¥', '165', 'yen sign'],
# signs
['©', '169', 'copyright sign'],
['®', '174', 'registered sign'],
['™', '8482', 'trade mark sign'],
['‰', '8240', 'per mille sign'],
['µ', '181', 'micro sign'],
['·', '183', 'middle dot'],
['•', '8226', 'bullet'],
['…', '8230', 'three dot leader'],
['′', '8242', 'minutes / feet'],
['″', '8243', 'seconds / inches'],
['§', '167', 'section sign'],
['¶', '182', 'paragraph sign'],
['ß', '223', 'sharp s / ess-zed'],
# quotations
['‹', '8249', 'single left-pointing angle quotation mark'],
['›', '8250', 'single right-pointing angle quotation mark'],
['«', '171', 'left pointing guillemet'],
['»', '187', 'right pointing guillemet'],
['‘', '8216', 'left single quotation mark'],
['’', '8217', 'right single quotation mark'],
['“', '8220', 'left double quotation mark'],
['”', '8221', 'right double quotation mark'],
['‚', '8218', 'single low-9 quotation mark'],
['„', '8222', 'double low-9 quotation mark'],
['<', '60', 'less-than sign'],
['>', '62', 'greater-than sign'],
['≤', '8804', 'less-than or equal to'],
['≥', '8805', 'greater-than or equal to'],
['–', '8211', 'en dash'],
['—', '8212', 'em dash'],
['¯', '175', 'macron'],
['‾', '8254', 'overline'],
['¤', '164', 'currency sign'],
['¦', '166', 'broken bar'],
['¨', '168', 'diaeresis'],
['¡', '161', 'inverted exclamation mark'],
['¿', '191', 'turned question mark'],
['ˆ', '710', 'circumflex accent'],
['˜', '732', 'small tilde'],
['°', '176', 'degree sign'],
['−', '8722', 'minus sign'],
['±', '177', 'plus-minus sign'],
['÷', '247', 'division sign'],
['⁄', '8260', 'fraction slash'],
['×', '215', 'multiplication sign'],
['¹', '185', 'superscript one'],
['²', '178', 'superscript two'],
['³', '179', 'superscript three'],
['¼', '188', 'fraction one quarter'],
['½', '189', 'fraction one half'],
['¾', '190', 'fraction three quarters'],
# math / logical
['ƒ', '402', 'function / florin'],
['∫', '8747', 'integral'],
['∑', '8721', 'n-ary sumation'],
['∞', '8734', 'infinity'],
['√', '8730', 'square root'],
['∼', '8764', 'similar to'],
['≅', '8773', 'approximately equal to'],
['≈', '8776', 'almost equal to'],
['≠', '8800', 'not equal to'],
['≡', '8801', 'identical to'],
['∈', '8712', 'element of'],
['∉', '8713', 'not an element of'],
['∋', '8715', 'contains as member'],
['∏', '8719', 'n-ary product'],
['∧', '8743', 'logical and'],
['∨', '8744', 'logical or'],
['¬', '172', 'not sign'],
['∩', '8745', 'intersection'],
['∪', '8746', 'union'],
['∂', '8706', 'partial differential'],
['∀', '8704', 'for all'],
['∃', '8707', 'there exists'],
['∅', '8709', 'diameter'],
['∇', '8711', 'backward difference'],
['∗', '8727', 'asterisk operator'],
['∝', '8733', 'proportional to'],
['∠', '8736', 'angle'],
# undefined
['´', '180', 'acute accent'],
['¸', '184', 'cedilla'],
['ª', '170', 'feminine ordinal indicator'],
['º', '186', 'masculine ordinal indicator'],
['†', '8224', 'dagger'],
['‡', '8225', 'double dagger'],
# alphabetical special chars
['À', '192', 'A - grave'],
['Á', '193', 'A - acute'],
['Â', '194', 'A - circumflex'],
['Ã', '195', 'A - tilde'],
['Ä', '196', 'A - diaeresis'],
['Å', '197', 'A - ring above'],
['Æ', '198', 'ligature AE'],
['Ç', '199', 'C - cedilla'],
['È', '200', 'E - grave'],
['É', '201', 'E - acute'],
['Ê', '202', 'E - circumflex'],
['Ë', '203', 'E - diaeresis'],
['Ì', '204', 'I - grave'],
['Í', '20
|
parpg/parpg
|
parpg/behaviours/npc.py
|
Python
|
gpl-3.0
| 4,169
| 0.002639
|
# This file is part of PARPG.
# PARPG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# PARPG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PARPG. If not, see <http://www.gnu.org/licenses/>.
from random import randrange
from fife import fife
import base
from moving import MovingAgentBehaviour
class NPCBehaviour(MovingAgentBehaviour):
"""This is a basic NPC behaviour"""
def __init__(self, parent=None):
super(NPCBehaviour, self).__init__()
self.parent = parent
self.state = base._AGENT_STATE_NONE
self.pc = None
self.target_loc = None
# hard code these for now
self.distRange = (2, 4)
# these are parameters to lower the rate of wandering
# wander rate is the number of "IDLEs" before a wander step
# this could be set for individual NPCs at load time
# or thrown out altogether.
# HACK: 09.Oct.2011 Beliar
# I increased the wander rate to 900 since the idle method
# gets called way more often now.
self.wanderCounter = 0
self.wanderRate = 9
def getTargetLocation(self):
"""@rtype: fife.Location
@return: NPC's position"""
x = self.getX()
y = self.getY()
if self.state == base._AGENT_STATE_WANDER:
""" Random Target Location """
l = [0, 0]
for i in range(len(l)):
sign = randrange(0, 2)
dist = randrange(self.distRange[0], self.distRange[1])
if sign == 0:
dist *= -1
l[i] = dist
x += l[0]
y += l[1]
# Random walk is
# rl = randint(-1, 1);ud = randint(-1, 1);x += rl;y += ud
l = fife.Location(self.agent.getLocation())
l.setLayerCoordinates(fife.ModelCoordinate(x, y))
return l
def onInstanceActionFinished(self, instance, action):
"""What the NPC does when it has finished an action.
Called by the engine and required for InstanceActionListeners.
@type instance: fife.Instance
@param instance: self.agent
@type action: ???
@param action: ???
@return: None"""
if self.state == base._AGENT_STATE_WANDER:
self.target_loc = self.getTargetLocation()
MovingAgentBehaviour.onInstanceActionFinished(self, instance, action)
def idle(self):
"""Controls the NPC when it is idling. Different actions
based on the NPC's state.
@return: None"""
if self.state == base._AGENT_STATE_NONE:
self.state = bas
|
e._AGENT_STATE_IDLE
self.animate('stand')
elif self.state == base._AGENT_STATE_IDLE:
if self.wanderCounter > self.wanderRate:
self.wanderCounter = 0
self.state = base._AGENT_STATE_WANDER
else:
self.wanderCounter += 1
self.state = base._AGENT_STATE_NONE
self.target_loc = self.getTargetLocation()
self.animate('stand')
elif self.state == base._AGENT_STAT
|
E_WANDER:
self.wander(self.target_loc)
self.state = base._AGENT_STATE_NONE
elif self.state == base._AGENT_STATE_TALK:
self.animate('stand', self.pc.getLocation())
def wander(self, location):
"""Nice slow movement for random walking.
@type location: fife.Location
@param location: Where the NPC will walk to.
@return: None"""
self.agent.move('walk', location, self.speed)
coords = location.getMapCoordinates()
|
luosch/leetcode
|
python/Next Permutation.py
|
Python
|
mit
| 459
| 0.002179
|
class Solution(object):
def nextPermutation(self, nums):
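        # Classic next-permutation scheme: scan from the right for the first
        # index ('cursor') with nums[cursor] < nums[cursor + 1], swap it with
        # the rightmost larger element, then sort the tail ascending. If the
        # list is fully non-increasing, cursor stays -1, so the swap below
        # targets the last element and the final sort rewrites the whole
        # list in ascending order, as the problem requires.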
cursor = -1
for i in range(len(nums) - 1, 0, -1):
if nums[i - 1] < nums[i]:
curso
|
r = i - 1
break
for i in range(len(nums) - 1, -1, -1):
if nums[i] > nums[cursor]:
nums[i], nums[cursor] = nums[cursor], nums[i]
|
nums[cursor + 1:] = sorted(nums[cursor + 1:])
break
|
MakarenaLabs/Orator-Google-App-Engine
|
orator/schema/grammars/__init__.py
|
Python
|
mit
| 206
| 0
|
# -*- coding: utf-8 -*-
from .grammar import SchemaGrammar
from .sq
|
lite_grammar import SQLiteSchemaGrammar
from .postgres_grammar import PostgresSchemaGrammar
from .mysql_grammar import MySql
|
SchemaGrammar
|
leftaroundabout/pyunicorn
|
tests/test_core/TestResitiveNetwork-circuits.py
|
Python
|
bsd-3-clause
| 6,379
| 0
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 SWIPO Project
#
# Authors (this file):
# Stefan Schinkel <stefan.sch
|
inkel@gmail.com>
"""
Provides sanity checks for basic parallel and serial circuits.
"""
import numpy as np
import networkx as nx
from pyunicorn import ResNetwork
from .ResistiveNetwork_utils import *
debug = 0
""" Test for basic sanity, pa
|
rallel and serial circuits
"""
def testParallelTrivial():
r""" Trivial parallel case:
a) 0 --- 1 --- 2
/---- 3 ---\
b) 0 --- 1 --- 2
c) /---- 3 ---\
0 --- 1 --- 2
\____ 4 ___/
ER(a) = 2*ER(b) = 3*ER(c)
"""
nws = []
# construct nw1
idI, idJ = [0, 1], [1, 2]
nws.append(makeNW(idI, idJ, [.1]))
# construct nw2
idI += [0, 3]
idJ += [3, 2]
nws.append(makeNW(idI, idJ, [.1]))
# nw3
idI += [0, 4]
idJ += [4, 2]
nws.append(makeNW(idI, idJ, [.1]))
ER = []
for nw in nws:
rnw = ResNetwork(nw)
ER.append(rnw.effective_resistance(0, 2))
assert abs(ER[0]/2-ER[1]) < .1E-6
assert abs(ER[0]/3-ER[2]) < .1E-6
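# Cross-check for the assertions above: k identical branches of resistance R
# in parallel combine as 1/R_eff = k/R, i.e. R_eff = R/k, which is why the
# two- and three-branch networks give ER[0]/2 and ER[0]/3 respectively.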
def testParallelLessTrivial():
""" Less Trivial Parallel Case:
|--- 1 --- 0
a) 2 |
|--- 3 ----4
|--- 1 --- 0 --- 5 --- |
b) 2 | | 7
|--- 3 ----4 --- 6 --- |
|---- 8 ----------- |
| | |
| |----------| |
| | |
|--- 1 --- 0 --- 5 --- | | |
c) 2 | | 7 | 9
|--- 3 ----4 --- 6 --- | | |
| | |
| ----------| |
| | |
|---- 10 -----------|
"""
nws = []
idI = [0, 1, 1, 2, 3]
idJ = [1, 2, 3, 3, 4]
nws.append(makeNW(idI, idJ, [1]*len(idI)))
idI.extend([0, 5, 5, 6, 6])
idJ.extend([5, 6, 7, 7, 4])
nws.append(makeNW(idI, idJ, [1]*len(idI)))
idI.extend([0, 8, 8, 9, 10])
idJ.extend([8, 9, 10, 10, 4])
nws.append(makeNW(idI, idJ, [1]*len(idI)))
ER = []
Gs = []
for nw in nws:
rnw = ResNetwork(nw)
ER.append(rnw.effective_resistance(0, 4))
# Gs.append(nx.DiGraph(nw))
# # showGraphs(Gs)
# # s = ''
# # for i,e in enumerate(ER):
# # s = s + "NW{:d} {:.3f}\t".format(i,e)
# # print "Effective resistances (0,2)\n %s" % (s)
assert abs(ER[0]/2-ER[1]) < .1E-6
assert abs(ER[0]/3-ER[2]) < .1E-6
# """ Less Trivial Parallel Case:
# /--- 1 --- 0
# a) 2 |
# \--- 3 ----4
# /--- 1 --- 0 --- 5 --- \
# b) 2 | | 7
# \--- 3 ----4 --- 6 --- /
# / --- 8 ----------- \
# | \
# /--- 1 --- 0 --- 5 --- \ \
# c) 2 7 9
# \--- 3 ----4 --- 6 --- / /
# | /
# \ --- 10 -----------/
# """
# nws =[]
# #construct nw1
# idI = [0,1,1,2,3]
# idJ = [1,2,3,3,4]
# val = [.1] * 5
# nws.append(makeNW(idI,idJ,[.1]*len(idI))[0])
# idI.extend([0,5,6,7])
# idJ.extend([5,6,7,4])
# val.extend( val * 6)
# nws.append(makeNW(idI,idJ,[.1]*len(idI))[0])
# idI.extend([0,8,9,10])
# idJ.extend([8,9,10,4])
# val.extend( val * 4)
# nws.append(makeNW(idI,idJ,val)[0])
# ER = []
# for nw in nws:
# rnw = ResNetwork(nw)
# ER.append( rnw.effective_resistance(0,4))
# s = ''
# for i,e in enumerate(ER):
# s = s + "NW{:d} {:.3f}\t".format(i,e)
# print "Effective resistances (0,2)\n %s" % (s)
# assert abs(ER[0]/2-ER[1]) < .1E-6
# assert abs(ER[0]/3-ER[2]) < .1E-6
def testParallelRandom():
""" 50 random parallel cases
"""
N = 10
p = .7
runs = 0
while runs < 50:
G = nx.fast_gnp_random_graph(N, p)
a = 0
b = G.number_of_nodes()-1
try:
nx.shortest_path(G, source=a, target=b)
        except nx.NetworkXNoPath:
continue
i, j = [], []
for xx in G.edges():
i.append(xx[0])
j.append(xx[1])
# %.1f values for resistance
val = np.round(np.random.ranf(len(i))*100)/10
# and test
nw1 = makeNW(i, j, val)
nw2 = parallelCopy(nw1, a, b)
ER1 = ResNetwork(nw1).effective_resistance(a, b)
ER2 = ResNetwork(nw2).effective_resistance(a, b)
# assertion
        assert abs(ER1/2 - ER2) < 1E-6
# increment runs
runs += 1
def testSerialTrivial():
"""Trivial serial test case
a) 0 --- 1 --- 2
b) 0 --- 1 --- 2 --- 3 --- 4
ER(a)/2 = ER(b)
"""
# construct nw1
idI = [0, 1]
idJ = [1, 2]
val = [1, 1]
nw1 = np.zeros((3, 3))
G1 = nx.DiGraph()
for i, j, v in zip(idI, idJ, val):
nw1[i, j] = v
nw1[j, i] = v
# construct nw2
idI = idI + [2, 3]
idJ = idJ + [3, 4]
val = val + [1, 1]
nw2 = np.zeros((5, 5))
for i, j, v in zip(idI, idJ, val):
nw2[i, j] = v
nw2[j, i] = v
# init ResNetworks
rnw1 = ResNetwork(nw1)
rnw2 = ResNetwork(nw2)
ER1 = rnw1.effective_resistance(0, 2)
ER2 = rnw2.effective_resistance(0, 4)
print "Effective resistances (0,2)"
print "NW1 %.3f\tNW2 %.3f\t 2*NW1 = %.3f" % (ER1, ER2, 2*ER1)
    assert abs(ER1*2 - ER2) < 1E-6
def testSerialRandom():
""" 50 Random serial test cases
"""
N = 10
p = .7
runs = 0
while runs < 50:
# a random graph
G = nx.fast_gnp_random_graph(N, p)
try:
nx.shortest_path(G, source=0, target=N-1)
        except nx.NetworkXNoPath:
continue
# convert to plain ndarray
nw1 = nx2nw(G)
# copy and join network
nw2 = serialCopy(nw1)
# compute effective resistance
ER1 = ResNetwork(
nw1, silence_level=3).effective_resistance(0, len(nw1)-1)
ER2 = ResNetwork(
nw2, silence_level=3).effective_resistance(0, len(nw2)-1)
# increment runs
runs += 1
# assertion
print ER1*2-ER2
        assert abs(ER1*2 - ER2) < 1E-6
|
thiagoald/hardmob_information_extractor
|
sources/generate_sentiments.py
|
Python
|
mit
| 1,731
| 0.006932
|
import requests
import json
NL_KEY = '*'
TL_KEY = '*'
def translate(text):
tmp_payload = {"q": text, "target": "en"}
s = requests.post('https://translation.googleapis.com/language/translate/v2?key=' + TL_KEY, json=tmp_payload)
data = s.json()['data']['translations'][0]
return data['translatedText']
def sentiment(text):
payload = {"encodingType": "UTF8", "document": {"type": "PLAIN_TEXT", "content": text}}
r = requests.post('https://language.googleapis.com/v1/documents:analyzeSentiment?key=' + NL_KEY,
json=payload)
return r.json()
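# Illustrative flow (requires valid NL_KEY/TL_KEY): translate('muito bom')
# would return something like 'very good', and sentiment(text) returns the
# Natural Language API response, with the overall score (roughly -1.0 to 1.0)
# under ['documentSentiment']['score'].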
def generate_sentiments():
with open('resources/new_output.js
|
on') as json_data:
objs = json.load(json_data)
newobjs = []
amt = len(objs)
for obj in objs:
comments = obj['comments']
scores = []
tot_score = 0
print "for " + obj['hardmob_link'] + ":"
try:
for comment in comments:
traducao = translate(comment)
sent = sentiment(traducao)
score = sent['documentSent
|
iment']['score']
scores.append(score)
tot_score += score
obj['scores'] = scores
            obj['avg_score'] = tot_score / len(scores)
newobjs.append(obj)
except:
print "error found"
print "remaining: " + str(amt)
amt -= 1
return newobjs
def main():
newjson = generate_sentiments()
with open('resources/scored_info.json', 'w') as json_file:
json.dump(newjson, json_file, indent=4, sort_keys=True, ensure_ascii=True)
if __name__ == '__main__':
main()
|
gary-pickens/HouseMonitor
|
housemonitor/outputs/zigbee/test/zigbeeoutputstep_test.py
|
Python
|
mit
| 1,738
| 0.021864
|
'''
Created on Mar 8, 2013
@author: Gary
'''
import unittest
from housemonitor.outputs.zigbee.zigbeecontrol import ZigBeeControl
from housemonitor.outputs.zigbee.zigbeeoutputstep import ZigBeeOutputStep
from housemonitor.outputs.zigbee.zigbeeoutputthread import ZigBeeOutputThread
from housemonitor.lib.hmqueue import HMQueue
from housemonitor.lib.constants import Constants
from mock import Moc
|
k, MagicMock, patch
from housemonitor.lib.common import Common
import logging.config
class Test( unittest.TestCase ):
logger = logging.getLogger( 'UnitTest' )
def setUp( self ):
logging.config.fileConfig( "unittest_logging.conf" )
def tearDown( self ):
pass
def test_logger_name( self ):
queue
|
= HMQueue()
zig = ZigBeeOutputStep( queue )
self.assertEqual( Constants.LogKeys.outputsZigBee, zig.logger_name )
def test_topic_name( self ):
queue = HMQueue()
zig = ZigBeeOutputStep( queue )
self.assertEqual( Constants.TopicNames.ZigBeeOutput, zig.topic_name )
def test_step( self ):
value = 5
data = {Constants.DataPacket.device: 'device',
Constants.DataPacket.port: 'port',
Constants.DataPacket.arrival_time: 'arrival_time'}
listeners = ['a', 'b', 'c']
package = {'data': data, 'value': value}
queue = MagicMock( spec=HMQueue )
zig = ZigBeeOutputStep( queue )
v, d, l = zig.step( value, data, listeners )
queue.transmit.assert_called_once()
self.assertEqual( value, v )
self.assertEqual( data, d )
self.assertEqual( listeners, l )
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
tallessa/tallessa-backend
|
tallessa_backend/management/commands/setup.py
|
Python
|
agpl-3.0
| 1,317
| 0.000759
|
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command
from
|
django.core.management.base import BaseCommand
from tallessa.utils import log_get_or_create
# usually you should getLogger(__name__) but we are not under the tallessa namespace right now
logger = logging.getLogger('tallessa')
class Command(BaseCommand):
def handle(self, *args, **options):
management_commands = [
# (('collectstatic',), dict(interactive=False)),
(('migrate',), dict()),
]
if settings.DEBUG:
management_commands
|
.append((('setup_default_team',), dict()))
for pargs, opts in management_commands:
logger.info("** Running: %s", pargs[0])
call_command(*pargs, **opts)
if settings.DEBUG:
user, created = User.objects.get_or_create(
username='mahti',
defaults=dict(
first_name='Markku',
last_name='Mahtinen',
is_staff=True,
is_superuser=True,
),
)
if created:
user.set_password('mahti')
user.save()
log_get_or_create(logger, user, created)
|
henrikau/metadoc
|
client/allocations/entries.py
|
Python
|
gpl-3.0
| 2,825
| 0.004248
|
# -*- coding: utf-8 -*-
#
# allocations/entries.py is part of MetaDoc (Client).
#
# All of MetaDoc is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# MetaDoc is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more d
|
etails.
#
# You should have received a copy of the GNU General Public License
# along with MetaDoc. If not, see <http://www.gnu.org/licenses/>.
#
import metaelement
class AllocationEntry(metaelement.MetaElement):
"""AllocationEntry - Allocation for specific projects. """
xml_tag_name = "all_entry"
def __init__(self, account_nmb, volume, metric, all_class
|
, period):
""" Defines attributes for all_entry XML elements.
@param account_nmb: Account number for allocation.
@type account_nmb: String
@param volume: The amount of parameter metric.
@type volume: String
@param metric: Measurement of parameter volume.
@type metric: String
@param all_class: Allocations class.
@type all_class: String, either "pri" or "nonpri", for prioritized and
non-prioritized allocation.
@param period: Period of allocation.
@type period: String on form "YYYY.P" where P is the year's period.
"""
attributes = {
'account_nmb': account_nmb,
'volume': volume,
'metric': metric,
'all_class': all_class,
'period': period,
}
self.legal_metric = ('hours', 'mb',)
self.legal_all_class = ('pri', 'nonpri',)
super(AllocationEntry, self).__init__(AllocationEntry.xml_tag_name, attributes)
def clean_metric(self, metric):
"""Checks for legal values of metric.
Raises L{IllegalAttributeValueError} on illegal metric value.
@param metric: Metric for allocation
@type metric: String
@return: String
"""
self._clean_allowed_values(metric, self.legal_metric, 'metric', self.xml_tag_name, False)
return metric
def clean_all_class(self, all_class):
"""Checks for legal values of all_class.
Raises L{IllegalAttributeValueError} on illegal all_class value.
@param all_class: Allocation class of allocation
@type all_class: String
@return: String
"""
self._clean_allowed_values(all_class, self.legal_all_class, 'all_class', self.xml_tag_name, False)
return all_class
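# Hypothetical usage (attribute values are illustrative only):
#   entry = AllocationEntry('NN1234K', '100000', 'hours', 'pri', '2012.1')
# builds an <all_entry> element for a prioritized CPU-hour allocation.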
|
daafgo/Edx_bridge
|
xapi-bridge/__main__.py
|
Python
|
apache-2.0
| 3,433
| 0.034081
|
import sys, os, json, requests, threading
from urlparse import urljoin
from pyinotify import WatchManager, Notifier, EventsCodes, ProcessEvent
import converter, settings
class QueueManager:
'''
Manages the batching and publishing of statements in a thread-safe way.
'''
def __init__(self):
self.cache = []
self.cache_lock = threading.Lock()
self.publish_timer = None
def __del__(self):
self.destroy()
def destroy(self):
        if self.publish_timer is not None:
self.publish_timer.cancel()
def push(self, stmt):
'''Add a statement to the outgoing queue'''
# push statement to queue
with self.cache_lock:
self.cache.append(stmt)
# set timeout to publish statements
if len(self.cache) == 1 and settings.PUBLISH_MAX_WAIT_TIME > 0:
self.publish_timer = threading.Timer(settings.PUBLISH_MAX_WAIT_TIME, self.publish)
self.publish_timer.start()
# publish immediately if statement threshold is reached
if settings.PUBLISH_MAX_PAYLOAD <= len(self.cache):
self.publish()
def publish(self):
'''Publish the queued statements to the LRS and clear the queue'''
# make sure no new statements are added while publishing
with self.cache_lock:
# push statements to the lrs
url = urljoin(settings.LRS_ENDPOINT, 'statements')
r = requests.post(url, data=json.dumps(self.cache),
auth=(settings.LRS_USERNAME, settings.LRS_PASSWORD),
headers={'X-Experience-API-Version':'1.0.1', 'Content-Type':'application/json'})
print r.text
# clear cache and cancel any pending publish timeouts
self.cache = []
            if self.publish_timer is not None:
self.publish_timer.cancel()
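# Usage sketch (illustrative): statements accumulate until either
# settings.PUBLISH_MAX_PAYLOAD items are queued or PUBLISH_MAX_WAIT_TIME
# seconds elapse, whichever comes first; the batch is then POSTed to the LRS.
#   q = QueueManager()
#   q.push(statement_dict)  # may trigger an immediate publish()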
class TailHandler(ProcessEvent):
'''
Parse incoming log events, convert to xapi, and add to publish queue
'''
MASK = EventsCodes.OP_FLAGS['IN_MODIFY']
def __init__(self, filename):
# prepare file input stream
self.ifp = open(filename, 'r', 1)
self.ifp.seek(0,2)
self.publish_queue = QueueManager()
self.raceBuffer = ''
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.publish_queue.destroy()
def process_IN_MODIFY(self,event):
'''Handles any changes to the log file'''
# read all new contents from the end of the file
buff = self.raceBuffer + self.ifp.read()
# if there's no newline at end of file, we probably read it before edx finished writing
# add read contents to a buffer and return
if len(buff) != 0 and buff[-1] != '\n':
self.raceBuffer = buff
else:
self.raceBuffer = ''
evts = [i for i in buff.split('\n') if len(i) != 0]
|
for e
|
in evts:
try:
evtObj = json.loads(e)
except ValueError as err:
print 'Could not parse JSON for', e
continue
xapi = converter.to_xapi(evtObj)
if xapi != None:
for i in xapi:
self.publish_queue.push(i)
print '{} - {} {} {}'.format(i['timestamp'], i['actor']['name'], i['verb']['display']['en-US'], i['object']['definition']['name']['en-US'])
def watch(watch_file):
'''
Watch the given file for changes
'''
wm = WatchManager()
with TailHandler(watch_file) as th:
notifier = Notifier(wm, th)
wdd = wm.add_watch(watch_file, TailHandler.MASK)
notifier.loop()
# flush queue before exiting
th.publish_queue.publish()
print 'Exiting'
if __name__ == '__main__':
log_path = os.path.abspath(sys.argv[1]) if len(sys.argv) > 1 else '/edx/var/log/tracking.log'
print 'Watching file', log_path
watch(log_path)
|
seiji56/rmaze-2016
|
logic_code/last_ver/sim/herkulex.py
|
Python
|
gpl-3.0
| 23,289
| 0.004122
|
#!/usr/bin/env python2.7
"""
@package: pyHerkulex
@name: herkulex.py
@author: Achu Wilson (achuwilson@gmail.com), Akhil Chandran (akhilchandran.t.r@gmail.com)
@version: 0.1
This is a python library for interfacing the Herkulex range of smart
servo motors manufactured by Dongbu Robotics.
The library was created by Achu Wilson (mailto:achu@sastrarobotics.com)
for the internal projects of Sastra Robotics
This free software is distributed under the GNU General Public License.
See http://www.gnu.org/licenses/gpl.html for details.
For usage of this code for commercial purposes contact Sastra Robotics
India Pvt. Ltd. (mailto:contact@sastrarobotics.com)
"""
import time
try:
# PySerial Module
import serial
except:
    raise ImportError("couldn't find pySerial")
# Commands
EEP_WRITE_REQ = 0x01
EEP_READ_REQ = 0x02
RAM_WRITE_REQ = 0x03
RAM_READ_REQ = 0x04
I_JOG_REQ = 0x05
S_JOG_REQ = 0x06
STAT_REQ = 0x07
ROLLBACK_REQ = 0x08
REBOOT_REQ = 0x09
EEP_WRITE_ACK = 0x41
EEP_READ_ACK = 0x42
RAM_WRITE_ACK = 0x43
RAM_READ_ACK = 0x44
I_JOG_ACK = 0x45
S_JOG_ACK = 0x46
STAT_ACK = 0x47
ROLLBACK_ACK = 0x48
REBOOT_ACK = 0x49
#Addresses
MODEL_NO1_EEP = 0
MODEL_NO2_EEP = 1
VERSION1_EEP = 2
VERSION2_EEP = 3
BAUD_RATE_EEP = 4
SERVO_ID_EEP = 6
SERVO_ID_RAM = 0
ACK_POLICY_EEP = 7
ACK_POLICY_RAM = 1
ALARM_LED_POLICY_EEP = 8
ALARM_LED_POLICY_RAM = 2
TORQUE_POLICY_EEP = 9
TORQUE_POLICY_RAM = 3
MAX_TEMP_EEP = 11
MAX_TEMP_RAM = 5
MIN_VOLTAGE_EEP = 12
MIN_VOLTAGE_RAM = 6
MAX_VOLTAGE_EEP = 13
MAX_VOLTAGE_RAM = 7
ACCELERATION_RATIO_EEP = 14
ACCELERATION_RATIO_RAM = 8
MAX_ACCELERATION_TIME_EEP = 15
MAX_ACCELERATION_TIME_RAM = 9
DEAD_ZONE_EEP = 16
DEAD_ZONE_RAM = 10
SATURATOR_OFFSET_EEP = 17
SATURATOR_OFFSET_RAM = 11
SATURATOR_SLOPE_EEP = 18
SATURATOR_SLOPE_RAM = 12
PWM_OFFSET_EEP = 20
PWM_OFFSET_RAM = 14
MIN_PWM_EEP = 21
MIN_PWM_RAM = 15
MAX_PWM_EEP = 22
MAX_PWM_RAM = 16
OVERLOAD_PWM_THRESHOLD_EEP = 24
OVERLOAD_PWM_THRESHOLD_RAM = 18
MIN_POSITION_EEP = 26
MIN_POSITION_RAM = 20
MAX_POSITION_EEP = 28
MAX_POSITION_RAM = 22
POSITION_KP_EEP = 30
POSITION_KP_RAM = 24
POSITION_KD_EEP = 32
POSITION_KD_RAM = 26
POSITION_KI_EEP = 34
POSITION_KI_RAM =28
POSITION_FEEDFORWARD_GAIN1_EEP = 36
POSITION_FEEDFORWARD_GAIN1_RAM = 30
POSITION_FEEDFORWARD_GAIN2_EEP = 38
POSITION_FEEDFORWARD_GAIN2_RAM = 32
VELOCITY_KP_EEP = 40
VELOCITY_KP_RAM = 34
VELOCITY_KI_EEP = 42
VELOCITY_KI_RAM = 36
LED_BLINK_PERIOD_EEP = 44
LED_BLINK_PERIOD_RAM = 38
ADC_FAULT_CHECK_PERIOD_EEP = 45
ADC_FAULT_CHECK_PERIOD_RAM = 39
PACKET_GARBAGE_CHECK_PERIOD_EEP = 46
PACKET_GARBAGE_CHECK_PERIOD_RAM = 40
STOP_DETECTION_PERIOD_EEP = 47
STOP_DETECTION_PERIOD_RAM = 41
OVERLOAD_DETECTION_PERIOD_EEP = 48
OVERLOAD_DETECTION_PERIOD_RAM = 42
STOP_THRESHOLD_EEP = 49
STOP_THRESHOLD_RAM = 43
INPOSITION_MARGIN_EEP = 50
INPOSITION_MARGIN_RAM = 44
CALIBRATION_DIFF_LOW_EEP = 52
CALIBRATION_DIFF_LOW_RAM = 46
CALIBRATION_DIFF_UP_EEP = 53
CALIBRATION_DIFF_UP_RAM = 47
STATUS_ERROR_RAM = 48
STATUS_DETAIL_RAM = 49
AUX1_RAM = 50
TORQUE_CONTROL_RAM = 52
LED_CONTROL_RAM = 53
VOLTAGE_RAM = 54
TEMPERATURE_RAM = 55
CURRENT_CONTROL_MODE_RAM = 56
TICK_RAM = 57
CALIBRATED_POSITION_RAM = 58
ABSOLUTE_POSITION_RAM = 60
DIFFERENTIAL_POSITION_RAM = 62
PWM_RAM = 64
ABSOLUTE_SECOND_POSITION_RAM = 66
ABSOLUTE_GOAL_POSITION_RAM = 68
ABSOLUTE_DESIRED_TRAJECTORY_POSITION = 70
DESIRED_VELOCITY_RAM = 72
BYTE1 = 0x01
BYTE2 = 0x02
BROADCAST_ID = 0xFE
SERPORT = None
def connect(portname, baudrate):
""" Connect to the Herkulex bus
Connect to serial port to which Herkulex Servos are attatched
Args:
portname (str): The serial port name
baudrate (int): The serial port baudrate
Raises:
        SerialException: Error occurred while opening serial port
"""
global SERPORT
try:
SERPORT = serial.Serial(portname, baudrate, timeout = 0.1)
except:
raise HerkulexError("could not open the serial port")
def close():
""" Close the Serial port
Properly close the serial port before exiting the application
Raises:
        SerialException: Error occurred while closing serial port
"""
try:
SERPORT.close()
except:
raise HerkulexError("could not close the serial port")
def checksum1(data, stringlength):
""" Calculate Checksum 1
    Calculate the checksum 1 required for the herkulex data packet
Args:
data (list): the data of which checksum is to be calculated
stringlength (int): the length of the data
Returns:
int: The calculated checksum 1
"""
value_buffer = 0
for count in range(0, stringlength):
value_buffer = value_buffer ^ data[count]
return value_buffer&0xFE
def checksum2(data):
""" Calculate Checksum 2
    Calculate the checksum 2 required for the herkulex data packet
Args:
data (int): the data of which checksum is to be calculated
Returns:
int: The calculated checksum 2
"""
return (~data)&0xFE
def se
|
nd_data(dat
|
a):
""" Send data to herkulex
    Packetize & write the packet to serial port
Args:
data (list): the data to be sent
Raises:
        SerialException: Error occurred while opening serial port
"""
datalength = len(data)
csm1 = checksum1(data, datalength)
csm2 = checksum2(csm1)
data.insert(0, 0xFF)
data.insert(1, 0xFF)
data.insert(5, csm1)
data.insert(6, csm2)
stringtosend = ""
for i in range(len(data)):
byteformat = '%02X' % data[i]
stringtosend = stringtosend + "\\x" + byteformat
try:
SERPORT.write(stringtosend.decode('string-escape'))
#print stringtosend
except:
raise HerkulexError("could not communicate with motors")
def clear_errors():
""" Clears the errors register of all Herkulex servos
Args:
none
"""
data = []
data.append(0x0B)
data.append(BROADCAST_ID)
data.append(RAM_WRITE_REQ)
data.append(STATUS_ERROR_RAM)
data.append(BYTE2)
data.append(0x00)
data.append(0x00)
send_data(data)
def scale(input_value, input_min, input_max, out_min, out_max):
""" scale a value from one range to another
"""
# Figure out how 'wide' each range is
input_span = input_max - input_min
output_span = out_max - out_min
# Convert the left range into a 0-1 range (float)
valuescaled = float(input_value - input_min) / float(input_span)
# Convert the 0-1 range into a value in the right range.
return out_min + (valuescaled * output_span)
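# e.g. scale(512, 0, 1023, 0, 100) maps a 10-bit ADC reading to roughly 50.05.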
def scan_servos():
"""Scan for the herkulex servos connected
This function will scan for all the herkulex servos connected
to the bus.
Args:
none
Returns:
list: a list of tuples of the form [(id, model)]
"""
servos = []
for servo_id in range(0x00, 0xFE):
model = get_model(servo_id)
if model:
servos += [(servo_id, model)]
return servos
def get_model(servoid):
""" Get the servo model
    This function gets the model of the Herkulex servo, provided its id
Args:
servoid(int): the id of the servo
Returns:
int: an integer corresponding to the model number
0x06 for DRS-602
0x04 for DRS-402
0x02 for DRS-202
"""
data = []
data.append(0x09)
data.append(servoid)
data.append(EEP_READ_REQ)
data.append(MODEL_NO1_EEP)
data.append(BYTE1)
send_data(data)
rxdata = []
try:
rxdata = SERPORT.read(12)
except:
raise HerkulexError("could not communicate with motors")
if len(rxdata) < 12:
return None # incomplete or no reply - treat as "no servo at this id" so scan_servos can skip it
return ord(rxdata[9])&0xFF
class servo:
""" The servo class
This class handles the interface to the Herkulex smart servos
"""
def __init__(self, servoid):
""" servo class initialization
Args:
servoid(int): the id of the servo
"""
self.servoid = servoid
self.servomodel = get_model(servoid)
def get_model(self):
""" Get the servo model
This function gets the model of the Herkulex servo, provided its id
Args:
none
Returns:
int: an integer corresponding to the model number
|
zguangyu/rts2
|
python/rts2/target.py
|
Python
|
gpl-2.0
| 1,526
| 0.026212
|
# Library for RTS2 JSON calls.
# (C) 2012 Petr Kubanek, Institute of Physics
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import json
class Target:
def __init__(self,id,name=None):
self.id = id
self.name = name
def reload(self):
"""Load target data from JSON interface."""
if self.id is None:
self.name = None
return
try:
data = json.getProxy().loadJson('/api/tbyid',{'id':self.id})['d'][0]
self.name = data[1]
except Exception,ex:
self.name = None
def get(name):
"""Return array with targets matching given name or target ID"""
try:
return json.getProxy().loadJson('/api/tbyid',{'id':int(name)})['d']
except ValueError:
return json.getProxy().loadJson('/api/tbyname',{'n':name})['d']
def create(name,ra,dec):
return json.getProxy().loadJson('/api/create_target', {'tn':name, 'ra':ra, 'dec':dec})['id']
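# Sketch of typical use, kept as comments since it needs a live RTS2 JSON
# proxy behind json.getProxy() (target name and coordinates are illustrative):
# tid = create('test-target', 123.45, -30.0)
# tgt = Target(tid)
# tgt.reload()
# print tgt.name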
|
kenshay/ImageScript
|
ProgramData/SystemFiles/Python/Lib/site-packages/skimage/feature/tests/test_peak.py
|
Python
|
gpl-3.0
| 18,709
| 0.001604
|
import numpy as np
import unittest
from skimage._shared.testing import assert_array_almost_equal
from skimage._shared.testing import assert_equal
from scipy import ndimage as ndi
from skimage.feature import peak
np.random.seed(21)
class TestPeakLocalMax():
def test_trivial_case(self):
trivial = np.zeros((25, 25))
peak_indices = peak.peak_local_max(trivial, min_distance=1, indices=True)
assert type(peak_indices) is np.ndarray
assert not peak_indices # inherent boolean-ness of empty list
peaks = peak.peak_local_max(trivial, min_distance=1, indices=False)
assert (peaks.astype(np.bool) == trivial).all()
def test_noisy_peaks(self):
peak_locations = [(7, 7), (7, 13), (13, 7), (13, 13)]
# image with noise of amplitude 0.8 and peaks of amplitude 1
image = 0.8 * np.random.rand(20, 20)
for r, c in peak_locations:
image[r, c] = 1
peaks_detected = peak.peak_local_max(image, min_distance=5)
assert len(peaks_detected) == len(peak_locations)
for loc in peaks_detected:
assert tuple(loc) in peak_locations
def test_relative_threshold(self):
image = np.zeros((5, 5), dtype=np.uint8)
image[1, 1] = 10
image[3, 3] = 20
peaks = peak.peak_local_max(image, min_distance=1, threshold_rel=0.5)
assert len(peaks) == 1
assert_array_almost_equal(peaks, [(3, 3)])
def test_absolute_threshold(self):
image = np.zeros((5, 5), dtype=np.uint8)
image[1, 1] = 10
image[3, 3] = 20
peaks = peak.peak_local_max(image, min_distance=1, threshold_abs=10)
assert len(peaks) == 1
assert_array_almost_equal(peaks, [(3, 3)])
def test_constant_image(self):
image = 128 * np.ones((20, 20), dtype=np.uint8)
peaks = peak.peak_local_max(image, min_distance=1)
assert len(peaks) == 0
def test_flat_peak(self):
image = np.zeros((5, 5), dtype=np.uint8)
image[1:3, 1:3] = 10
peaks = peak.peak_local_max(image, min_distance=1)
assert len(peaks) == 4
def test_sorted_peaks(self):
image = np.zeros((5, 5), dtype=np.uint8)
image[1, 1] = 20
image[3, 3] = 10
peaks = peak.peak_local_max(image, min_distance=1)
assert peaks.tolist() == [[3, 3], [1, 1]]
image = np.zeros((3, 10))
image[1, (1, 3, 5, 7)] = (1, 3, 2, 4)
peaks = peak.peak_local_max(image, min_distance=1)
assert peaks.tolist() == [[1, 7], [1, 5], [1, 3], [1, 1]]
def test_num_peaks(self):
image = np.zeros((7, 7), dtype=np.uint8)
image[1, 1] = 10
image[1, 3] = 11
image[1, 5] = 12
image[3, 5] = 8
image[5, 3] = 7
assert len(peak.peak_local_max(image, min_distance=1, threshold_abs=0)) == 5
peaks_limited = peak.peak_local_max(
image, min_distance=1, threshold_abs=0, num_peaks=2)
assert len(peaks_limited) == 2
assert (1, 3) in peaks_limited
assert (1, 5) in peaks_limited
peaks_limited = peak.peak_local_max(
image, min_distance=1, threshold_abs=0, num_peaks=4)
assert len(peaks_limited) == 4
assert (1, 3) in peaks_limited
assert (1, 5) in peaks_limited
assert (1, 1) in peaks_limited
assert (3, 5) in peaks_limited
def test_num_peaks_and_labels(self):
image = np.zeros((7, 7), dtype=np.uint8)
labels = np.zeros((7, 7), dtype=np.uint8) + 20
image[1, 1] = 10
image[1, 3] = 11
image[1, 5] = 12
image[3, 5] = 8
image[5, 3] = 7
peaks_limited = peak.peak_local_max(
image, min_distance=1, threshold_abs=0, labels=labels)
assert len(peaks_limited) == 5
peaks_limited = peak.peak_local_max(
image, min_distance=1, threshold_abs=0, labels=labels, num_peaks=2)
assert len(peaks_limited) == 2
def test_num_peaks_tot_vs_labels_4quadrants(self):
np.random.seed(21)
image = np.random.uniform(size=(20, 30))
i, j = np.mgrid[0:20, 0:30]
labels = 1 + (i >= 10) + (j >= 15) * 2
result = peak.peak_local_max(image, labels=labels,
min_distance=1, threshold_rel=0,
indices=True,
num_peaks=np.inf,
num_peaks_per_label=2)
assert len(result) == 8
result = peak.peak_local_max(image, labels=labels,
min_distance=1, threshold_rel=0,
indices=True,
num_peaks=np.inf,
num_peaks_per_label=1)
assert len(result) == 4
result = peak.peak_local_max(image, labels=labels,
min_distance=1, threshold_rel=0,
indices=True,
num_peaks=2,
num_peaks_per_label=2)
assert len(result) == 2
def test_num_peaks3D(self):
# Issue 1354: the old code only hold for 2D arrays
# and this code would die with IndexError
image = np.zeros((10, 10, 100))
image[5,5,::5] = np.arange(20)
peaks_limited = peak.peak_local_max(image, min_distance=1, num_peaks=2)
assert len(peaks_limited) == 2
def test_reorder_labels(self):
image = np.random.uniform(size=(40, 60))
i, j = np.mgrid[0:40, 0:60]
labels = 1 + (i >= 20) + (j >= 30) * 2
labels[labels == 4] = 5
i, j = np.mgrid[-3:4, -3:4]
footprint = (i * i + j * j <= 9)
expected = np.zeros(image.shape, float)
for imin, imax in ((0, 20), (20, 40)):
for jmin, jmax in ((0, 30), (30, 60)):
expected[imin:imax, jmin:jmax] = ndi.maximum_filter(
image[imin:imax, jmin:jmax], footprint=footprint)
expected = (expected == image)
result = peak.peak_local_max(image, labels=labels, min_distance=1,
threshold_rel=0, footprint=footprint,
indices=False, exclude_border=False)
assert (result == expected).all()
def test_indices_with_labels(self):
image = np.random.uniform(size=(40, 60))
i, j = np.mgrid[0:40, 0:60]
labels = 1 + (i >= 20) + (j >= 30) * 2
i, j = np.mgrid[-3:4, -3:4]
footprint = (i * i + j * j <= 9)
expected = np.zeros(image.shape, float)
for imin, imax in ((0, 20), (20, 40)):
for jmin, jmax in ((0, 30), (30, 60)):
expected[imin:imax, jmin:jmax] = ndi.maximum_filter(
image[imin:imax, jmin:jmax], footprint=footprint)
expected = np.transpose(np.nonzero(expected == image))
expected = expected[np.argsort(image[tuple(expected.T)])[::-1]]
result = peak.peak_local_max(image, labels=labels, min_distance=1,
threshold_rel=0, footprint=footprint,
indices=True, exclude_border=False)
result = result[np.argsort(image[tuple(result.T)])[::-1]]
assert (result == expected).all()
def test_ndarray_indices_false(self):
nd_image = np.zeros((5, 5, 5))
nd_image[2, 2, 2] = 1
peaks = peak.peak_local_max(nd_image, min_distance=1, indices=False)
assert (peaks == nd_image.astype(np.bool)).all()
def test_ndarray_exclude_border(self):
nd_image = np.zeros((5, 5, 5))
nd_image[[1, 0, 0], [0, 1, 0], [0, 0, 1]] = 1
nd_image[3, 0, 0] = 1
nd_image[2, 2, 2] = 1
expected = np.zeros_like(nd_image, dtype=np.bool)
expected[2, 2, 2] = True
expectedNoBorder = nd_image > 0
result = peak.peak_local_max(nd_image, min_distance=2,
exclude_border=2, indices=False)
assert_equal(result, expected)
# Check that bools work as expected
assert_equal(
|
hsoft/qtlib
|
radio_box.py
|
Python
|
bsd-3-clause
| 2,903
| 0.007234
|
# Created On: 2010-06-02
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QRadioButton
from .util import horizontalSpacer
class RadioBox(QWidget):
def __init__(self, parent=None, items=None, spread=True, **kwargs):
# If spread is False, insert a spacer in the layout so that the items don't use all the
# space they're given but rather align left.
if items is None:
items = []
super().__init__(parent, **kwargs)
self._buttons = []
self._labels = items
self._selected_index = 0
self._spacer = horizontalSpacer() if not spread else None
self._layout = QHBoxLayout(self)
self._update_buttons()
#--- Private
def _update_buttons(self):
if self._spacer is not None:
self._layout.removeItem(self._spacer)
to_remove = self._buttons[len(self._labels):]
for button in to_remove:
self._layout.removeWidget(button)
button.setParent(None)
del self._buttons[len(self._labels):]
to_add = self._labels[len(self._buttons):]
for _ in to_add:
button = QRadioButton(self)
self._buttons.append(button)
self._layout.addWidget(button)
button.toggled.connect(self.buttonToggled)
if self._spacer is not None:
self._layout.addItem(self._spacer)
if not self._buttons:
return
for button, label in zip(self._buttons, self._labels):
button.setText(label)
self._update_selection()
def _update_selection(self):
self._selected_index = max(0, min(self._selected_index, len(self._buttons)-1))
selected = self._buttons[self._selected_index]
selected.setChecked(True)
#--- Event Handlers
def buttonToggled(self):
for i, button in enumerate(self._buttons):
if button.isChecked():
self._selected_index = i
self.itemSelected.emit(i)
break
#--- Signals
itemSelected = pyqtSignal(int)
#--- Properties
@property
def buttons(self):
return self._buttons[:]
@property
def items(self):
return self._labels[:]
@items.setter
def items(self, value):
self._labels = value
self._update_buttons()
@property
def selected_index(self):
return self._selected_index
@selected_index.setter
def selected_index(self, value):
self._selected_index = value
self._update_selection()
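# A minimal usage sketch, as comments (the labels and the slot are
# illustrative, and a running QApplication is assumed):
# from PyQt5.QtWidgets import QApplication
# app = QApplication([])
# box = RadioBox(items=['First', 'Second'], spread=False)
# box.itemSelected.connect(lambda index: print('selected', index))
# box.selected_index = 1 # programmatically checks the second button
# box.show()
# app.exec_()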
|
acs-um/gestion-turnos
|
apps/clientes/migrations/0003_auto_20150605_2119.py
|
Python
|
mit
| 1,158
| 0.000864
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('clientes', '0002_auto_20150530_1324'),
]
operations = [
migrations.CreateModel(
name='PerfilCliente',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nombre', models.CharField(max_length=50)),
('apellido', models.CharField(max_length=50)),
('documento', models.IntegerField()),
('telefono', models.IntegerField()),
('obrasocial', models.CharField(max_length=50)),
('email', models.EmailField(max_length=75)),
('cliente', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.DeleteModel(
name='Cliente',
),
]
|
AdrianRibao/django-users
|
django_users/templatetags/users.py
|
Python
|
bsd-3-clause
| 422
| 0.014218
|
# -*- coding: utf-8 -*-
from django import template
from django_users.forms import CreateUserForm
#from django.utils.translation import ugettext as _
register = template.Library()
@register.inclusion_tag('users/templatetags/registration.html', takes_context = True)
def registration_form(context, form=None, *args, **kwargs):
if not form:
form = CreateUserForm
return {
'form': form,
}
|
pombredanne/milk
|
milk/tests/test_nfoldcrossvalidation_regression.py
|
Python
|
mit
| 10,055
| 0.001392
|
import numpy as np
from milk.measures.nfoldcrossvalidation import nfoldcrossvalidation, foldgenerator
# Regression test in 2011-01-31
def test_getfoldgenerator():
labels = np.array([
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
])
origins = np.array([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6,
6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8,
8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
15, 15, 15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23,
23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25,
25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27,
27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
29, 29, 29, 29, 29, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 31, 31, 31, 31, 31,
31, 31, 31, 31, 31, 31, 31, 31, 31, 32, 32, 32, 32, 32, 32, 32, 32,
33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 35, 35, 35, 35, 35, 35, 35,
35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35,
35, 36,
36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 37, 37, 37, 37,
37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 38, 38, 38, 38, 38, 38, 38,
38, 38, 38, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 40, 40,
40, 40, 40, 40, 40, 40, 40, 40, 40, 41, 41, 41, 41, 41, 41, 41, 41,
41, 41, 41, 41, 41, 41, 41, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42,
42, 42, 42, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43,
43, 43, 43, 43, 43, 43, 43, 43, 44, 44, 44, 44, 44, 44, 44, 44, 44,
44, 44, 44, 44, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45,
45, 45, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46,
47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47,
47, 47, 47, 47, 47, 47, 47, 47, 48, 48, 48, 48, 48, 48, 48, 48, 48,
48, 48, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 50, 50, 50, 50,
50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51,
51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 52, 52, 52, 52, 52, 5
|
Panda3D-google-code-repositories/naith
|
game/plugins/simpleweapon/simpleweapon.py
|
Python
|
apache-2.0
| 7,962
| 0.017458
|
# -*- coding: utf-8 -*-
# Copyright Tom SF Haines
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import posixpath
import random
import math
from bin.shared import ray_cast
from bin.shared import csp
from direct.actor.Actor import Actor
from direct.interval.IntervalGlobal import *
from direct.interval.ActorInterval import ActorInterval
from panda3d.core import *
from panda3d.ode import *
class SimpleWeapon:
"""Provides a simple weapon system - not very sophisticaed, but good enough to test shooting things."""
def __init__(self,manager,xml):
self.gunView = render.attachNewNode('gun-view')
self.ray = None
self.reload(manager,xml)
def destroy(self):
self.gunView.removeNode()
if self.ray!=None:
self.ray.destroy()
def reload(self,manager,xml):
# Get the path to load weapons from...
basePath = manager.get('paths').getConfig().find('weapons').get('path')
# Variables to manage the firing state (Used G36 as reference for defaults.)...
bullet = xml.find('bullet')
if bullet!=None:
self.bulletRate = float(bullet.get('rate',1.0/12.5))
self.bulletSpeed = float(bullet.get('speed',920.0))
self.bulletWeight = float(bullet.get('mass',0.004))
else:
self.bulletRate = 1.0/12.5
self.bulletSpeed = 920.0
self.bulletWeight = 0.004
# Determine the weapon meshes path...
self.meshPath = posixpath.join(basePath, xml.find('egg').get('file'))
# Get the camera interface, so we can zoom in when the player aims...
self.camera = manager.get(xml.find('camera').get('plugin'))
# Create our gun node - both the gun and the ray used for shooting track this - allows for gun jitter, kick back etc...
parent = xml.find('parent')
self.gunView.reparentTo(manager.get(parent.get('plugin')).getNode(parent.get('node')))
# Create a ray cast to detect what the player is looking at... and what will be shot...
self.space = manager.get('ode').getSpace()
if self.ray!=None:
self.ray.destroy()
self.ray = OdeRayGeom(100.0)
self.ray.setCategoryBits(BitMask32(0xfffffffe))
self.ray.setCollideBits(BitMask32(0xfffffffe))
# Get all the stuff we need to do the muzzle flash particle effect...
flash = xml.find('muzzle_flash')
self.flashManager = manager.get(flash.get('plugin'))
self.flashEffect = flash.get('effect')
self.flashBone = flash.get('bone') # Will be swapped out for the actual node latter.
self.flashPos = csp.getPos(flash.get('pos'))
# Get all the stuff we need to do the bullet hit sparks effect...
sparks = xml.find('sparks')
self.sparksManager = manager.get(sparks.get('plugin'))
self.sparksEffect = sparks.get('effect')
# Create a quaternion that rotates +ve z to +ve y - used to point it in the weapon direction rather than up...
self.zToY = Quat()
self.zToY.setFromAxisAngle(-90.0,Vec3(1.0,0.0,0.0))
# State for the animation...
self.state = False # False==casual, True==aim.
self.nextState = False
# Firing state...
self.firing = False # True if the trigger is being held.
self.triggerTime = 0.0 # How long the trigger has been held for, so we know when to eject ammo.
# For bullet holes
bh = xml.find('bullet_holes')
if bh != None:
self.bulletHoles = manager.get(bh.get('plugin'))
else:
self.bulletHoles = None
def postInit(self):
for i in self.postReload():
yield i
def postReload(self):
# Load the actor...
self.mesh = Actor(self.meshPath)
yield
# Shader generator makes it shiny, plus we need it in the right places in the render graph...
self.mesh.setShaderAuto()
self.mesh.reparentTo(self.gunView)
self.mesh.hide()
yield
# Set its animation going... except we pause it until needed...
self.nextAni()
self.interval.pause()
# Gun flash requires an exposed bone...
self.flashBone = self.mesh.exposeJoint(None,"modelRoot",self.flashBone)
yield
def gunControl(self,task):
# Update the gun direction ray to follow the players view...
self.ray.setPosition(self.gunView.getPos(render))
self.ray.setQuaternion(self.zToY.multiply(self.gunView.getQuat(render)))
# If the gun is firing update the trigger time, if a bullet is ejected do the maths...
if self.firing:
dt = globalClock.getDt()
self.triggerTime += dt
while self.triggerTime>self.bulletRate:
self.triggerTime -= self.bulletRate
hit,pos,norm = ray_cast.nearestHit(self.space,self.ray)
# Create a muzzle flash effect...
self.flashManager.doEffect(self.flashEffect, self.flashBone, True, self.flashPos)
if hit:
# Create an impact sparks effect...
# Calculate the reflection direction...
rd = self.ray.getDirection()
sparkDir = (norm * (2.0*norm.dot(rd))) - rd
# Convert the reflection direction into a quaternion that will rotate +ve z to the required direction...
try:
ang = -math.acos(sparkDir[2])
except ValueError: # acos domain error if sparkDir[2] falls just outside [-1, 1]
print 'Angle problem', sparkDir
ang = 0.0
axis = Vec3(0.0,0.0,1.0).cross(sparkDir)
axis.normalize()
sparkQuat = Quat()
sparkQuat.setFromAxisAngleRad(ang,axis)
# Set it going...
self.sparksManager.doEffect(self.sparksEffect, render, False, pos, sparkQuat)
# Make a bullet hole
if hit.hasBody() and isinstance(hit.getBody().getData(), NodePath):
self.bulletHoles.makeNew(pos, norm, hit.getBody().getData())
else:
self.bulletHoles.makeNew(pos, norm, None)
# Impart some energy on the object...
if hit and hit.hasBody():
body = hit.getBody()
# Calculate the force required to supply the energy the bullet contains to the body...
force = self.bulletWeight*self.bulletSpeed/0.05
# Get the direction of travel of the bullet, multiply by force...
d = self.ray.getDirection()
d *= force
# If the object is asleep awaken it...
if not body.isEnabled():
body.enable()
# Add the force to the object...
body.addForceAtPos(d,pos)
return task.cont
def start(self):
# Make the gun visible...
self.mesh.show()
# Set the gun animation going...
self.interval.finish()
# Weapon task - this primarily makes it shoot...
self.task = taskMgr.add(self.gunControl,'GunControl')
def stop(self):
self.interval.pause()
self.mesh.hide()
taskMgr.remove(self.task)
def nextAni(self):
self.state = self.nextState
if self.state:
ani = random.choice(('aim_wiggle_a','aim_wiggle_b','aim_wiggle_c'))
else:
ani = random.choice(('casual_wiggle_a','casual_wiggle_b','casual_wiggle_c'))
self.mesh.pose(ani,0)
self.interval = Sequence(self.mesh.actorInterval(ani),Func(self.nextAni))
self.interval.start()
def setAiming(self,s):
if self.nextState!=s:
self.interval.pause()
self.nextState = s
self.camera.setZoomed(s)
def wib():
self.interval.finish()
if s: ani = 'casual_aim'
else: ani = 'aim_casual'
transition = Sequence(self.mesh.actorInterval(ani),Func(wib))
transition.start()
def setFiring(self,s):
self.firing = s
if self.firing:
self.triggerTime = 0.0
|
shivamMg/malvo
|
teams/migrations/0008_auto_20160305_1811.py
|
Python
|
gpl-3.0
| 546
| 0.001832
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-05 18:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('teams', '0007_auto_20160305_1726'),
]
operations = [
migrations.AlterField(
model_name='team',
name='lang_pref',
field=models.CharField(choices=[('C', 'C'), ('J', 'Java')], default='0', max_length=1, verbose_name='programming language preference'),
),
]
|
cherrishes/weilai
|
xingxing/common/mqtt_helper.py
|
Python
|
apache-2.0
| 1,402
| 0.001664
|
import uuid
from paho.mqtt import publish
from paho.mqtt.client import MQTTv31
from conf.mqttconf import *
def send(msg, user_list, qos=2, retain=False):
"""
Publish an MQTT message
:param msg: message payload; may be a string, int or bytearray
:param user_list: list of user names (without the topic prefix), e.g. ["zhangsan","lilei"]
:param qos: quality of service (0: at most once, 1: at least once, 2: exactly once)
:param retain: whether to retain the message; if True, messages sent while a subscriber is offline are delivered once it reconnects, otherwise only devices currently online receive them
:return: 1 on success, -1 on a publish error, -2 if there was nothing to send
"""
auth = {"username": MOSQUITTO_PUB_USER, "password": MOSQUITTO_PUB_PWD}
client_id = MOSQUITTO_PREFIX + str(uuid.uuid1())
msgs = []
for i in user_list:
print(i)
msg_obj = dict()
msg_obj["qos"] = qos
msg_obj["retain"] = retain
msg_obj["topic"] = MOSQUITTO_TOPIC_PREFIX + str(i)
msg_obj["payload"] = msg
msgs.append(msg_obj)
if len(msgs) > 0 and msg:
print(msgs)
try:
publish.multiple(msgs, hostname=MOSQUITTO_HOST, port=MOSQUITTO_PORT, client_id=client_id, keepalive=60,
will=None, auth=auth, tls=None, protocol=MQTTv31)
ret = 1
except Exception as e:
print(str(e))
ret = -1
else:
ret = -2
return ret
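# Example call (the user names are illustrative; broker settings come from
# conf.mqttconf as imported above):
# ret = send("hello", ["zhangsan", "lilei"], qos=1, retain=False)
# ret is 1 on success, -1 on a publish error and -2 when there was nothing to send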
|
Andy-Thornton/drupal-security-updates
|
security-updates.py
|
Python
|
gpl-3.0
| 1,721
| 0.006973
|
#!/usr/bin/env python
import subprocess, os, sys
import colorama
from colorama import Fore, Back, Style
# Store paths and connection info
sourcetree = "/home/flux/Projects/hackday"
gitlabapi = ""
drupalbootstrap = sourcetree + "/drupal/includes/bootstrap.inc"
contrib = sourcetree + "/all/modules/contrib"
rpmmodules = sourcetree + "/drupal/modules"
rssDrupalCore = "http://drupal.org/security/rss.xml"
rssDrupalContrib = "http://drupal.org/security/contrib/rss.xml"
doAPI = "https://www.drupal.org/api-d7/node.json?type=project_module&field_project_machine_name="
# Start with Drupal core
print Fore.BLUE + ("=" * 7) + " Drupal Core " + ("=" * 7) + Style.RESET_ALL
print Fore.GREEN
with open(drupalbootstrap, 'r') as searchfile:
for line in searchfile:
if """define('VERSION',""" in line:
drupalversion = line.split("'")
print "-- Drupal Core: " + drupalversion[3]
print Style.RESET_ALL
# Function to iterate through a module path and pull the version numbers
def modulelist(modpath):
print Fore.BLUE + ("=" * 7) + " " + modpath + " " + ("=" * 7) + Style.RESET_ALL
dirs = os.listdir(modpath)
print Fore.GREEN
for module in dirs:
info = modpath + "/" + module + "/" + module + ".info"
try:
with open(info, 'r') as searchfile:
for line in searchfile:
if """version = """ in line:
moduleversion = line.split("version =")
if not "VERSION" in moduleversion[1]:
print "-- " + module + " " + moduleversion[1].replace('\"','')
except:
pass
print Style.RESET_ALL
modulelist(contrib)
modulelist(rpmmodules)
|
MCLConsortium/mcl-site
|
src/jpl.mcl.site.knowledge/src/jpl/mcl/site/knowledge/organfolder.py
|
Python
|
apache-2.0
| 544
| 0.001845
|
# encoding: utf-8
u'''MCL — Organ Folder'''
from ._base import IIngestableFolder, Ingestor, IngestableFolderView
from .interfaces import IOrgan
from five import grok
class IOrganFolder(IIngestableFolder):
u'''Folder containing body systems, also known as organs.'''
class OrganIngestor(Ingestor):
u'''RDF ingestor for organs.'''
grok.context(IOrganFolder)
def getContainedObjectInterface(self):
return IOrgan
class View(IngestableFolderView):
u'''View for an organ folder'''
grok.context(IOrganFolder)
|
p12tic/awn-extras
|
shared/python/awnmediaplayers.py
|
Python
|
gpl-2.0
| 28,094
| 0.003132
|
#!/usr/bin/python
# Copyright (c) 2007 Randal Barlow <im.tehk at gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import sys
import os
import subprocess
import atexit
import gobject
import pygtk
import gtk
from gtk import gdk
import awn
import dbus
from dbus.mainloop.glib import DBusGMainLoop
import string
try:
import mutagen.mp3
import mutagen.mp4
from mutagen.id3 import ID3
import tempfile
album_art_file = "%s/awnmediaplayer_%s.png" % (tempfile.gettempdir(), os.getenv('USERNAME'))
art_icon_from_tag = True
except ImportError:
art_icon_from_tag = False
if gtk.gtk_version >= (2, 18):
from urllib import unquote
DBusGMainLoop(set_as_default=True)
def cleanup():
if art_icon_from_tag:
try:
os.remove(album_art_file)
except OSError:
pass
atexit.register(cleanup)
def get_app_name():
player_name = None
bus_obj = dbus.SessionBus().get_object('org.freedesktop.DBus', '/org/freedesktop/DBus')
if bus_obj.NameHasOwner('org.gnome.Rhythmbox') == True:
player_name = "Rhythmbox"
elif bus_obj.NameHasOwner('org.exaile.DBusInterface') == True:
player_name = "Exaile"
elif bus_obj.NameHasOwner('org.gnome.Banshee') == True:
player_name = "Banshee"
elif bus_obj.NameHasOwner('org.bansheeproject.Banshee') == True:
player_name = "BansheeOne"
elif bus_obj.NameHasOwner('org.gnome.Listen') == True:
player_name = "Listen"
elif bus_obj.NameHasOwner('net.sacredchao.QuodLibet') == True:
player_name = "QuodLibet"
elif bus_obj.NameHasOwner('org.mpris.songbird') == True:
player_name = "Songbird"
elif bus_obj.NameHasOwner('org.mpris.vlc') == True:
player_name = "VLC"
elif bus_obj.NameHasOwner('org.mpris.audacious') == True:
player_name = "Audacious"
elif bus_obj.NameHasOwner('org.mpris.bmp') == True:
player_name = "BMP"
elif bus_obj.NameHasOwner('org.mpris.xmms2') == True:
player_name = "XMMS2"
elif bus_obj.NameHasOwner('org.mpris.amarok') == True:
player_name = "Amarok"
elif bus_obj.NameHasOwner('org.mpris.aeon') == True:
player_name = "Aeon"
elif bus_obj.NameHasOwner('org.mpris.dragonplayer') == True:
player_name = "DragonPlayer"
elif bus_obj.NameHasOwner('org.freedesktop.MediaPlayer') == True:
player_name = "mpDris"
elif bus_obj.NameHasOwner('org.mpris.clementine') == True:
player_name = "Clementine"
elif bus_obj.NameHasOwner('org.mpris.guayadeque') == True:
player_name = "Guayadeque"
return player_name
def player_available(executable):
"""Check if player is installed if it's not in 'Activatable Services' on DBus"""
for path in os.getenv('PATH').split(':'):
if path == '':
continue
if os.path.isfile(os.path.join(path, executable)):
return True
return False
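# Example: player_available('rhythmbox') returns True when an executable of
# that name is found on $PATH (the player name here is illustrative).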
def launch_player(args):
"""Launch player if this can't be done via DBus"""
try:
subprocess.Popen(args)
except OSError, e:
print "awnmediaplayer: error launching %s: %s" % (args, e)
return False
return True
class GenericPlayer(object):
"""Insert the level of support here"""
def __init__(self, dbus_name=None):
# set signalling_supported to True in your subclass's constructor if you use signal(s) which are received when currently played song changes (e.g. playingUriChanged signal)
self.signalling_supported = False
# set to DBus service name string in your subclass
self.dbus_base_name = dbus_name
self.song_change_cb = None
self.playing_changed_cb = None
self.dbus_driver()
def set_song_change_callback(self, cb):
self.song_change_cb = cb
def set_playing_changed_callback(self, cb):
self.playing_changed_cb = cb
def song_changed_emitter(self, *args, **kwargs):
if (self.song_change_cb):
self.song_change_cb()
def playing_changed_emitter(self, playing):
if (self.playing_changed_cb):
self.playing_changed_cb(playing)
def is_async(self):
"""
Returns True if this player class supports song change signalling.
"""
return self.signalling_supported
def is_available(self):
"""
Returns true if this player is present on the system.
Override if necessary.
"""
if (self.dbus_base_name != None):
bus_obj = dbus.SessionBus().get_object('org.freedesktop.DBus', '/org/freedesktop/DBus')
ACTIVATABLE_SERVICES = bus_obj.ListActivatableNames()
return self.dbus_base_name in ACTIVATABLE_SERVICES
return False
def start(self):
"""
Starts given player.
Override if necessary.
"""
if (self.dbus_base_name != None):
object_path = '/' + self.dbus_base_name.replace('.', '/')
try:
bus = dbus.SessionBus()
obj = bus.get_object(self.dbus_base_name, object_path)
return True
except Exception, e:
print "awnmediaplayer: error launching %s: %s" % (self.__class__.__name__, e)
return False
def get_dbus_name(self):
"""
Returns player's dbus name.
"""
return self.dbus_base_name
def dbus_driver(self):
"""
Defining the dbus location for GenericPlayer
Provides self.player and any other interfaces needed by get_media_info
and the button methods
"""
pass
def get_media_info(self):
"""
This method tries to get information about currently playing media
Returns
* dict result = dictionary of various information about media
(should always have at least the 'title' key)
"""
return {}
def is_playing(self):
"""
This method determines if the player is currently in 'playing' state
as opposed to 'paused' / 'stopped'
"""
return False
def previous(self):
pass
def play_pause(self):
pass
def next(self):
pass
def play_uri(self, uri):
"""
Immediately starts playing the specified URI.
"""
return False
def enqueue_uris(self, uris):
"""
Adds uris to current playlist.
"""
return False
class MPRISPlayer(GenericPlayer):
""" a default implementation of MPRIS """
def __init__(self, interface):
GenericPlayer.__init__(self, interface)
self.signalling_supported = True
def playing_changed_emitter(self, playing):
print "Status Change: ", playing
if (self.playing_changed_cb):
self.playing_changed_cb(playing[0] == 0)
def dbus_driver(self):
"""
Defining the dbus location for an MPRIS-compliant player
"""
bus_obj = dbus.SessionBus().get_object('org.freedesktop.DBus', '/org/freedesktop/DBus')
if bus_obj.NameHasOwner(self.dbus_base_name) == True:
self.session_bus = dbus.SessionBus()
self.player = self.session_bus.get_object(self.dbus_base_name, '/Player')
self.player.connect_to_signal('TrackChange', self.song_changed_emitter, member_keyword='member')
self.player.connect_to_signal('StatusChange', self.playing_changed_emitter)
def get_media_info(self):
self.dbus_driver()
|
indexofire/gork
|
src/gork/application/feedz/processors/__init__.py
|
Python
|
mit
| 91
| 0
|
# -*- coding: utf-8 -*-
from feedz.processors.content_filter import ContentFilterProcessor
|
cchurch/ansible-modules-core
|
cloud/amazon/rds_param_group.py
|
Python
|
gpl-3.0
| 10,548
| 0.005973
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rds_param_group
version_added: "1.5"
short_description: manage RDS parameter groups
description:
- Creates, modifies, and deletes RDS parameter groups. This module has a dependency on python-boto >= 2.5.
options:
state:
description:
- Specifies whether the group should be present or absent.
required: true
default: present
aliases: []
choices: [ 'present' , 'absent' ]
name:
description:
- Database parameter group identifier.
required: true
default: null
aliases: []
description:
description:
- Database parameter group description. Only set when a new group is added.
required: false
default: null
aliases: []
engine:
description:
- The type of database for this group. Required for state=present.
required: false
default: null
aliases: []
choices: [ 'aurora5.6', 'mariadb10.0', 'mysql5.1', 'mysql5.5', 'mysql5.6', 'mysql5.7', 'oracle-ee-11.2', 'oracle-ee-12.1', 'oracle-se-11.2', 'oracle-se-12.1', 'oracle-se1-11.2', 'oracle-se1-12.1', 'postgres9.3', 'postgres9.4', 'postgres9.5', 'sqlserver-ee-10.5', 'sqlserver-ee-11.0', 'sqlserver-ex-10.5', 'sqlserver-ex-11.0', 'sqlserver-ex-12.0', 'sqlserver-se-10.5', 'sqlserver-se-11.0', 'sqlserver-se-12.0', 'sqlserver-web-10.5', 'sqlserver-web-11.0', 'sqlserver-web-12.0' ]
immediate:
description:
- Whether to apply the changes immediately, or after the next reboot of any associated instances.
required: false
default: null
aliases: []
params:
description:
- Map of parameter names and values. Numeric values may be represented as K for kilo (1024), M for mega (1024^2), G for giga (1024^3), or T for tera (1024^4), and these values will be expanded into the appropriate number before being set in the parameter group.
required: false
default: null
aliases: []
author: "Scott Anderson (@tastychutney)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Add or change a parameter group, in this case setting auto_increment_increment to 42 * 1024
- rds_param_group:
state: present
name: norwegian_blue
description: 'My Fancy Ex Parrot Group'
engine: 'mysql5.6'
params:
auto_increment_increment: "42K"
# Remove a parameter group
- rds_param_group:
state: absent
name: norwegian_blue
'''
VALID_ENGINES = [
'aurora5.6',
'mariadb10.0',
'mysql5.1',
'mysql5.5',
'mysql5.6',
'mysql5.7',
'oracle-ee-11.2',
'oracle-ee-12.1',
'oracle-se-11.2',
'oracle-se-12.1',
'oracle-se1-11.2',
'oracle-se1-12.1',
'postgres9.3',
'postgres9.4',
'postgres9.5',
'sqlserver-ee-10.5',
'sqlserver-ee-11.0',
'sqlserver-ex-10.5',
'sqlserver-ex-11.0',
'sqlserver-ex-12.0',
'sqlserver-se-10.5',
'sqlserver-se-11.0',
'sqlserver-se-12.0',
'sqlserver-web-10.5',
'sqlserver-web-11.0',
'sqlserver-web-12.0',
]
try:
import boto.rds
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# returns a tuple: (whether or not a parameter was changed, the remaining parameters that weren't found in this parameter group)
class NotModifiableError(Exception):
def __init__(self, error_message, *args):
super(NotModifiableError, self).__init__(error_message, *args)
self.error_message = error_message
def __repr__(self):
return 'NotModifiableError: %s' % self.error_message
def __str__(self):
return 'NotModifiableError: %s' % self.error_message
INT_MODIFIERS = {
'K': 1024,
'M': pow(1024, 2),
'G': pow(1024, 3),
'T': pow(1024, 4),
}
TRUE_VALUES = ('on', 'true', 'yes', '1',)
def set_parameter(param, value, immediate):
"""
Allows setting parameters with 10M = 10* 1024 * 1024 and so on.
"""
converted_value = value
if param.type == 'string':
converted_value = str(value)
elif param.type == 'integer':
if isinstance(value, basestring):
try:
for modifier in INT_MODIFIERS.keys():
if value.endswith(modifier):
converted_value = int(value[:-1]) * INT_MODIFIERS[modifier]
converted_value = int(converted_value)
except ValueError:
# may be based on a variable (ie. {foo*3/4}) so
# just pass it on through to boto
converted_value = str(value)
elif type(value) == bool:
converted_value = 1 if value else 0
else:
converted_value = int(value)
elif param.type == 'boolean':
if isinstance(value, basestring):
converted_value = value in TRUE_VALUES
else:
converted_value = bool(value)
param.value = converted_value
param.apply(immediate)
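# A self-contained illustration of the suffix expansion performed above for
# integer parameters (plain arithmetic, no boto Parameter object involved):
# "42K" -> int("42K"[:-1]) * INT_MODIFIERS['K'] == 42 * 1024 == 43008
# "10M" -> int("10M"[:-1]) * INT_MODIFIERS['M'] == 10485760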
def modify_group(group, params, immediate=False):
""" Set all of the params in a group to the provided new params. Raises NotModifiableError if any of the
params to be changed are read only.
"""
changed = {}
new_params = dict(params)
for key in new_params.keys():
if key in group:
param = group[key]
new_value = new_params[key]
try:
old_value = param.value
except ValueError:
# some versions of boto have problems with retrieving
# integer values from params that may have their value
# based on a variable (ie. {foo*3/4}), so grab it in a
# way that bypasses the property functions
old_value = param._value
if old_value != new_value:
if not param.is_modifiable:
raise NotModifiableError('Parameter %s is not modifiable.' % key)
changed[key] = {'old': old_value, 'new': new_value}
set_parameter(param, new_value, immediate)
del new_params[key]
return changed, new_params
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state = dict(required=True, choices=['present', 'absent']),
name = dict(required=True),
engine = dict(required=False, choices=VALID_ENGINES),
description = dict(required=False),
params = dict(required=False, aliases=['parameters'], type='dict'),
immediate = dict(required=False, type='bool'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
group_name = module.params.get('name').lower()
group_engine = module.params.get('engine')
group_description = module.params.get('description')
group_params = module.params.get('params') or {}
immediate = module.params.get('immediate') or False
if state == 'present':
for required in ['name', 'description', 'engine']:
if not module.params.get(required):
module.fail_json(msg = str("Parameter %s required for state='present'" % required))
else:
for not_allowed in ['description', 'engine', 'params']:
if module.params.get(not_allowed):
module.fail_json(msg = str("P
|
h-hwang/octodns
|
tests/test_octodns_provider_dnsimple.py
|
Python
|
mit
| 6,915
| 0
|
#
#
#
from __future__ import absolute_import, division, print_function, \
unicode_literals
from mock import Mock, call
from os.path import dirname, join
from requests import HTTPError
from requests_mock import ANY, mock as requests_mock
from unittest import TestCase
from octodns.record import Record
from octodns.provider.dnsimple import DnsimpleClientNotFound, DnsimpleProvider
from octodns.provider.yaml import YamlProvider
from octodns.zone import Zone
class TestDnsimpleProvider(TestCase):
expected = Zone('unit.tests.', [])
source = YamlProvider('test', join(dirname(__file__), 'config'))
source.populate(expected)
# Our test suite differs a bit, add our NS and remove the simple one
expected.add_record(Record.new(expected, 'under', {
'ttl': 3600,
'type': 'NS',
'values': [
'ns1.unit.tests.',
'ns2.unit.tests.',
]
}))
for record in list(expected.records):
if record.name == 'sub' and record._type == 'NS':
expected._remove_record(record)
break
def test_populate(self):
provider = DnsimpleProvider('test', 'token', 42)
# Bad auth
with requests_mock() as mock:
mock.get(ANY, status_code=401,
text='{"message": "Authentication failed"}')
with self.assertRaises(Exception) as ctx:
zone = Zone('unit.tests.', [])
provider.populate(zone)
self.assertEquals('Unauthorized', ctx.exception.message)
# General error
with requests_mock() as mock:
mock.get(ANY, status_code=502, text='Things caught fire')
with self.assertRaises(HTTPError) as ctx:
zone = Zone('unit.tests.', [])
provider.populate(zone)
self.assertEquals(502, ctx.exception.response.status_code)
# Non-existent zone doesn't populate anything
with requests_mock() as mock:
mock.get(ANY, status_code=404,
text='{"message": "Domain `foo.bar` not found"}')
zone = Zone('unit.tests.', [])
provider.populate(zone)
self.assertEquals(set(), zone.records)
# No diffs == no changes
with requests_mock() as mock:
base = 'https://api.dnsimple.com/v2/42/zones/unit.tests/' \
'records?page='
with open('tests/fixtures/dnsimple-page-1.json') as fh:
mock.get('{}{}'.format(base, 1), text=fh.read())
with open('tests/fixtures/dnsimple-page-2.json') as fh:
mock.get('{}{}'.format(base, 2), text=fh.read())
zone = Zone('unit.tests.', [])
provider.populate(zone)
self.assertEquals(14, len(zone.records))
changes = self.expected.changes(zone, provider)
self.assertEquals(0, len(changes))
# 2nd populate makes no network calls/all from cache
again = Zone('unit.tests.', [])
provider.populate(again)
self.assertEquals(14, len(again.records))
# bust the cache
del provider._zone_records[zone.name]
# test handling of invalid content
with requests_mock() as mock:
with open('tests/fixtures/dnsimple-invalid-content.json') as fh:
mock.get(ANY, text=fh.read())
zone = Zone('unit.tests.', [])
provider.populate(zone)
self.assertEquals(set([
Record.new(zone, '', {
'ttl': 3600,
'type': 'SSHFP',
'values': []
}),
Record.new(zone, '_srv._tcp', {
'ttl': 600,
'type': 'SRV',
'values': []
}),
Record.new(zone, 'naptr', {
'ttl': 600,
'type': 'NAPTR',
'values': []
}),
]), zone.records)
def test_apply(self):
provider = DnsimpleProvider('test', 'token', 42)
resp = Mock()
resp.json = Mock()
provider._client._request = Mock(return_value=resp)
# non-existent domain, create everything
resp.json.side_effect = [
DnsimpleClientNotFound, # no zone in populate
DnsimpleClientNotFound, # no domain during apply
]
plan = provider.plan(self.expected)
# No root NS, no ignored
n = len(self.expected.records) - 2
self.assertEquals(n, len(plan.changes))
self.assertEquals(n, provider.apply(plan))
provider._client._request.assert_has_calls([
# created the domain
call('POST', '/domains', data={'name': 'unit.tests'}),
# created at least one of the record with expected data
call('POST', '/zones/unit.tests/records', data={
'content': '20 30 foo-1.unit.tests.',
'priority': 10,
'type': 'SRV',
'name': '_srv._tcp',
'ttl': 600
}),
])
# expected number of total calls
self.assertEquals(26, provider._client._request.call_count)
provider._client._request.reset_mock()
# delete 1 and update 1
provider._client.records = Mock(return_value=[
{
'id': 11189897,
'name': 'www',
'content': '1.2.3.4',
'ttl': 300,
'type': 'A',
},
{
'id': 11189898,
'name': 'www',
'content': '2.2.3.4',
'ttl': 300,
'type': 'A',
},
{
'id': 11189899,
'name': 'ttl',
'content': '3.2.3.4',
'ttl': 600,
'type': 'A',
}
])
# Domain exists, we don't care about return
resp.json.side_effect = ['{}']
wanted = Zone('unit.tests.', [])
wanted.add_record(Record.new(wanted, 'ttl', {
'ttl': 300,
'type': 'A',
'value': '3.2.3.4'
}))
plan = provider.plan(wanted)
self.assertEquals(2, len(plan.changes))
self.assertEquals(2, provider.apply(plan))
# recreate for update, and deletes for the 2 parts of the other
provider._client._request.assert_has_calls([
call('POST', '/zones/unit.tests/records', data={
'content': '3.2.3.4',
'type': 'A',
'name': 'ttl',
'ttl': 300
}),
call('DELETE', '/zones/unit.tests/records/11189899'),
call('DELETE', '/zones/unit.tests/records/11189897'),
call('DELETE', '/zones/unit.tests/records/11189898')
], any_order=True)
|
archlinux/archweb
|
public/tests.py
|
Python
|
gpl-2.0
| 1,786
| 0.00056
|
from django.test import TestCase
class PublicTest(TestCase):
fixtures = ['main/fixtures/arches.json', 'main/fixtures/repos.json',
'main/fixtures/package.json', 'main/fixtures/groups.json',
'devel/fixtures/staff_groups.json']
def test_index(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_about(self):
response = self.client.get('/about/')
self.assertEqual(response.status_code, 200)
def test_art(self):
response = self.client.get('/art/')
self.assertEqual(response.status_code, 200)
def test_svn(self):
response = self.client.get('/svn/')
self.assertEqual(response.status_code, 200)
def test_donate(self):
response = self.client.get('/donate/')
self.assertEqual(response.status_code, 200)
def test_download(self):
response = self.client.get('/download/')
self.assertEqual(response.status_code, 200)
def test_master_keys(self):
response = self.client.get('/master-keys/')
self.assertEqual(response.status_code, 200)
def test_master_keys_json(self):
response = self.client.get('/master-keys/json/')
self.assertEqual(response.status_code, 200)
def test_feeds(self):
response = self.client.get('/feeds/')
self.assertEqual(response.status_code, 200)
def test_people(self):
response = self.client.get('/people/developers/')
self.assertEqual(response.status_code, 200)
def test_sitemap(self):
sitemaps = ['sitemap', 'sitemap-base']
for sitemap in sitemaps:
response = self.client.get('/{}.xml'.format(sitemap))
self.assertEqual(response.status_code, 200)
|
chandler14362/panda3d
|
direct/src/distributed/DoInterestManager.py
|
Python
|
bsd-3-clause
| 29,162
| 0.002846
|
"""
The DoInterestManager keeps track of which parent/zones we currently
have interest in. When you want to "look" into a zone you add an interest
to that zone. When you want to get rid of, or ignore, the objects in that
zone, remove interest in that zone.
p.s. A great deal of this code is just code moved from ClientRepository.py.
"""
from panda3d.core import *
from panda3d.direct import *
from .MsgTypes import *
from direct.showbase.PythonUtil import *
from direct.showbase import DirectObject
from .PyDatagram import PyDatagram
from direct.directnotify.DirectNotifyGlobal import directNotify
import types
from direct.showbase.PythonUtil import report
class InterestState:
StateActive = 'Active'
StatePendingDel = 'PendingDel'
def __init__(self, desc, state, context, event, parentId, zoneIdList,
eventCounter, auto=False):
self.desc = desc
self.state = state
self.context = context
# We must be ready to keep track of multiple events. If somebody
# requested an interest to be removed and we get a second request
# for removal of the same interest before we get a response for the
# first interest removal, we now have two parts of the codebase
# waiting for a response on the removal of a single interest.
self.events = []
self.eventCounter = eventCounter
if event:
self.addEvent(event)
self.parentId = parentId
self.zoneIdList = zoneIdList
self.auto = auto
def addEvent(self, event):
self.events.append(event)
self.eventCounter.num += 1
def getEvents(self):
return list(self.events)
def clearEvents(self):
self.eventCounter.num -= len(self.events)
assert self.eventCounter.num >= 0
self.events = []
def sendEvents(self):
for event in self.events:
messenger.send(event)
self.clearEvents()
def setDesc(self, desc):
self.desc = desc
def isPendingDelete(self):
return self.state == InterestState.StatePendingDel
def __repr__(self):
return 'InterestState(desc=%s, state=%s, context=%s, event=%s, parentId=%s, zoneIdList=%s)' % (
self.desc, self.state, self.context, self.events, self.parentId, self.zoneIdList)
class InterestHandle:
"""This class helps to ensure that valid handles get passed in to DoInterestManager funcs"""
def __init__(self, id):
self._id = id
def asInt(self):
return self._id
def __eq__(self, other):
if type(self) == type(other):
return self._id == other._id
return self._id == other
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self._id)
# context value for interest changes that have no complete event
NO_CONTEXT = 0
class DoInterestManager(DirectObject.DirectObject):
"""
Top level Interest Manager
"""
notify = directNotify.newCategory("DoInterestManager")
InterestDebug = ConfigVariableBool('interest-debug', False)
# 'handle' is a number that represents a single interest set that the
# client has requested; the interest set may be modified
_HandleSerialNum = 0
# high bit is reserved for server interests
_HandleMask = 0x7FFF
# 'context' refers to a single request to change an interest set
_ContextIdSerialNum = 100
_ContextIdMask = 0x3FFFFFFF # avoid making Python create a long
_interests = {}
if __debug__:
_debug_interestHistory = []
_debug_maxDescriptionLen = 40
_SerialGen = SerialNumGen()
_SerialNum = serialNum()
def __init__(self):
assert DoInterestManager.notify.debugCall()
DirectObject.DirectObject.__init__(self)
self._addInterestEvent = uniqueName('DoInterestManager-Add')
self._removeInterestEvent = uniqueName('DoInterestManager-Remove')
self._noNewInterests = False
self._completeDelayedCallback = None
# keep track of request contexts that have not completed
self._completeEventCount = ScratchPad(num=0)
self._allInterestsCompleteCallbacks = []
def __verbose(self):
return self.InterestDebug.getValue() or self.getVerbose()
def _getAnonymousEvent(self, desc):
return 'anonymous-%s-%s' % (desc, DoInterestManager._SerialGen.next())
def setNoNewInterests(self, flag):
self._noNewInterests = flag
def noNewInterests(self):
return self._noNewInterests
def setAllInterestsCompleteCallback(self, callback):
if ((self._completeEventCount.num == 0) and
(self._completeDelayedCallback is None)):
callback()
else:
self._allInterestsCompleteCallbacks.append(callback)
def getAllInterestsCompleteEvent(self):
return 'allInterestsComplete-%s' % DoInterestManager._SerialNum
def resetInterestStateForConnectionLoss(self):
DoInterestManager._interests.clear()
self._completeEventCount = ScratchPad(num=0)
if __debug__:
self._addDebugInterestHistory("RESET", "", 0, 0, 0, [])
def isValidInterestHandle(self, handle):
# pass in a handle (or anything else) and this will return true if it is
# still a valid interest handle
if not isinstance(handle, InterestHandle):
return False
return handle.asInt() in DoInterestManager._interests
def updateInterestDescription(self, handle, desc):
iState = DoInterestManager._interests.get(handle.asInt())
if iState:
iState.setDesc(desc)
def addInterest(self, parentId, zoneIdList, description, event=None):
"""
Look into a (set of) zone(s).
"""
assert DoInterestManager.notify.debugCall()
handle = self._getNextHandle()
# print 'base.cr.addInterest(',description,',',handle,'):',globalClock.getFrameCount()
if self._noNewInterests:
DoInterestManager.notify.warning(
"addInterest: addingInterests on delete: %s" % (handle))
return
# make sure we've got parenting rules set in the DC
if parentId not in (self.getGameDoId(),):
parent = self.getDo(parentId)
if not parent:
DoInterestManager.notify.error(
'addInterest: attempting to add interest under unknown object %s' % parentId)
else:
if not parent.hasParentingRules():
DoInterestManager.notify.error(
'addInterest: no setParentingRules defined in the DC for object %s (%s)'
'' % (parentId, parent.__class__.__name__))
if event:
contextId = self._getNextContextId()
else:
contextId = 0
# event = self._getAnonymousEvent('addInterest')
DoInterestManager._interests[handle] = InterestState(
description, InterestState.StateActive, contextId, event, parentId, zoneIdList, self._completeEventCount)
if self.__verbose():
print('CR::INTEREST.addInterest(handle=%s, parentId=%s, zoneIdList=%s, description=%s, event=%s)' % (
handle, parentId, zoneIdList, description, event))
self._sendAddInterest(handle, contextId, parentId, zoneIdList, description)
if event:
messenger.send(self._getAddInterestEvent(), [event])
assert self.printInterestsIfDebug()
return InterestHandle(handle)
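    # Usage sketch (names hypothetical; removeInterest is defined elsewhere
    # in this class, outside this excerpt):
    #     handle = self.addInterest(parentId, [zoneId], 'shard-view',
    #                               event='shard-view-complete')
    #     ...
    #     self.removeInterest(handle)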
def addAutoInterest(self, parentId, zoneIdList, description):
"""
Look into a (set of) zone(s).
"""
assert DoInterestManager.notify.debugCall()
handle = self._getNextHandle()
if self._noNewInterests:
DoInterestManager.notify.warning(
"addInterest: addingInterests on delete: %s" % (handle))
return
# make sure we've got parenting rules set in the DC
if parentId not in (self.getGameDoId(),):
parent = self.getDo(parentId)
if not parent:
DoInterestManager.notify.error(
                    'addInterest: attempting to add interest under unknown object %s' % parentId)
|
zhreshold/mxnet
|
tests/python/unittest/test_extensions.py
|
Python
|
apache-2.0
| 7,787
| 0.010659
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This test checks if dynamic loading of library into MXNet is successful
import os
import platform
import mxnet as mx
import numpy as np
from mxnet import nd
from mxnet.gluon import nn
from mxnet.base import MXNetError
from mxnet.test_utils import download, is_cd_run, assert_almost_equal, default_context
import pytest
base_path = os.path.join(os.path.dirname(__file__), "../../..")
def check_platform():
return platform.machine() not in ['x86_64', 'AMD64']
@pytest.mark.skipif(check_platform(), reason="not all machine types supported")
@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
def test_custom_op():
# possible places to find library file
if (os.name=='posix'):
lib = 'libcustomop_lib.so'
if os.path.exists(lib):
fname = lib
elif os.path.exists(os.path.join(base_path,'build/'+lib)):
fname = os.path.join(base_path,'build/'+lib)
else:
raise MXNetError("library %s not found " % lib)
elif (os.name=='nt'):
lib = 'libcustomop_lib.dll'
if os.path.exists('windows_package\\lib\\'+lib):
fname = 'windows_package\\lib\\'+lib
else:
raise MXNetError("library %s not found " % lib)
fname = os.path.abspath(fname)
# load the library containing gemm custom operators
mx.library.load(fname)
# test symbol 2D gemm custom operators
s = mx.sym.Variable('s')
t = mx.sym.Variable('t')
c = mx.sym.my_gemm(s,t)
d = mx.sym.state_gemm(s,t)
# baseline gemm from MXNet
base = mx.sym.linalg.gemm2(s,t)
# get some random input matrices
dim_n, dim_k, dim_m = tuple(np.random.randint(1, 5, size=3))
mat1 = mx.nd.random.uniform(-10, 10, shape=(dim_n, dim_k), ctx=mx.cpu())
mat2 = mx.nd.random.uniform(-10, 10, shape=(dim_k, dim_m), ctx=mx.cpu())
# intermediate ndarrays to be populated by gradient compute
in_grad1 = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
in_grad2 = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
in_grad_base = [mx.nd.empty((dim_n,dim_k),ctx=mx.cpu()),mx.nd.empty((dim_k,dim_m),ctx=mx.cpu())]
exe1 = c.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad1)
exe2 = d.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad2)
exe_base = base.bind(ctx=mx.cpu(),args={'s':mat1,'t':mat2},args_grad=in_grad_base)
out1 = exe1.forward()
out2 = exe2.forward()
# test stateful operator by calling it multiple times
out2 = exe2.forward()
out_base = exe_base.forward()
# check that forward compute matches one executed by MXNet
assert_almost_equal(out_base[0].asnumpy(), out1[0].asnumpy(), rtol=1e-3, atol=1e-3)
assert_almost_equal(out_base[0].asnumpy(), out2[0].asnumpy(), rtol=1e-3, atol=1e-3)
# random output grad ndarray for gradient update
out_grad = mx.nd.ones((dim_n, dim_m), ctx=mx.cpu())
exe1.backward([out_grad])
exe2.backward([out_grad])
exe_base.backward([out_grad])
# check that gradient compute matches one executed by MXNet
assert_almost_equal(in_grad_base[0].asnumpy(), in_grad1[0].asnumpy(), rtol=1e-3, atol=1e-3)
assert_almost_equal(in_grad_base[0].asnumpy(), in_grad2[0].asnumpy(), rtol=1e-3, atol=1e-3)
@pytest.mark.skipif(check_platform(), reason="not all machine types supported")
@pytest.mark.skipif(is_cd_run(), reason="continuous delivery run - ignoring test")
def test_subgraph():
# possible places to find library file
if (os.name=='posix'):
lib = 'libsubgraph_lib.so'
if os.path.exists(lib):
# plain make build, when run in the CI
fname = lib
elif os.path.exists(os.path.join(base_path, 'build/'+lib)):
# plain cmake build when run in the CI
fname = os.path.join(base_path, 'build/'+lib)
else:
raise MXNetError("library %s not found " % lib)
elif (os.name=='nt'):
lib = 'libsubgraph_lib.dll'
if os.path.exists('windows_package\\lib\\'+lib):
# plain make build, when run in the CI
fname = 'windows_package\\lib\\'+lib
else:
# plain cmake build when run in the CI
raise MXNetError("library %s not found " % lib)
fname = os.path.abspath(fname)
mx.library.load(fname)
# test simple graph with add, exp and log operators, library supports exp/log
a = mx.sym.var('a')
b = mx.sym.var('b')
c = a + b
d = mx.sym.exp(c)
sym = mx.sym.log(d)
args = {'a':mx.nd.ones((3,2),ctx=mx.cpu()), 'b':mx.nd.ones((3,2),ctx=mx.cpu())}
arg_array = [mx.nd.ones((3,2),dtype='float32',ctx=mx.cpu()),
mx.nd.ones((3,2),dtype='float32',ctx=mx.cpu())]
# baseline - regular execution in MXNet
exe = sym.bind(ctx=mx.cpu(), args=args)
out = exe.forward()
    # without propagating shapes/types, passing a custom option to subgraph prop "myOpt"
# should not create subgraph since subgraph prop requires type info
mysym1 = sym.optimize_for("myProp", myOpt='yello')
exe1 = mysym1.bind(ctx=mx.cpu(), args=args)
out1 = exe1.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out1[0].asnumpy(), rtol=1e-3, atol=1e-3)
    # with propagating shapes/types, rejecting subgraph
# this tests creating the subgraph and having the subgraph prop reject it
mysym2 = sym.optimize_for("myProp", arg_array, reject=True)
exe2 = mysym2.bind(ctx=mx.cpu(), args=args)
out2 = exe2.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out2[0].asnumpy(), rtol=1e-3, atol=1e-3)
    # with propagating shapes/types
mysym3 = sym.optimize_for("myProp",arg_array)
exe3 = mysym3.bind(ctx=mx.cpu(), args=args)
out3 = exe3.forward()
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out3[0].asnumpy(), rtol=1e-3, atol=1e-3)
# Gluon Hybridize partitioning with shapes/types
sym_block = nn.SymbolBlock(sym, [a,b])
sym_block.initialize()
sym_block.hybridize(backend='myProp')
out4 = sym_block(mx.nd.ones((3,2)),mx.nd.ones((3,2)))
# check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out4[0].asnumpy(), rtol=1e-3, atol=1e-3)
# Gluon Hybridize partitioning with shapes/types
sym_block2 = nn.SymbolBlock(sym, [a,b])
sym_block2.initialize()
a_data = mx.nd.ones((3,2))
b_data = mx.nd.ones((3,2))
sym_block2.optimize_for(a_data, b_data, backend='myProp')
sym_block2.export('optimized')
    sym_block3 = nn.SymbolBlock.imports('optimized-symbol.json',['a','b'],
                                        'optimized-0000.params')
out5 = sym_block3(a_data, b_data)
    # check that result matches one executed by MXNet
assert_almost_equal(out[0].asnumpy(), out5[0].asnumpy(), rtol=1e-3, atol=1e-3)
|
manassolanki/erpnext
|
erpnext/accounts/doctype/account/chart_of_accounts/chart_of_accounts.py
|
Python
|
gpl-3.0
| 6,558
| 0.02516
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe, os, json
from frappe.utils import cstr
from unidecode import unidecode
from six import iteritems
def create_charts(company, chart_template=None, existing_company=None):
chart = get_chart(chart_template, existing_company)
if chart:
accounts = []
def _import_accounts(children, parent, root_type, root_account=False):
for account_name, child in iteritems(children):
if root_account:
root_type = child.get("root_type")
if account_name not in ["account_number", "account_type",
"root_type", "is_group", "tax_rate"]:
account_number = cstr(child.get("account_number")).strip()
account_name, account_name_in_db = add_suffix_if_duplicate(account_name,
account_number, accounts)
is_group = identify_is_group(child)
report_type = "Balance Sheet" if root_type in ["Asset", "Liability", "Equity"] \
else "Profit and Loss"
account = frappe.get_doc({
"doctype": "Account",
"account_name": account_name,
"company": company,
"parent_account": parent,
"is_group": is_group,
"root_type": root_type,
"report_type": report_type,
"account_number": account_number,
"account_type": child.get("account_type"),
"account_currency": frappe.db.get_value("Company", company, "default_currency"),
"tax_rate": child.get("tax_rate")
})
if root_account or frappe.local.flags.allow_unverified_charts:
account.flags.ignore_mandatory = True
account.flags.ignore_permissions = True
account.insert()
accounts.append(account_name_in_db)
_import_accounts(child, account.name, root_type)
_import_accounts(chart, None, None, root_account=True)
def add_suffix_if_duplicate(account_name, account_number, accounts):
if account_number:
account_name_in_db = unidecode(" - ".join([account_number,
account_name.strip().lower()]))
else:
account_name_in_db = unidecode(account_name.strip().lower())
if account_name_in_db in accounts:
count = accounts.count(account_name_in_db)
account_name = account_name + " " + cstr(count)
return account_name, account_name_in_db
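# Example (values illustrative): for account_number "1100" and account_name
# "Cash", account_name_in_db becomes "1100 - cash"; if that key was already
# collected once, the visible name is suffixed to "Cash 1" to avoid a duplicate.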
def identify_is_group(child):
if child.get("is_group"):
is_group = child.get("is_group")
elif len(set(child.keys()) - set(["account_type", "root_type", "is_group", "tax_rate", "account_number"])):
is_group = 1
else:
is_group = 0
return is_group
def get_chart(chart_template, existing_company=None):
chart = {}
if existing_company:
return get_account_tree_from_existing_company(existing_company)
elif chart_template == "Standard":
from erpnext.accounts.doctype.account.chart_of_accounts.verified import standard_chart_of_accounts
return standard_chart_of_accounts.get()
elif chart_template == "Standard with Numbers":
from erpnext.accounts.doctype.account.chart_of_accounts.verified \
import standard_chart_of_accounts_with_account_number
return standard_chart_of_accounts_with_account_number.get()
else:
folders = ("verified",)
if frappe.local.flags.allow_unverified_charts:
folders = ("verified", "unverified")
for folder in folders:
path = os.path.join(os.path.dirname(__file__), folder)
for fname in os.listdir(path):
fname = frappe.as_unicode(fname)
if fname.endswith(".json"):
with open(os.path.join(path, fname), "r") as f:
chart = f.read()
if chart and json.loads(chart).get("name") == chart_template:
return json.loads(chart).get("tree")
@frappe.whitelist()
def get_charts_for_country(country, with_standard=False):
charts = []
def _get_chart_name(content):
if content:
content = json.loads(content)
if (content and content.get("disabled", "No") == "No") \
or frappe.local.flags.allow_unverified_charts:
charts.append(content["name"])
country_code = frappe.db.get_value("Country", country, "code")
if country_code:
folders = ("verified",)
if frappe.local.flags.allow_unverified_charts:
folders = ("verified", "unverified")
for folder in folders:
path = os.path.join(os.path.dirname(__file__), folder)
if not os.path.exists(path):
                continue
for fname in os.listdir(path):
fname = frappe.as_unicode(fname)
if (fname.startswith(country_code) or fname.startswith(country)) and fname.endswith(".json"):
with open(os.path.join(path, fname), "r") as f:
_get_chart_name(f.read())
    # if more than one chart is returned, add the standard charts
if len(charts) != 1 or with_standard:
charts += ["Standard", "Standard with Numbers"]
return charts
def get_account_tree_from_existing_company(existing_company):
all_accounts = frappe.get_all('Account',
filters={'company': existing_company},
fields = ["name", "account_name", "parent_account", "account_type",
"is_group", "root_type", "tax_rate", "account_number"],
order_by="lft, rgt")
account_tree = {}
# fill in tree starting with root accounts (those with no parent)
if all_accounts:
build_account_tree(account_tree, None, all_accounts)
return account_tree
def build_account_tree(tree, parent, all_accounts):
# find children
parent_account = parent.name if parent else ""
children = [acc for acc in all_accounts if cstr(acc.parent_account) == parent_account]
# if no children, but a group account
if not children and parent.is_group:
tree["is_group"] = 1
tree["account_number"] = parent.account_number
# build a subtree for each child
for child in children:
# start new subtree
tree[child.account_name] = {}
# assign account_type and root_type
if child.account_number:
tree[child.account_name]["account_number"] = child.account_number
if child.account_type:
tree[child.account_name]["account_type"] = child.account_type
if child.tax_rate:
tree[child.account_name]["tax_rate"] = child.tax_rate
if not parent:
tree[child.account_name]["root_type"] = child.root_type
# call recursively to build a subtree for current account
build_account_tree(tree[child.account_name], child, all_accounts)
@frappe.whitelist()
def validate_bank_account(coa, bank_account):
accounts = []
chart = get_chart(coa)
if chart:
def _get_account_names(account_master):
for account_name, child in iteritems(account_master):
if account_name not in ["account_number", "account_type",
"root_type", "is_group", "tax_rate"]:
accounts.append(account_name)
_get_account_names(child)
_get_account_names(chart)
return (bank_account in accounts)
|
kiwiheretic/logos-v2
|
reddit/migrations/0021_redditcredentials_reddit_username.py
|
Python
|
apache-2.0
| 440
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
        ('reddit', '0020_pendingsubmissions_submitted'),
]
operations = [
migrations.AddField(
model_name='redditcredentials',
name='reddit_username',
field=models.CharField(max_length=50, null=True),
),
]
|
yubchen/Qlinter
|
sublimelinter.py
|
Python
|
mit
| 15,716
| 0.001145
|
#
# sublimelinter.py
# Part of SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ryan Hileman and Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter3
# License: MIT
#
"""This module provides the SublimeLinter plugin class and supporting methods."""
import os
import re
import sublime
import sublime_plugin
from .lint.linter import Linter
from .lint.highlight import HighlightSet
from .lint.queue import queue
from .lint import persist, util
def plugin_loaded():
"""The ST3 entry point for plugins."""
persist.plugin_is_loaded = True
persist.settings.load()
persist.printf('debug mode:', 'on' if persist.debug_mode() else 'off')
util.create_tempdir()
for linter in persist.linter_classes.values():
linter.initialize()
plugin = SublimeLinter.shared_plugin()
queue.start(plugin.lint)
util.generate_menus()
util.generate_color_scheme(from_reload=False)
util.install_syntaxes()
persist.settings.on_update_call(SublimeLinter.on_settings_updated)
# This ensures we lint the active view on a fresh install
window = sublime.active_window()
if window:
plugin.on_activated(window.active_view())
class SublimeLinter(sublime_plugin.EventListener):
"""The main ST3 plugin class."""
# We use this to match linter settings filenames.
LINTER_SETTINGS_RE = re.compile('^SublimeLinter(-.+?)?\.sublime-settings')
shared_instance = None
@classmethod
def shared_plugin(cls):
"""Return the plugin instance."""
return cls.shared_instance
def __init__(self, *args, **kwargs):
"""Initialize a new instance."""
super().__init__(*args, **kwargs)
# Keeps track of which views we have assigned linters to
self.loaded_views = set()
# Keeps track of which views have actually been linted
self.linted_views = set()
# A mapping between view ids and syntax names
self.view_syntax = {}
self.__class__.shared_instance = self
@classmethod
def lint_all_views(cls):
"""Simulate a modification of all views, which will trigger a relint."""
def apply(view):
if view.id() in persist.view_linters:
cls.shared_instance.hit(view)
util.apply_to_all_views(apply)
def lint(self, view_id, hit_time=None, callback=None):
"""
Lint the view with the given id.
This method is called asynchronously by persist.Daemon when a lint
request is pulled off the queue, or called synchronously when the
Lint command is executed or a file is saved and Show Errors on Save
is enabled.
If provided, hit_time is the time at which the lint request was added
to the queue. It is used to determine if the view has been modified
since the lint request was queued. If so, the lint is aborted, since
another lint request is already in the queue.
callback is the method to call when the lint is finished. If not
provided, it defaults to highlight().
"""
# If the view has been modified since the lint was triggered,
# don't lint again.
if hit_time is not None and persist.last_hit_times.get(view_id, 0) > hit_time:
return
view = Linter.get_view(view_id)
if view is None:
return
filename = view.file_name()
code = Linter.text(view)
callback = callback or self.highlight
Linter.lint_view(view, filename, code, hit_time, callback)
def highlight(self, view, linters, hit_time):
"""
Highlight any errors found during a lint of the given view.
This method is called by Linter.lint_view after linting is finished.
linters is a list of the linters that ran. hit_time has the same meaning
as in lint(), and if the view was modified since the lint request was
made, this method aborts drawing marks.
If the view has not been modified since hit_time, all of the marks and
errors from the list of linters are aggregated and drawn, and the status
is updated.
"""
vid = view.id()
# If the view has been modified since the lint was triggered,
# don't draw marks.
if hit_time is not None and persist.last_hit_times.get(vid, 0) > hit_time:
return
errors = {}
highlights = persist.highlights[vid] = HighlightSet()
for linter in linters:
if linter.highlight:
highlights.add(linter.highlight)
if linter.errors:
for line, errs in linter.errors.items():
errors.setdefault(line, []).extend(errs)
# Keep track of one view in each window that shares view's buffer
window_views = {}
buffer_id = view.buffer_id()
for window in sublime.windows():
wid = window.id()
for other_view in window.views():
if other_view.buffer_id() == buffer_id:
vid = other_view.id()
persist.highlights[vid] = highlights
highlights.clear(other_view)
highlights.draw(other_view)
persist.errors[vid] = errors
if window_views.get(wid) is None:
window_views[wid] = other_view
for view in window_views.values():
self.on_selection_modified_async(view)
def hit(self, view):
"""Record an activity that could trigger a lint and enqueue a desire to lint."""
vid = view.id()
self.check_syntax(view)
self.linted_views.add(vid)
if view.size() == 0:
for linter in Linter.get_linters(vid):
linter.clear()
return
persist.last_hit_times[vid] = queue.hit(view)
def check_syntax(self, view):
"""
Check and return if view's syntax has changed.
If the syntax has changed, a new linter is assigned.
"""
vid = view.id()
syntax = persist.get_syntax(view)
# Syntax either has never been set or just changed
if vid not in self.view_syntax or self.view_syntax[vid] != syntax:
self.view_syntax[vid] = syntax
Linter.assign(view, reset=True)
self.clear(view)
return True
else:
return False
def clear(self, view):
"""Clear all marks, errors and status from the given view."""
Linter.clear_view(view)
def is_scratch(self, view):
"""
Return whether a view is effectively scratch.
There is a bug (or feature) in the current ST3 where the Find panel
is not marked scratch but has no window.
There is also a bug where settings files opened from within .sublime-package
files are not marked scratch during the initial on_modified event, so we have
to check that a view with a filename actually exists on disk if the file
being opened is in the Sublime Text packages directory.
"""
if view.is_scratch() or view.is_read_only() or view.window() is None or view.settings().get("repl") is not None:
return True
elif (
view.file_name() and
            view.file_name().startswith(sublime.packages_path() + os.path.sep) and
            not os.path.exists(view.file_name())
        ):
return True
else:
return False
def view_has_file_only_linter(self, vid):
"""Return True if any linters for the given view are file-only."""
for lint in persist.view_linters.get(vid, []):
if lint.tempfile_suffix == '-':
return True
return False
    # sublime_plugin.EventListener event handlers
def on_modified(self, view):
"""Called when a view is modified."""
if self.is_scratch(view):
return
if view.id() not in persist.view_linters:
syntax_changed = self.check_syntax(view)
if not syntax_changed:
|
callowayproject/django-cookiesession
|
example/urls.py
|
Python
|
apache-2.0
| 222
| 0.004505
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^login/', 'django.contrib.auth.views.login'),
    (r'^admin/', include(admin.site.urls)),
)
|
shichao-an/adium-sh
|
adiumsh/settings.py
|
Python
|
bsd-2-clause
| 1,138
| 0
|
import os
from .utils import get_config
PACKAGE_PATH = os.path.abspath(os.path.dirname(__file__))
REPO_PATH = os.path.join(PACKAGE_PATH, os.pardir)
LOG_PATH = '~/Library/Application Support/Adium 2.0/Users/Default/Logs'
LOG_PATH = os.path.expanduser(LOG_PATH)
MOVED_LOG_PATH = os.path.expanduser('~/.adiumshlogs')
CONFIG_PATH = os.path.expanduser('~/.adiumsh') \
if not os.environ.get('ADIUMSH_TEST') \
else os.path.join(REPO_PATH, '.adiumsh')
default_config = get_config(CONFIG_PATH, 'default')
DEFAULT_ACCOUNT = \
default_config.get('account', None) if default_config else None
DEFAULT_SERVICE = \
default_config.get('service', None) if default_config else None
DEFAULT_BUDDY = \
    default_config.get('buddy', None) if default_config else None
DEFAULT_CHAT = \
default_config.get('chat', None) if default_config else None
EVENT_MESSAGE_RECEIVED = 'MESSAGE_RECEIVED'
EVENT_MESSAGE_SENT = 'MESSAGE_SENT'
EVENT_STATUS_AWAY = 'STATUS_AWAY'
EVENT_STATUS_ONLINE = 'STATUS_ONLINE'
EVENT_STATUS_OFFLINE = 'STATUS_OFFLINE'
EVENT_STATUS_CONNECTED = 'STATUS_CONNECTED'
EVENT_STATUS_DISCONNECTED = 'STATUS_DISCONNECTED'
|
forestdussault/olc_webportalv2
|
olc_webportalv2/users/middleware.py
|
Python
|
mit
| 1,413
| 0
|
from django.http import HttpResponseRedirect
from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
from re import compile
EXEMPT_URLS = [compile(settings.LOGIN_URL.lstrip('/'))]
if hasattr(settings, 'LOGIN_EXEMPT_URLS'):
EXEMPT_URLS += [compile(expr) for expr in settings.LOGIN_EXEMPT_URLS]
class LoginRequiredMiddleware(MiddlewareMixin):
"""
Middleware that requires a user to be authenticated to view any page other
than LOGIN_URL. Exemptions to this requirement can optionally be specified
in settings via a list of regular expressions in LOGIN_EXEMPT_URLS (which
you can copy from your urls.py).
    Requires authentication middleware and template context processors to be
loaded. You'll get an error if they aren't.
"""
def process_request(self, request):
assert hasattr(request, 'user'), "The Login Required middleware\
requires authentication middleware to be installed. Edit your\
MIDDLEWARE_CLASSES setting to insert\
        'django.contrib.auth.middleware.AuthenticationMiddleware'. If that doesn't\
work, ensure your TEMPLATE_CONTEXT_PROCESSORS setting includes\
'django.core.context_processors.auth'."
if not request.user.is_authenticated():
path = request.path_info.lstrip('/')
if not any(m.match(path) for m in EXEMPT_URLS):
return HttpResponseRedirect(settings.LOGIN_URL)
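# Companion settings sketch (values assumed, not part of this module):
#     LOGIN_URL = '/login/'
#     LOGIN_EXEMPT_URLS = (r'^about\.html$', r'^legal/')  # matched after the leading '/' is stripped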
|
Gentux/etalage
|
etalage/contexts.py
|
Python
|
agpl-3.0
| 7,875
| 0.009524
|
# -*- coding: utf-8 -*-
# Etalage -- Open Data POIs portal
# By: Emmanuel Raviart <eraviart@easter-eggs.com>
#
# Copyright (C) 2011, 2012 Easter-eggs
# http://gitorious.org/infos-pratiques/etalage
#
# This file is part of Etalage.
#
# Etalage is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Etalage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Context loaded and saved in WSGI requests"""
import gettext
import webob
from . import conf
__all__ = ['Ctx', 'null_ctx']
class Ctx(object):
_parent = None
default_values = dict(
_lang = None,
_scopes = UnboundLocalError,
_translator = None,
base_categories_slug = None,
category_tags_slug = None,
container_base_url = None,
distance = None, # Max distance in km
gadget_id = None,
hide_directory = False,
req = None,
subscriber = None,
)
env_keys = ('_lang', '_scopes', '_translator')
def __init__(self, req = None):
if req is not None:
self.req = req
etalage_env = req.environ.get('etalage', {})
for key in object.__getattribute__(self, 'env_keys'):
value = etalage_env.get(key)
if value is not None:
setattr(self, key, value)
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except AttributeError:
parent = object.__getattribute__(self, '_parent')
if parent is None:
default_values = object.__getattribute__(self, 'default_values')
if name in default_values:
return default_values[name]
raise
return getattr(parent, name)
@property
def _(self):
return self.translator.ugettext
def blank_req(self, path, environ = None, base_url = None, headers = None, POST = None, **kw):
env = environ.copy() if environ else {}
etalage_env = env.setdefault('etalage', {})
for key in self.env_keys:
value = getattr(self, key)
if value is not None:
etalage_env[key] = value
return webob.Request.blank(path, environ = env, base_url = base_url, headers = headers, POST = POST, **kw)
def get_containing(self, name, depth = 0):
"""Return the n-th (n = ``depth``) context containing attribute named ``name``."""
ctx_dict = object.__getattribute__(self, '__dict__')
if name in ctx_dict:
if depth <= 0:
return self
depth -= 1
parent = ctx_dict.get('_parent')
if parent is None:
return None
return parent.get_containing(name, depth = depth)
def get_inherited(self, name, default = UnboundLocalError, depth = 1):
ctx = self.get_containing(name, depth = depth)
if ctx is None:
if default is UnboundLocalError:
raise AttributeError('Attribute %s not found in %s' % (name, self))
return default
return object.__getattribute__(ctx, name)
def iter(self):
yield self
parent = object.__getattribute__(self, '_parent')
if parent is not None:
for ancestor in parent.iter():
yield ancestor
def iter_containing(self, name):
ctx_dict = object.__getattribute__(self, '__dict__')
if name in ctx_dict:
yield self
parent = ctx_dict.get('_parent')
if parent is not None:
for ancestor in parent.iter_containing(name):
yield ancestor
def iter_inherited(self, name):
for ctx in self.iter_containing(name):
yield object.__getattribute__(ctx, name)
def lang_del(self):
del self._lang
if self.req is not None and self.req.environ.get('etalage') is not None \
and '_lang' in self.req.environ['etalage']:
del self.req.environ['etalage']['_lang']
def lang_get(self):
if self._lang is None:
# self._lang = self.req.accept_language.best_matches('en-US') if self.req is not None else []
# Note: Don't forget to add country-less language code when only a "language-COUNTRY" code is given.
self._lang = ['fr-FR', 'fr']
if self.req is not None:
self.req.environ.setdefault('etalage', {})['_lang'] = self._lang
return self._lang
def lang_set(self, lang):
self._lang = lang
if self.req is not None:
self.req.environ.setdefault('etalage', {})['_lang'] = self._lang
# Reinitialize translator for new languages.
if self._translator is not None:
# Don't del self._translator, because attribute _translator can be defined in a parent.
self._translator = None
if self.req is not None and self.req.environ.get('etalage') is not None \
and '_translator' in self.req.environ['etalage']:
del self.req.environ['etalage']['_translator']
lang = property(lang_get, lang_set, lang_del)
def new(self, **kwargs):
ctx = Ctx()
ctx._parent = self
for name, value in kwargs.iteritems():
setattr(ctx, name, value)
return ctx
@property
def parent(self):
return object.__getattribute__(self, '_parent')
def scopes_del(self):
del self._scopes
if self.req is not None and self.req.environ.get('wenoit_etalage') is not None \
and '_scopes' in self.req.environ['wenoit_etalage']:
del self.req.environ['wenoit_etalage']['_scopes']
def scopes_get(self):
return self._scopes
def scopes_set(self, scopes):
self._scopes = scopes
if self.req is not None:
self.req.environ.setdefault('wenoit_etalage', {})['_scopes'] = scopes
scopes = property(scopes_get, scopes_set, scopes_del)
@property
def session(self):
return self.req.environ.get('beaker.session') if self.req is not None else None
@property
def translator(self):
"""Get a valid translator object from one or several languages names."""
if self._translator is None:
languages = self.lang
if not languages:
return gettext.NullTranslations()
if not isinstance(languages, list):
languages = [languages]
translator = gettext.NullTranslations()
i18n_dir_by_plugin_name = conf['i18n_dir_by_plugin_name'] or {}
for name, i18n_dir in [
('biryani', conf['biryani_i18n_dir']),
(conf['package_name'], conf['i18n_dir']),
] + sorted(i18n_dir_by_plugin_name.iteritems()):
if name is not None and i18n_dir is not None:
translator = new_translator(name, i18n_dir, languages, fallback = translator)
self._translator = translator
return self._translator
null_ctx = Ctx()
null_ctx.lang = ['fr-FR', 'fr']
def new_translator(domain, localedir, languages, fallback = None):
new = gettext.translation(domain, localedir, fallback = True, languages = languages)
if fallback is not None:
new.add_fallback(fallback)
return new
|
alhashash/odoomrp-wip
|
stock_lock_lot/wizard/__init__.py
|
Python
|
agpl-3.0
| 288
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from . import wiz_lock_lot
|
candYgene/abg-ld
|
src/tmp/ontomap_breed.py
|
Python
|
apache-2.0
| 894
| 0.021253
|
#!/usr/bin/env python
#
# This script maps breed names to Livestock Breed Ontology (LBO) IDs.
#
# Input files:
# ONTO.tsv with two columns: id (format: <ontology_acronym>:<term_id>) and name
# pigQTLdb.tsv with two columns: qtl_id and breed (comma-separated field of values)
#
# Output (STDOUT):
# three columns separated by \t: id, name, LBO breed IDs
#
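# Worked example (IDs hypothetical): an ONTO.tsv line "LBO:0000123<TAB>duroc"
# fills lookup['duroc'] = 'LBO:0000123'; a pigQTLdb.tsv line
# "QTL42<TAB>Duroc,Landrace" is then echoed with a third column of matched
# IDs, e.g. "QTL42<TAB>Duroc,Landrace<TAB>LBO:0000123,LBO:0000456".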
sep = '\t'
file1 = '../data/ONTO.tsv'
file2 = '../data/pigQTLdb.tsv'
lookup = dict()
with open(file1) as fin:
for ln in fin:
ln = ln.rstrip()
id, name = ln.split(sep)
lookup[name.lower()] = id
with open(file2) as fin:
for ln in fin:
ln = ln.rstrip()
cols = ln.split(sep)
if len(cols) == 2:
ids = []
for b in cols[1].lower().split(','):
if b in lookup:
ids.append(lookup[b])
print(ln + sep + ','.join(ids))
else:
print(ln)
|
skibblenybbles/django-commando
|
commando/django/core/management/commands/sqlclear.py
|
Python
|
mit
| 50
| 0
|
from ..sqlclear import SQLClearCommand as Command
| |
psusloparov/sneeze
|
pocket/setup.py
|
Python
|
apache-2.0
| 492
| 0.014228
|
from setuptools import setup, find_packages
setup(name='pocket',
version='0.0.0',
packages=find_packages(),
install_requires=['sneeze'],
entry_points={'nose.plugins.sneeze.plugins.add_models' : ['pocket_models = pocket.database:add_models'],
'nose.plugins.sneeze.plugins.add_options' : ['pocket_options = pocket.log_lib:add_options'],
                    'nose.plugins.sneeze.plugins.managers' : ['pocket_manager = pocket.log_lib:TissueHandler']})
|
codeforamerica/pittsburgh-purchasing-suite
|
purchasing/conductor/manager/flow_management.py
|
Python
|
bsd-3-clause
| 2,911
| 0.002061
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, url_for, flash, abort
from purchasing.decorators import requires_roles
from purchasing.data.stages import Stage
from purchasing.data.flows import Flow
from purchasing.conductor.forms import FlowForm, NewFlowForm
from purchasing.conductor.manager import blueprint
@blueprint.route('/flow/new', methods=['GET', 'POST'])
@requires_roles('conductor', 'admin', 'superadmin')
def new_flow():
'''Create a new flow
:status 200: Render the new flow template
:status 302: Try to create a new flow using the
:py:class:`~purchasing.conductor.forms.NewFlowForm`, redirect
to the flows list view if successful
'''
stages = Stage.choices_factory()
form = NewFlowForm(stages=stages)
if form.validate_on_submit():
stage_order = []
for entry in form.stage_order.entries:
# try to evaluate the return value as an ID
try:
stage_id = int(entry.data)
# otherwise it's a new stage
except ValueError:
new_stage = Stage.create(name=entry.data)
stage_id = new_stage.id
stage_order.append(stage_id)
Flow.create(flow_name=form.flow_name.data, stage_order=stage_order)
flash('Flow created successfully!', 'alert-success')
return redirect(url_for('conductor.flows_list'))
return render_template('conductor/flows/new.html', stages=stages, form=form)
@blueprint.route('/flows')
@requires_roles('conductor', 'admin', 'superadmin')
def flows_list():
'''List all flows
:status 200: Render the all flows list template
'''
flows = Flow.query.order_by(Flow.flow_name).all()
active, archived = [], []
for flow in flows:
if flow.is_archived:
archived.append(flow)
else:
active.append(flow)
return render_template('conductor/flows/browse.html', active=active, archived=archived)
@blueprint.route('/flow/<int:flow_id>', methods=['GET', 'POST'])
@requires_roles('conductor', 'admin', 'superadmin')
def flow_detail(flow_id):
'''View/edit a flow's details
:status 200: Render the flow edit template
    :status 302: Post changes to a flow using the submitted
        :py:class:`~purchasing.conductor.forms.FlowForm`, redirect back to
        the current flow's detail page if successful
'''
flow = Flow.query.get(flow_id)
if flow:
form = FlowForm(obj=flow)
if form.validate_on_submit():
flow.update(
flow_name=form.data['flow_name'],
is_archived=form.data['is_archived']
)
flash('Flow successfully updated', 'alert-success')
return redirect(url_for('conductor.flow_detail', flow_id=flow.id))
return render_template('conductor/flows/edit.html', form=form, flow=flow)
abort(404)
|
muchu1983/104_cameo
|
test/unit/test_spiderForPEDAILY.py
|
Python
|
bsd-3-clause
| 1,237
| 0.00841
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import unittest
import logging
from cameo.spiderForPEDAILY import SpiderForPEDAILY
"""
Test crawling PEDAILY
"""
class SpiderForPEDAILYTest(unittest.TestCase):
    # setup
def setUp(self):
logging.basicConfig(level=logging.INFO)
self.spider = SpiderForPEDAILY()
self.spider.initDriver()
    # teardown
    def tearDown(self):
self.spider.quitDriver()
"""
    # test crawling the index page
def test_downloadIndexPage(self):
logging.info("SpiderForPEDAILYTest.test_downloadIndexPage")
self.spider.downloadIndexPage()
    # test crawling a category page
def test_downloadCategoryPage(self):
logging.info("SpiderForPEDAILYTest.test_downloadCategoryPage")
self.spider.downloadCategoryPage()
"""
    # test crawling a news page
def test_downloadNewsPage(self):
logging.info("SpiderForPEDAILYTest.test_downloadNewsPage")
self.spider.downloadNewsPage(strCategoryName=None)
# start the tests
if __name__ == "__main__":
unittest.main(exit=False)
|
javier-ruiz-b/docker-rasppi-images
|
raspberry-google-home/env/lib/python3.7/site-packages/rsa/util.py
|
Python
|
apache-2.0
| 2,986
| 0.00067
|
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions."""
import sys
from optparse import OptionParser
import rsa.key
def private_to_public() -> None:
"""Reads a private key and outputs the corresponding public key."""
# Parse the CLI options
parser = OptionParser(usage='usage: %prog [options]',
description='Reads a private key and outputs the '
'corresponding public key. Both private and public keys use '
'the format described in PKCS#1 v1.5')
parser.add_option('-i', '--input', dest='infilename', type='string',
help='Input filename. Reads from stdin if not specified')
parser.add_option('-o', '--output', dest='outfilename', type='string',
                      help='Output filename. Writes to stdout if not specified')
parser.add_option('--inform', dest='inform',
help='key format of input - default PEM',
choices=('PEM', 'DER'), default='PEM')
parser.add_option('--outform', dest='outform',
help='key format of output - default PEM',
choices=('PEM', 'DER'), default='PEM')
(cli, cli_args) = parser.parse_args(sys.argv)
# Read the input data
if cli.infilename:
print('Reading private key from %s in %s format' %
(cli.infilename, cli.inform), file=sys.stderr)
with open(cli.infilename, 'rb') as infile:
in_data = infile.read()
else:
print('Reading private key from stdin in %s format' % cli.inform,
file=sys.stderr)
in_data = sys.stdin.read().encode('ascii')
assert type(in_data) == bytes, type(in_data)
# Take the public fields and create a public key
priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)
# Save to the output file
out_data = pub_key.save_pkcs1(cli.outform)
if cli.outfilename:
print('Writing public key to %s in %s format' %
(cli.outfilename, cli.outform), file=sys.stderr)
with open(cli.outfilename, 'wb') as outfile:
outfile.write(out_data)
else:
print('Writing public key to stdout in %s format' % cli.outform,
file=sys.stderr)
sys.stdout.write(out_data.decode('ascii'))
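# Usage sketch: the rsa package normally exposes this function as the
# `pyrsa-priv2pub` console script (per its setup entry points), e.g.:
#     pyrsa-priv2pub -i private.pem -o public.pem --inform PEM --outform PEM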
|
yxping/leetcode
|
solutions/257.Binary_Tree_Paths/AC_dfs_n.py
|
Python
|
gpl-2.0
| 836
| 0.010766
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
# Author: illuz <iilluzen[at]gmail.com>
# File: AC_dfs_n.py
# Create Date: 2015-08-16 10:15:54
# Usage: AC_dfs_n.py
# Descripton:
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param {TreeNode} root
# @return {string[]}
def binaryTreePaths(self, root):
        path = []
paths = []
def dfs(root):
if root:
                path.append(str(root.val))
if root.left == None and root.right == None:
paths.append('->'.join(path))
dfs(root.left)
dfs(root.right)
path.pop()
dfs(root)
return paths
|
idrogeno/IdroMips
|
lib/python/Components/VfdSymbols.py
|
Python
|
gpl-2.0
| 4,865
| 0.027955
|
from twisted.internet import threads
from config import config
from enigma import eDBoxLCD, eTimer, iPlayableService
import NavigationInstance
from Tools.Directories import fileExists
from Components.ParentalControl import parentalControl
from Components.ServiceEventTracker import ServiceEventTracker
from Components.SystemInfo import SystemInfo
from boxbranding import getBoxType
POLLTIME = 5 # seconds
def SymbolsCheck(session, **kwargs):
global symbolspoller, POLLTIME
    if getBoxType() in ('dummy',):
POLLTIME = 1
symbolspoller = SymbolsCheckPoller(session)
symbolspoller.start()
class SymbolsCheckPoller:
def __init__(self, session):
self.session = session
self.blink = False
self.led = "0"
self.timer = eTimer()
self.onClose = []
self.__event_tracker = ServiceEventTracker(screen=self,eventmap=
{
iPlayableService.evUpdatedInfo: self.__evUpdatedInfo,
})
def __onClose(self):
pass
def start(self):
if self.symbolscheck not in self.timer.callback:
self.timer.callback.append(self.symbolscheck)
self.timer.startLongTimer(0)
def stop(self):
if self.symbolscheck in self.timer.callback:
self.timer.callback.remove(self.symbolscheck)
self.timer.stop()
def symbolscheck(self):
threads.deferToThread(self.JobTask)
self.timer.startLongTimer(POLLTIME)
def JobTask(self):
self.Recording()
self.PlaySymbol()
self.timer.startLongTimer(POLLTIME)
def __evUpdatedInfo(self):
self.service = self.session.nav.getCurrentService()
self.Subtitle()
self.ParentalControl()
self.PlaySymbol()
del self.service
def Recording(self):
if fileExists("/proc/stb/lcd/symbol_circle"):
recordings = len(NavigationInstance.instance.getRecordings())
if recordings > 0:
open("/proc/stb/lcd/symbol_circle", "w").write("3")
else:
open("/proc/stb/lcd/symbol_circle", "w").write("0")
        elif getBoxType() in ('dummy',):
recordings = len(NavigationInstance.instance.getRecordings())
if recordings > 0:
open("/proc/stb/lcd/symbol_recording", "w").write("1")
else:
open("/proc/stb/lcd/symbol_recording", "w").write("0")
        elif getBoxType() in ('dummy',):
            recordings = len(NavigationInstance.instance.getRecordings())
self.blink = not self.blink
if recordings > 0:
if self.blink:
open("/proc/stb/lcd/powerled", "w").write("1")
self.led = "1"
else:
open("/proc/stb/lcd/powerled", "w").write("0")
|
self.led = "0"
elif self.led == "1":
open("/proc/stb/lcd/powerled", "w").write("0")
        elif getBoxType() in ('dummy',):
recordings = len(NavigationInstance.instance.getRecordings())
self.blink = not self.blink
if recordings > 0:
if self.blink:
open("/proc/stb/lcd/powerled", "w").write("0")
self.led = "1"
else:
open("/proc/stb/lcd/powerled", "w").write("1")
self.led = "0"
elif self.led == "1":
open("/proc/stb/lcd/powerled", "w").write("1")
else:
if not fileExists("/proc/stb/lcd/symbol_recording") or not fileExists("/proc/stb/lcd/symbol_record_1") or not fileExists("/proc/stb/lcd/symbol_record_2"):
return
recordings = len(NavigationInstance.instance.getRecordings())
if recordings > 0:
open("/proc/stb/lcd/symbol_recording", "w").write("1")
if recordings == 1:
open("/proc/stb/lcd/symbol_record_1", "w").write("1")
open("/proc/stb/lcd/symbol_record_2", "w").write("0")
elif recordings >= 2:
open("/proc/stb/lcd/symbol_record_1", "w").write("1")
open("/proc/stb/lcd/symbol_record_2", "w").write("1")
else:
open("/proc/stb/lcd/symbol_recording", "w").write("0")
open("/proc/stb/lcd/symbol_record_1", "w").write("0")
open("/proc/stb/lcd/symbol_record_2", "w").write("0")
def Subtitle(self):
if not fileExists("/proc/stb/lcd/symbol_smartcard"):
return
subtitle = self.service and self.service.subtitle()
subtitlelist = subtitle and subtitle.getSubtitleList()
if subtitlelist:
subtitles = len(subtitlelist)
if subtitles > 0:
open("/proc/stb/lcd/symbol_smartcard", "w").write("1")
else:
open("/proc/stb/lcd/symbol_smartcard", "w").write("0")
else:
open("/proc/stb/lcd/symbol_smartcard", "w").write("0")
def ParentalControl(self):
if not fileExists("/proc/stb/lcd/symbol_parent_rating"):
return
service = self.session.nav.getCurrentlyPlayingServiceReference()
if service:
if parentalControl.getProtectionLevel(service.toCompareString()) == -1:
open("/proc/stb/lcd/symbol_parent_rating", "w").write("0")
else:
open("/proc/stb/lcd/symbol_parent_rating", "w").write("1")
else:
open("/proc/stb/lcd/symbol_parent_rating", "w").write("0")
def PlaySymbol(self):
if not fileExists("/proc/stb/lcd/symbol_play "):
return
if SystemInfo["SeekStatePlay"]:
open("/proc/stb/lcd/symbol_play ", "w").write("1")
else:
open("/proc/stb/lcd/symbol_play ", "w").write("0")
|
mrpau/kolibri
|
kolibri/core/content/test/test_zipcontent.py
|
Python
|
mit
| 12,256
| 0.003264
|
import hashlib
import os
import tempfile
import zipfile
from bs4 import BeautifulSoup
from django.test import Client
from django.test import TestCase
from mock import patch
from ..models import LocalFile
from ..utils.paths import get_content_storage_file_path
from kolibri.core.auth.test.helpers import provision_device
from kolibri.utils.tests.helpers import override_option
DUMMY_FILENAME = "hashi123.js"
empty_content = '<html><head><script src="/static/content/hashi123.js"></script></head><body></body></html>'
@patch("kolibri.core.content.views.get_hashi_filename", return_value=DUMMY_FILENAME)
@override_option("Paths", "CONTENT_DIR", tempfile.mkdtemp())
class ZipContentTestCase(TestCase):
"""
Testcase for zipcontent endpoint
"""
index_name = "index.html"
index_str = "<html></html>"
other_name = "other.html"
other_str = "<html><head></head></html>"
script_name = "script.html"
script_str = "<html><head><script>test</script></head></html>"
async_script_name = "async_script.html"
    async_script_str = (
        '<html><head><script async src="url/url.js"></script></head></html>'
    )
empty_html_name = "empty.html"
empty_html_str = ""
doctype_name = "doctype.html"
doctype = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
"""
doctype_str = doctype + "<html><head><script>test</script></head></html>"
html5_doctype_name = "html5_doctype.html"
html5_doctype = "<!DOCTYPE HTML>"
html5_doctype_str = (
html5_doctype + "<html><head><script>test</script></head></html>"
)
test_name_1 = "testfile1.txt"
test_str_1 = "This is a test!"
test_name_2 = "testfile2.txt"
test_str_2 = "And another test..."
embedded_file_name = "test/this/path/test.txt"
embedded_file_str = "Embedded file test"
def setUp(self):
self.client = Client()
provision_device()
self.hash = hashlib.md5("DUMMYDATA".encode()).hexdigest()
self.extension = "zip"
self.filename = "{}.{}".format(self.hash, self.extension)
self.zip_path = get_content_storage_file_path(self.filename)
zip_path_dir = os.path.dirname(self.zip_path)
if not os.path.exists(zip_path_dir):
os.makedirs(zip_path_dir)
with zipfile.ZipFile(self.zip_path, "w") as zf:
zf.writestr(self.index_name, self.index_str)
zf.writestr(self.other_name, self.other_str)
zf.writestr(self.script_name, self.script_str)
zf.writestr(self.async_script_name, self.async_script_str)
zf.writestr(self.empty_html_name, self.empty_html_str)
zf.writestr(self.doctype_name, self.doctype_str)
zf.writestr(self.html5_doctype_name, self.html5_doctype_str)
zf.writestr(self.test_name_1, self.test_str_1)
zf.writestr(self.test_name_2, self.test_str_2)
zf.writestr(self.embedded_file_name, self.embedded_file_str)
self.zip_file_obj = LocalFile(
id=self.hash, extension=self.extension, available=True
)
self.zip_file_base_url = self.zip_file_obj.get_storage_url()
def test_zip_file_url_reversal(self, filename_patch):
file = LocalFile(id=self.hash, extension=self.extension, available=True)
self.assertEqual(
file.get_storage_url(), "/zipcontent/{}/".format(self.filename)
)
def test_non_zip_file_url_reversal(self, filename_patch):
file = LocalFile(id=self.hash, extension="otherextension", available=True)
filename = file.get_filename()
self.assertEqual(
file.get_storage_url(),
"/content/storage/{}/{}/{}".format(filename[0], filename[1], filename),
)
def test_zip_file_internal_file_access(self, filename_patch):
# test reading the data from file #1 inside the zip
response = self.client.get(self.zip_file_base_url + self.test_name_1)
self.assertEqual(next(response.streaming_content).decode(), self.test_str_1)
# test reading the data from file #2 inside the zip
response = self.client.get(self.zip_file_base_url + self.test_name_2)
self.assertEqual(next(response.streaming_content).decode(), self.test_str_2)
def test_nonexistent_zip_file_access(self, filename_patch):
bad_base_url = self.zip_file_base_url.replace(
self.zip_file_base_url[20:25], "aaaaa"
)
response = self.client.get(bad_base_url + self.test_name_1)
self.assertEqual(response.status_code, 404)
def test_zip_file_nonexistent_internal_file_access(self, filename_patch):
response = self.client.get(self.zip_file_base_url + "qqq" + self.test_name_1)
self.assertEqual(response.status_code, 404)
def test_non_allowed_file_internal_file_access(self, filename_patch):
response = self.client.get(
self.zip_file_base_url.replace("zip", "png") + self.test_name_1
)
self.assertEqual(response.status_code, 404)
def test_not_modified_response_when_if_modified_since_header_set(
self, filename_patch
):
caching_client = Client(HTTP_IF_MODIFIED_SINCE="Sat, 10-Sep-2016 19:14:07 GMT")
response = caching_client.get(self.zip_file_base_url + self.test_name_1)
self.assertEqual(response.status_code, 304)
def test_content_security_policy_header(self, filename_patch):
response = self.client.get(self.zip_file_base_url + self.test_name_1)
self.assertEqual(
response.get("Content-Security-Policy"),
"default-src 'self' 'unsafe-inline' 'unsafe-eval' data: blob: http://testserver",
)
def test_content_security_policy_header_http_referer(self, filename_patch):
response = self.client.get(
self.zip_file_base_url + self.test_name_1,
HTTP_REFERER="http://testserver:1234/iam/a/real/path/#thatsomeonemightuse",
)
self.assertEqual(
response.get("Content-Security-Policy"),
"default-src 'self' 'unsafe-inline' 'unsafe-eval' data: blob: http://testserver:1234",
)
def test_access_control_allow_origin_header(self, filename_patch):
response = self.client.get(self.zip_file_base_url + self.test_name_1)
self.assertEqual(response.get("Access-Control-Allow-Origin"), "*")
response = self.client.options(self.zip_file_base_url + self.test_name_1)
self.assertEqual(response.get("Access-Control-Allow-Origin"), "*")
def test_x_frame_options_header(self, filename_patch):
response = self.client.get(self.zip_file_base_url + self.test_name_1)
self.assertEqual(response.get("X-Frame-Options", ""), "")
def test_access_control_allow_headers(self, filename_patch):
headerval = "X-Penguin-Dance-Party"
response = self.client.options(
self.zip_file_base_url + self.test_name_1,
HTTP_ACCESS_CONTROL_REQUEST_HEADERS=headerval,
)
self.assertEqual(response.get("Access-Control-Allow-Headers", ""), headerval)
response = self.client.get(
self.zip_file_base_url + self.test_name_1,
HTTP_ACCESS_CONTROL_REQUEST_HEADERS=headerval,
)
self.assertEqual(response.get("Access-Control-Allow-Headers", ""), headerval)
def test_request_for_html_no_head_return_hashi_modified_html(self, filename_patch):
response = self.client.get(self.zip_file_base_url)
content = '<html><head><script src="/static/content/hashi123.js"></script></head><body></body></html>'
self.assertEqual(response.content.decode("utf-8"), content)
def test_request_for_html_body_no_script_return_hashi_modified_html(
self, filename_patch
):
response = self.client.get(self.zip_file_base_url + self.other_name)
self.assertEqual(response.content.decode("utf-8"), empty_content)
def test_request_for_html_body_script_return_hashi_modified_html(
self, filename_patch
):
response = self.client.get(self.zip_file_base_url + self.script_name)
content = (
|
d2jvkpn/bioinformatics_tools
|
bioparser/gff3extract_v0.9.py
|
Python
|
gpl-3.0
| 1,658
| 0.019903
|
import os, gzip, argparse
parser = argparse.ArgumentParser(description='Extract transcript records')
parser.add_argument("gff3", help="input gff3 file")
parser.add_argument("-F", dest="Feature", nargs='+', required=True,
help="transcript type(s) and attribution(s), " +
"E.g \"-F mRNA ncRNA:ncrna_class=lncRNA\"")
parser.add_argument("-A", dest="Attri", nargs='+', default=None,
help="output at
|
tribution(s) of selected transcript(s)")
parser.add_argument("-a", dest="attri", nargs='+', default=None,
help="output attribution(s) of exon")
if len(os.sys.argv) == 1: parser.print_help(); os.sys.exit(0)
args = parser.parse_args()
MFeature = {}
for i in args.Feature:
ii = i.split(":")
try:
MFeature[ ii[0] ] = ii[1].split(",")
except:
MFeature[ ii[0] ] = None
def extract(Fx9):
    global k
    f9 = Fx9[8].split(';'); t = Fx9[2]
if t in MFeature:
k = 1
if MFeature[t] != None and not any(x in MFeature[t] for x in f9):
k=0
if t != 'exon' and t not in MFeature: k = 0
if k != 1: return 0
atr = args.Attri if t in MFeature else args.attri
F9 = ""
if atr != None:
for i in f9:
if i.split(sep='=', maxsplit=1)[0] in atr: F9 += i + ';'
Fx9[8] = F9.strip(';')
print('\t'.join(Fx9))
if args.gff3.endswith('.gz'):
f = gzip.open(args.gff3, 'r')
for line in f:
line = line.decode("utf8").rstrip()
if not line.startswith("#"): extract( line.split('\t') )
else:
f = open(args.gff3, 'r')
for line in f:
line = line.rstrip()
if not line.startswith("#"): extract( line.split('\t') )
f.close()
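# Example invocation (sketch; file names hypothetical):
#     python gff3extract_v0.9.py genes.gff3 -F mRNA ncRNA:ncrna_class=lncRNA \
#         -A ID Name -a Parent > transcripts.gff3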
|
kasiazubielik/python_training
|
generator/group.py
|
Python
|
apache-2.0
| 1,257
| 0.009547
|
from model.group import Group
import random
import string
import os.path
import jsonpickle
import getopt
import sys
try:
opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of groups", "file"])
except getopt.GetoptError as err:
    print(err)  # getopt has no usage() helper; report the parse error instead
    sys.exit(2)
n = 3
f = "data/groups.json"
for o, a in opts:
if o == "-n":
n = int(a)
elif o == "-f":
f = a
def random_string(prefix, maxlen):
symbols = string.ascii_letters + string.digits + string.punctuation + " "*10
return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
testdata = [Group(name="", header="", footer="")] + [
Group(name=random_string("name", 10), header=random_string("header", 20), footer=random_string("footer", 20))
for i in range(n)
]
file = config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
# out.write(json.dumps(testdata, default=lambda x: x.__dict__, indent=2))
jsonpickle.set_encoder_options("json", indent = 2)
out.write(jsonpickle.encode(testdata))
# "dump" changes data sturcture into a json string
# __dict__ keeps the same properties that are assigned to the __init__ method in model.group
|
LAR-UFES/pppoe-plugin
|
pppoediplugin/CheckConnection.py
|
Python
|
gpl-3.0
| 2,146
| 0.00794
|
#!/usr/bin/env python3
from subprocess import getoutput
import threading
import time
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk as gtk
from gi.repository import GLib
class CheckConnection(threading.Thread):
    def __init__(self, pppoedi):
super(CheckConnection,self).__init__()
self.pppoedi = pppoedi
def run(self):
super(CheckConnection,self).run()
self.pppoedi.settings.active_status = False
#fsyslog = open('/var/log/syslog','r')
self.pppoedi.pppoedi_bus_interface.OpenSyslog()
while not self.pppoedi.settings.quit_pppoedi:
#ppp_status=fsyslog.read()
ppp_status=self.pppoedi.pppoedi_bus_interface.ReadSyslog()
if self.pppoedi.settings.connect_active:
if ppp_status != '':
if 'PAP authentication succeeded' in ppp_status and not self.pppoedi.settings.active_status:
self.pppoedi.settings.active_status = True
self.pppoedi.button_conn_disconn.set_label("Desconectar")
self.pppoedi.button_conn_disconn.set_sensitive(True)
elif 'PAP authentication failed' in ppp_status:
self.pppoedi.settings.active_status = False
self.pppoedi.disconnect()
GLib.idle_add(self.pppoedi.showAlertMsg,'Falha na autenticação.', gtk.MessageType.ERROR)
elif 'Unable to complete PPPoE Discovery' in ppp_status:
self.pppoedi.settings.active_status = False
self.pppoedi.disconnect()
GLib.idle_add(self.pppoedi.showAlertMsg,'Não foi possível completar o PPPoE Discovery.', gtk.MessageType.ERROR)
elif 'Connection terminated.' in ppp_status:
self.pppoedi.settings.active_status = False
self.pppoedi.disconnect()
GLib.idle_add(self.pppoedi.showAlertMsg,'A conexão foi terminada.', gtk.MessageType.ERROR)
time.sleep(0.5)
#fsyslog.close()
|
xuleiboy1234/autoTitle
|
textsum/seq2seq_attention_model.py
|
Python
|
mit
| 13,425
| 0.00432
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Sequence-to-Sequence with attention model for text summarization.
"""
from collections import namedtuple
import numpy as np
import seq2seq_lib
from six.moves import xrange
import tensorflow as tf
import embedding as emb
HParams = namedtuple('HParams',
'vocab_path, mode, min_lr, lr, batch_size, '
'enc_layers, enc_timesteps, dec_timesteps, '
'min_input_len, num_hidden, emb_dim, max_grad_norm, '
'num_softmax_samples')
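# Illustrative only (assumption -- actual values are supplied by the training
# script's flags, not defined in this module):
# hps = HParams(vocab_path='data/vocab', mode='train', min_lr=0.01, lr=0.15,
#               batch_size=4, enc_layers=4, enc_timesteps=120,
#               dec_timesteps=30, min_input_len=2, num_hidden=256,
#               emb_dim=128, max_grad_norm=2, num_softmax_samples=4096)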
def _extract_argmax_and_embed(embedding, output_projection=None,
update_embedding=True):
"""Get a loop_function that extracts the previous symbol and embeds it.
Args:
embedding: embedding tensor for symbols.
output_projection: None or a pair (W, B). If provided, each fed previous
output will first be multiplied by W and added B.
update_embedding: Boolean; if False, the gradients will not propagate
through the embeddings.
Returns:
A loop function.
"""
def loop_function(prev, _):
"""function that feed previous model output rather than ground truth."""
if output_projection is not None:
prev = tf.nn.xw_plus_b(
prev, output_projection[0], output_projection[1])
prev_symbol = tf.argmax(prev, 1)
# Note that gradients will not propagate through the second parameter of
# embedding_lookup.
emb_prev = tf.nn.embedding_lookup(embedding, prev_symbol)
if not update_embedding:
emb_prev = tf.stop_gradient(emb_prev)
return emb_prev
return loop_function
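# Sketch of how the loop function is typically wired in (assumption -- the
# decode path further below is truncated; dec_in_state, enc_top_states and
# cell are placeholders for names built elsewhere in the graph):
# loop_fn = _extract_argmax_and_embed(embedding, (w, v), update_embedding=False)
# decoder_outputs, _ = tf.contrib.legacy_seq2seq.attention_decoder(
#     emb_decoder_inputs, dec_in_state, enc_top_states, cell,
#     num_heads=1, loop_function=loop_fn, initial_state_attention=True)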
class Seq2SeqAttentionModel(object):
"""Wrapper for Tensorflow model graph for text sum vectors."""
def __init__(self, hps, vocab, num_gpus=0):
self._hps = hps
self._vocab = vocab
self._num_gpus = num_gpus
self._cur_gpu = 0
def run_train_step(self, sess, article_batch, abstract_batch, targets,
article_lens, abstract_lens, loss_weights):
to_return = [self._train_op, self._summaries, self._loss, self.global_step]
return sess.run(to_return,
feed_dict={self._articles: article_batch,
self._abstracts: abstract_batch,
self._targets: targets,
self._article_lens: article_lens,
self._abstract_lens: abstract_lens,
self._loss_weights: loss_weights})
def run_eval_step(self, sess, article_batch, abstract_batch, targets,
article_lens, abstract_lens, loss_weights):
to_return = [self._summaries, self._loss, self.global_step]
return sess.run(to_return,
feed_dict={self._articles: article_batch,
self._abstracts: abstract_batch,
                               self._targets: targets,
                               self._article_lens: article_lens,
                               self._abstract_lens: abstract_lens,
self._loss_weights: loss_weights})
def run_decode_step(self, sess, article_batch, abstract_batch, targets,
article_lens, abstract_lens, loss_weights):
to_return = [self._outputs, self.global_step]
return sess.run(to_return,
feed_dict={self._articles: article_batch,
self._abstracts: abstract_batch,
self._targets: targets,
self._article_lens: article_lens,
self._abstract_lens: abstract_lens,
self._loss_weights: loss_weights})
def _next_device(self):
"""Round robin the gpu device. (Reserve last gpu for expensive op)."""
if self._num_gpus == 0:
return ''
dev = '/gpu:%d' % self._cur_gpu
if self._num_gpus > 1:
self._cur_gpu = (self._cur_gpu + 1) % (self._num_gpus-1)
return dev
def _get_gpu(self, gpu_id):
if self._num_gpus <= 0 or gpu_id >= self._num_gpus:
return ''
return '/gpu:%d' % gpu_id
def _add_placeholders(self):
"""Inputs to be fed to the graph."""
hps = self._hps
self._articles = tf.placeholder(tf.int32,
[hps.batch_size, hps.enc_timesteps],
name='articles')
self._abstracts = tf.placeholder(tf.int32,
[hps.batch_size, hps.dec_timesteps],
name='abstracts')
self._targets = tf.placeholder(tf.int32,
[hps.batch_size, hps.dec_timesteps],
name='targets')
self._article_lens = tf.placeholder(tf.int32, [hps.batch_size],
name='article_lens')
self._abstract_lens = tf.placeholder(tf.int32, [hps.batch_size],
name='abstract_lens')
self._loss_weights = tf.placeholder(tf.float32,
[hps.batch_size, hps.dec_timesteps],
name='loss_weights')
def _add_seq2seq(self):
hps = self._hps
vsize = self._vocab.NumIds()
with tf.variable_scope('seq2seq'):
encoder_inputs = tf.unstack(tf.transpose(self._articles))
decoder_inputs = tf.unstack(tf.transpose(self._abstracts))
targets = tf.unstack(tf.transpose(self._targets))
loss_weights = tf.unstack(tf.transpose(self._loss_weights))
article_lens = self._article_lens
# Embedding shared by the input and outputs.
with tf.variable_scope('embedding'), tf.device('/cpu:0'):
# embedding = tf.get_variable(
# 'embedding', [vsize, hps.emb_dim], dtype=tf.float32,
# initializer=tf.truncated_normal_initializer(stddev=1e-4))
word2vec = emb.embeded(hps)
embedding = word2vec.get_embedding()
emb_encoder_inputs = [tf.nn.embedding_lookup(embedding, x)
for x in encoder_inputs]
emb_decoder_inputs = [tf.nn.embedding_lookup(embedding, x)
for x in decoder_inputs]
for layer_i in xrange(hps.enc_layers):
with tf.variable_scope('encoder%d'%layer_i), tf.device(
self._next_device()):
cell_fw = tf.contrib.rnn.LSTMCell(
hps.num_hidden,
initializer=tf.random_uniform_initializer(-0.1, 0.1, seed=123),
state_is_tuple=False)
cell_bw = tf.contrib.rnn.LSTMCell(
hps.num_hidden,
initializer=tf.random_uniform_initializer(-0.1, 0.1, seed=113),
state_is_tuple=False)
(emb_encoder_inputs, fw_state, _) = tf.contrib.rnn.static_bidirectional_rnn(
cell_fw, cell_bw, emb_encoder_inputs, dtype=tf.float32,
sequence_length=article_lens)
encoder_outputs = emb_encoder_inputs
with tf.variable_scope('output_projection'):
w = tf.get_variable(
'w', [hps.num_hidden, vsize], dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=1e-4))
w_t = tf.transpose(w)
v = tf.get_variable(
'v', [vsize], dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=1e-4))
with tf.variable_scope('decoder'), tf.device(self._next_device()):
# When decoding, use model output from the previous step
      # for the next-step prediction.
|
cediant/hpcservergridscheduler
|
CLIENT/client.py
|
Python
|
gpl-2.0
| 7,365
| 0.057298
|
#!/home/executor/Ice/python2.4/bin/python
# Copyright (C) 2012 CEDIANT <info@cediant.es>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License v2
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import os, time
import sys, traceback, Ice, IceGrid
import threading
import random
SLICE_CONTRACT = os.environ['HOME'] + "/DISTRIB/HPCServer/execservant.ice"
#CONFIG_FILE = os.environ['HOME'] + "/config"
CONFIG_FILE = "client.cfg"
DEFAULT_PRIO = "5"
BASE_VERSION = "364"
ALTERNATE_VERSION = "364"
import amebajobs
Ice.loadSlice(SLICE_CONTRACT)
import HPCServer
def random_prio ( max_prio=10 ) :
return int( max_prio * random.random() )
def step_prio ( max_prio=10 ) :
priolist = range(max_prio,0,-1)
top = 0
for i in priolist :
top += i
prio = int( top * random.random() )
    limit = 0
    for i in priolist :
        limit += i
if prio < limit :
break
return i - 1
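# With max_prio=10, step_prio() returns 9 with probability 10/55, 8 with 9/55,
# ... down to 0 with 1/55 -- a linearly decreasing priority distribution.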
import math
def stats ( values ) :
avg , err = 0.0 , 0.0
for v in values :
avg += v
err += v * v
avg /= len(values)
err /= len(values)
err -= avg * avg
return avg , math.sqrt( err )
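# Worked example: stats([1.0, 2.0, 3.0]) -> (2.0, 0.8165...), i.e. the mean
# and the population standard deviation sqrt(E[v^2] - E[v]^2).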
class AMI_HPCServer_AsyncgExec(Ice.Object):
def __init__(self,name,application):
self.name = name
self.application = application
def ice_response(self, result):
self.application.cond.acquire()
try:
# print "Terminada operacion con resultado [%s]" % result
if self.application.detailfile : self.application.detailfile.write( "%s %s\n" % ( result , time.time() ) )
self.application.jobs-=1
if self.application.jobs==0:
self.application.cond.notify()
finally:
self.application.cond.release()
print result
def ice_exception(self,ex):
self.application.cond.acquire()
try:
self.application.jobs-=1
if self.application.jobs==0:
self.application.cond.notify()
print "excepcion --- %s" % ex
finally:
self.application.cond.release()
class ExecClientApp(Ice.Application):
def __init__(self):
self.jobs = 0
self.cond = threading.Condition()
self.detailfile = None
def launchOperation(self,input_string,prio):
try:
ic = self.communicator()
base = ic.stringToProxy("HPCServerServant")
#base = ic.stringToProxy("HPCServerServant:default -p 10000")
e_servant = HPCServer.ExecServantPrx.checkedCast(base)
except Ice.NotRegisteredException:
print "%s : couldn't find a `::HPCServer::HPCServerServant' object." % self.appName()
return False
try:
ctx={}
if prio == "random" :
ctx["prio"] = "%s" % random_prio( 10 )
else :
ctx["prio"] = prio
if ameba_percent :
if ameba_percent < int(100*random.random()) :
ctx["ameba_version"] = ALTERNATE_VERSION
else :
ctx["ameba_version"] = BASE_VERSION
elif ameba_range :
ctx["ameba_version"] = "%s" % int( 10 + ameba_range * random.random() )
else :
ctx["ameba_version"] = BASE_VERSION
#ctx["testing"] = "No"
ctx["submittime"] = "%s" % time.time()
ctx["url"] = "http://01cnbtlgridp:21000/deploy/"
ctx["errormail"] = "gfernandez@cediant.es"
ctx["smtp1"] = "01cnbtlgridp:25"
## AMI + AMD
#print "lanzada operacion"
self.cond.acquire()
try:
callback = AMI_HPCServer_AsyncgExec(input_string,self)
e_servant.gExec_async(callback,input_string,ctx) # Asynchronous call
self.jobs+=1
finally:
self.cond.release()
except:
traceback.print_exc()
return False
return True
def constructInput(self,prio,clientid):
operationid = int( 10000000 * random.random() )
for i in range( pack * ncores ) :
input = amebajobs.construct_job((clientid,operationid),i)
if not self.launchOperation(input,prio) :
print "Job was not submitted"
return False
self.cond.acquire()
try:
while self.jobs:
self.cond.wait()
finally:
self.cond.release()
return True
def evaluateFile(self,file_path,prio):
if not os.path.exists(file_path):
return False
try:
f = open(file_path,'r')
request_list = f.read().split("{{")[1:]
f.close()
except:
print "No se pudo leer el fichero %s" % file_path
#send operations to ameba file
for i in range(len(request_list)):
# print "Lanzando operacion %d" % i
if not self.launchOperation("{{"+request_list[i],prio):
return False
#wait for jobs termination.(AMI+AMD Issue)
self.cond.acquire()
try:
while self.jobs:
self.cond.wait()
finally:
self.cond.release()
return True
def run(self,args):
service_name = "HPCServerServant"
ic = None
ic = self.communicator()
# This is probably a bug somewhere, but is required to handle connection loss
ic.getProperties().setProperty( "Ice.ThreadPool.Client.SizeMax" , "20" )
# Launch file section
#---------------------------------------
file_name = None
if len(sys.argv) > 1 :
aux_name = sys.argv[1]
if os.path.exists(aux_name):
file_name=aux_name
if len(sys.argv) > 2 :
prio = sys.argv[2]
else:
prio = DEFAULT_PRIO
init_time = time.time()
subtotals = []
summary = open( outfile + ".out" , 'w' )
summary.write( "# args : %s\n" % sys.argv )
summary.write( "# cores : %s\n" % ncores )
if file_name is None :
summary.write( "# pack : %s\n" % pack )
else :
summary.write( "# input_file : %s\n" % file_name )
for i in range( npacks ) :
self.detailfile = open( os.path.join( outfile , "%s.out" % i ) , 'w' )
starttime = time.time()
if file_name is None :
clientid = int( 1000000 * random.random() )
self.constructInput(prio,clientid)
else :
self.evaluateFile(file_name,prio)
subtotal = time.time() - starttime
subtotals.append( subtotal )
summary.write( "%s %s %s %s\n" % ( i , subtotal , ameba_percent , ameba_range ) )
self.detailfile.close()
self.detailfile = None
summary.close()
if ic :
try:
ic.destroy()
except:
traceback.print_exc()
avg , err = stats( subtotals )
print "%s %s %s %s %s %s" % ( header , time.time() - init_time , avg , err , ameba_percent , ameba_range )
return True
header = "AVERAGE"
outfile = "output"
ncores = 8
pack = 30
npacks = 10
ameba_percent = False
ameba_range = False
if (__name__ == "__main__"):
if len(sys.argv) > 1 and sys.argv[1] == "--pack" :
sys.argv.pop(1)
pack = int(sys.argv.pop(1))
if len(sys.argv) > 1 and sys.argv[1] == "--npacks" :
sys.argv.pop(1)
npacks = int(sys.argv.pop(1))
if len(sys.argv) > 1 and sys.argv[1] == "--ameba-percent" :
sys.argv.pop(1)
        ameba_percent = int(sys.argv.pop(1))
if len(sys.argv) > 1 and sys.argv[1] == "--ameba-range" :
sys.argv.pop(1)
ameba_range = int(sys.argv.pop(1))
if ameba_percent :
ameba_range = False
if len(sys.argv) > 1 and sys.argv[1] == "--suffix" :
sys.argv.pop(1)
header = sys.argv.pop(1)
        outfile = os.path.join( outfile , header )
os.makedirs( outfile )
app = ExecClientApp()
ret = app.main(sys.argv,CONFIG_FILE)
sys.exit(ret)
|
w1ll1am23/simplisafe-python
|
tests/system/__init__.py
|
Python
|
mit
| 32
| 0
|
"""Define tes
|
ts for sy
|
stems."""
|
cdcf/time_tracker
|
app/main/errors.py
|
Python
|
bsd-3-clause
| 392
| 0
|
__author__ = 'Cedric Da Costa Faro'
from flask import render_template
from . import main
@main.app_errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@main.app_errorhandler(405)
def method_not_allowed(e):
    return render_template('405.html'), 405
@main.app_errorhandler(500)
def internal_server_error(e):
return render_template('500.html'), 500
|
AustinRoy7/Pomodoro-timer
|
venv/Lib/site-packages/setuptools/ssl_support.py
|
Python
|
mit
| 8,130
| 0.001107
|
import os
import socket
import atexit
import re
from setuptools.extern.six.moves import urllib, http_client, map
import pkg_resources
from pkg_resources import ResolutionError, ExtractionError
try:
import ssl
except ImportError:
ssl = None
__all__ = [
'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
'opener_for'
]
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
/etc/ssl/ca-bundle.pem
""".strip().split()
try:
HTTPSHandler = urllib.request.HTTPSHandler
HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
HTTPSHandler = HTTPSConnection = object
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
try:
from ssl import CertificateError, match_hostname
except ImportError:
try:
from backports.ssl_match_hostname import CertificateError
from backports.ssl_match_hostname import match_hostname
except ImportError:
CertificateError = None
match_hostname = None
if not CertificateError:
class CertificateError(ValueError):
pass
if not match_hostname:
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS
|
name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
        # the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
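    # Illustrative behaviour (RFC 6125 wildcard rules):
    #   _dnsname_match('*.example.com', 'www.example.com')   -> match object
    #   _dnsname_match('*.example.com', 'a.b.example.com')   -> None
    #   _dnsname_match('www.example.com', 'www.example.com') -> True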
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
"""Simple verifying handler: no auth, subclasses, timeouts, etc."""
def __init__(self, ca_bundle):
self.ca_bundle = ca_bundle
HTTPSHandler.__init__(self)
def https_open(self, req):
return self.do_open(
lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
)
class VerifyingHTTPSConn(HTTPSConnection):
"""Simple verifying connection: no auth, subclasses, timeouts, etc."""
def __init__(self, host, ca_bundle, **kw):
HTTPSConnection.__init__(self, host, **kw)
self.ca_bundle = ca_bundle
def connect(self):
sock = socket.create_connection(
(self.host, self.port), getattr(self, 'source_address', None)
)
# Handle the socket if a (proxy) tunnel is present
if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
self.sock = sock
self._tunnel()
# http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
# change self.host to mean the proxy server host when tunneling is
# being used. Adapt, since we are interested in the destination
# host for the match_hostname() comparison.
actual_host = self._tunnel_host
else:
actual_host = self.host
self.sock = ssl.wrap_socket(
sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
)
try:
match_hostname(self.sock.getpeercert(), actual_host)
except CertificateError:
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
raise
def opener_for(ca_bundle=None):
"""Get a urlopen() replacement that uses ca_bundle for verification"""
return urllib.request.build_opener(
VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
).open
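# Usage sketch (assumes a CA bundle can be located on this system):
#   urlopen = opener_for()
#   data = urlopen('https://pypi.org/').read()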
_wincerts = None
def get_win_certfile():
global _wincerts
if _wincerts is not None:
return _wincerts.name
try:
from wincertstore import CertFile
except ImportError:
return None
class MyCertFile(CertFile):
def __init__(self, stores=(), certs=()):
CertFile.__init__(self)
for store in stores:
self.addstore(store)
self.addcerts(certs)
atexit.register(self.close)
def close(self):
try:
super(MyCertFile, self).close()
except OSError:
pass
_wincerts = MyCertFile(stores=['CA', 'ROOT'])
return _wincerts.name
def find_ca_bundle():
"""Return an existing CA bundle path, or None"""
if os.name == 'nt':
return get_win_certfile()
else:
for cert_path in cert_paths:
if os.path.isfile(cert_path):
return cert_path
try:
import certifi
return certifi.where()
except (ImportError, ResolutionError, ExtractionError):
return None
|
nsalomonis/AltAnalyze
|
import_scripts/mergeFilesUnique.py
|
Python
|
apache-2.0
| 14,369
| 0.021992
|
import sys,string,os
sys.path.insert(1, os.path.join(sys.path[0], '..')) ### import parent dir dependencies
import os.path
import unique
import itertools
dirfile = unique
############ File Import Functions #############
def filepath(filename):
fn = unique.filepath(filename)
return fn
def read_directory(sub_dir):
dir_list = unique.read_directory(sub_dir)
return dir_list
def returnDirectories(sub_dir):
    dir_list = unique.returnDirectories(sub_dir)
return dir_list
class GrabFiles:
def setdirectory(self,value): self.data = value
def display(self): print self.data
def searchdirectory(self,search_term):
#self is an instance while self.data is the value of the instance
files = getDirectoryFiles(self.data,search_term)
if len(files)<1: print 'files not found'
return files
def returndirectory(self):
dir_list = getAllDirectoryFiles(self.data)
return dir_list
def getAllDirectoryFiles(import_dir):
all_files = []
dir_list = read_directory(import_dir) #send a sub_directory to a function to identify all files in a directory
for data in dir_list: #loop through each file in the directory to output results
data_dir = import_dir[1:]+'/'+data
if '.' in data:
all_files.append(data_dir)
return all_files
def getDirectoryFiles(import_dir,search_term):
dir_list = read_directory(import_dir) #send a sub_directory to a function to identify all files in a directory
matches=[]
for data in dir_list: #loop through each file in the directory to output results
data_dir = import_dir[1:]+'/'+data
if search_term not in data_dir and '.' in data: matches.append(data_dir)
return matches
def cleanUpLine(line):
line = string.replace(line,'\n','')
line = string.replace(line,'\c','')
data = string.replace(line,'\r','')
data = string.replace(data,'"','')
return data
def combineAllLists(files_to_merge,original_filename,includeColumns=False):
headers =[]; all_keys={}; dataset_data={}; files=[]; unique_filenames=[]
count=0
for filename in files_to_merge:
duplicates=[]
count+=1
fn=filepath(filename); x=0; combined_data ={}
if '/' in filename:
file = string.split(filename,'/')[-1][:-4]
else:
file = string.split(filename,'\\')[-1][:-4]
### If two files with the same name being merged
if file in unique_filenames:
file += str(count)
unique_filenames.append(file)
print file
files.append(filename)
for line in open(fn,'rU').xreadlines():
data = cleanUpLine(line)
t = string.split(data,'\t')
if x==0:
if data[0]!='#':
x=1
try: t = t[1:]
except Exception: t = ['null']
if includeColumns==False:
for i in t:
headers.append(i+'.'+file)
#headers.append(i)
else:
headers.append(t[includeColumns]+'.'+file)
else: #elif 'FOXP1' in data or 'SLK' in data or 'MBD2' in data:
key = t[0]
if includeColumns==False:
try: values = t[1:]
except Exception: values = ['null']
try:
if original_filename in filename and len(original_filename)>0: key = t[1]; values = t[2:]
except IndexError: print original_filename,filename,t;kill
else:
values = [t[includeColumns]]
#key = string.replace(key,' ','')
if len(key)>0 and key != ' ' and key not in combined_data: ### When the same key is present in the same dataset more than once
try: all_keys[key] += 1
except KeyError: all_keys[key] = 1
if permform_all_pairwise == 'yes':
try: combined_data[key].append(values); duplicates.append(key)
except Exception: combined_data[key] = [values]
else:
combined_data[key] = values
#print duplicates
dataset_data[filename] = combined_data
for i in dataset_data:
print len(dataset_data[i]), i
###Add null values for key's in all_keys not in the list for each individual dataset
combined_file_data = {}
for filename in files:
combined_data = dataset_data[filename]
###Determine the number of unique values for each key for each dataset
null_values = []; i=0
for key in combined_data: number_of_values = len(combined_data[key][0]); break
while i<number_of_values: null_values.append('0'); i+=1
for key in all_keys:
include = 'yes'
if combine_type == 'intersection':
if all_keys[key]>(len(files_to_merge)-1): include = 'yes'
else: include = 'no'
if include == 'yes':
try: values = combined_data[key]
except KeyError:
values = null_values
if permform_all_pairwise == 'yes':
values = [null_values]
if permform_all_pairwise == 'yes':
try:
val_list = combined_file_data[key]
val_list.append(values)
combined_file_data[key] = val_list
except KeyError: combined_file_data[key] = [values]
else:
try:
previous_val = combined_file_data[key]
new_val = previous_val + values
combined_file_data[key] = new_val
except KeyError: combined_file_data[key] = values
original_filename = string.replace(original_filename,'1.', '1.AS-')
export_file = output_dir+'/MergedFiles.txt'
fn=filepath(export_file);data = open(fn,'w')
title = string.join(['uid']+headers,'\t')+'\n'; data.write(title)
for key in combined_file_data:
#if key == 'ENSG00000121067': print key,combined_file_data[key];kill
new_key_data = string.split(key,'-'); new_key = new_key_data[0]
if permform_all_pairwise == 'yes':
results = getAllListCombinations(combined_file_data[key])
for result in results:
merged=[]
for i in result: merged+=i
values = string.join([key]+merged,'\t')+'\n'; data.write(values) ###removed [new_key]+ from string.join
else:
try:
values = string.join([key]+combined_file_data[key],'\t')+'\n'; data.write(values) ###removed [new_key]+ from string.join
except Exception: print combined_file_data[key];sys.exit()
data.close()
print "exported",len(dataset_data),"to",export_file
def customLSDeepCopy(ls):
ls2=[]
for i in ls: ls2.append(i)
return ls2
def getAllListCombinationsLong(a):
ls1 = ['a1','a2','a3']
ls2 = ['b1','b2','b3']
ls3 = ['c1','c2','c3']
ls = ls1,ls2,ls3
list_len_db={}
for ls in a:
        list_len_db[len(ls)] = []
print len(list_len_db), list_len_db;sys.exit()
if len(list_len_db)==1 and 1 in list_len_db:
### Just simply combine non-complex data
r=[]
for i in a:
r+=i
else:
#http://code.activestate.com/recipes/496807-list-of-all-combination-from-multiple-lists/
r=[[]]
for x in a:
t = []
for y in x:
for i in r:
t.append(i+[y])
r = t
return r
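# Cartesian-product step, illustrated: for a = [['a1','a2'], ['b1']] the loop
# above builds r = [['a1','b1'], ['a2','b1']].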
def combineUniqueAllLists(files_to_merge,original_filename):
headers =[]; all_keys={}; dataset_data={}; files=[]
for filename in files_to_merge:
print filename
|
CanonicalLtd/landscape-client
|
landscape/sysinfo/tests/test_processes.py
|
Python
|
gpl-2.0
| 2,217
| 0
|
import unittest
from twisted.internet.defer import Deferred
from landscape.lib.testing import (
FSTestCase, TwistedTestCase, ProcessDataBuilder)
from landscape.sysinfo.sysinfo import SysInfoPluginRegistry
from landscape.sysinfo.processes import Processes
class ProcessesTest(FSTestCase, TwistedTestCase, unittest.TestCase):
def setUp(self):
super(ProcessesTest, self).setUp()
self.fake_proc = self.makeDir()
self.processes = Processes(proc_dir=self.fake_proc)
self.sysinfo = SysInfoPluginRegistry()
self.sysinfo.add(self.processes)
self.builder = ProcessDataBuilder(self.fake_proc)
def test_run_returns_succeeded_deferred(self):
result = self.processes.run()
self.assertTrue(isinstance(result, Deferred))
called = []
def callback(result):
called.append(True)
result.addCallback(callback)
self.assertTrue(called)
def test_number_of_processes(self):
"""The number of processes is added as a header."""
for i in range(3):
self.builder.create_data(i, self.builder.RUNNING, uid=0, gid=0,
process_name="foo%d" % (i,))
self.processes.run()
self.assertEqual(self.sysinfo.get_headers(),
[("Processes", "3")])
def test_no_zombies(self):
self.processes.run()
self.assertEqual(self.sysinfo.get_notes(), [])
    def test_number_of_zombies(self):
"""The number of zombies is added as a note."""
self.builder.create_data(99, self.builder.ZOMBIE, uid=0, gid=0,
process_name="ZOMBERS")
self.processes.run()
self.assertEqual(self.sysinfo.get_notes(),
["There is 1 zombie process."])
def test_multiple_zombies(self):
"""Stupid English, and its plurality"""
for i in range(2):
self.builder.create_data(i, self.builder.ZOMBIE, uid=0, gid=0,
process_name="ZOMBERS%d" % (i,))
self.processes.run()
self.assertEqual(self.sysinfo.get_notes(),
["There are 2 zombie processes."])
|
nikkitan/bitcoin
|
test/functional/rpc_scantxoutset.py
|
Python
|
mit
| 13,144
| 0.008673
|
#!/usr/bin/env python3
# Copyright (c) 2018-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the scantxoutset rpc call."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from decimal import Decimal
import shutil
import os
def descriptors(out):
return sorted(u['desc'] for u in out['unspents'])
class ScantxoutsetTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(110)
addr_P2SH_SEGWIT = self.nodes[0].getnewaddress("", "p2sh-segwit")
pubk1 = self.nodes[0].getaddressinfo(addr_P2SH_SEGWIT)['pubkey']
addr_LEGACY = self.nodes[0].getnewaddress("", "legacy")
pubk2 = self.nodes[0].getaddressinfo(addr_LEGACY)['pubkey']
addr_BECH32 = self.nodes[0].getnewaddress("", "bech32")
pubk3 = self.nodes[0].getaddressinfo(addr_BECH32)['pubkey']
self.nodes[0].sendtoaddress(addr_P2SH_SEGWIT, 0.001)
self.nodes[0].sendtoaddress(addr_LEGACY, 0.002)
self.nodes[0].sendtoaddress(addr_BECH32, 0.004)
#send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK
self.nodes[0].sendtoaddress("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 0.008) # (m/0'/0'/0')
self.nodes[0].sendtoaddress("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 0.016) # (m/0'/0'/1')
self.nodes[0].sendtoaddress("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 0.032) # (m/0'/0'/1500')
self.nodes[0].sendtoaddress("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 0.064) # (m/0'/0'/0)
self.nodes[0].sendtoaddress("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 0.128) # (m/0'/0'/1)
self.nodes[0].sendtoaddress("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 0.256) # (m/0'/0'/1500)
self.nodes[0].sendtoaddress("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 0.512) # (m/1/1/0')
self.nodes[0].sendtoaddress("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1.024) # (m/1/1/1')
self.nodes[0].sendtoaddress("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2.048) # (m/1/1/1500')
self.nodes[0].sendtoaddress("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4.096) # (m/1/1/0)
self.nodes[0].sendtoaddress("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8.192) # (m/1/1/1)
self.nodes[0].sendtoaddress("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16.384) # (m/1/1/1500)
self.nodes[0].generate(1)
self.log.info("Stop node, remove wallet, mine again some blocks...")
self.stop_node(0)
shutil.rmtree(os.path.join(self.nodes[0].datadir, "regtest", 'wallets'))
self.start_node(0)
self.nodes[0].generate(110)
scan = self.nodes[0].scantxoutset("start", [])
info = self.nodes[0].gettxoutsetinfo()
assert_equal(scan['success'], True)
assert_equal(scan['height'], info['height'])
assert_equal(scan['txouts'], info['txouts'])
assert_equal(scan['bestblock'], info['bestblock'])
self.restart_node(0, ['-nowallet'])
self.log.info("Test if we have found the non HD unspent outputs.")
assert_equal(self.nodes[0].scantxoutset("start", [ "pkh(" + pubk1 + ")", "pkh(" + pubk2 + ")", "pkh(" + pubk3 + ")"])['total_amount'], Decimal("0.002"))
assert_equal(self.nodes[0].scantxoutset("start", [ "wpkh(" + pubk1 + ")", "wpkh(" + pubk2 + ")", "wpkh(" + pubk3 + ")"])['total_amount'], Decimal("0.004"))
assert_equal(self.nodes[0].scantxoutset("start", [ "sh(wpkh(" + pubk1 + "))", "sh(wpkh(" + pubk2 + "))", "sh(wpkh(" + pubk3 + "))"])['total_amount'], Decimal("0.001"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(" + pubk1 + ")", "combo(" + pubk2 + ")", "combo(" + pubk3 + ")"])['total_amount'], Decimal("0.007"))
        assert_equal(self.nodes[0].scantxoutset("start", [ "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "addr(" + addr_BECH32 + ")"])['total_amount'], Decimal("0.007"))
assert_equal(self.nodes[0].scantxoutset("start", [ "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "combo(" + pubk3 + ")"])['total_amount'], Decimal("0.007"))
self.log.info("Test range validation.")
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": -1}])
assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [-1, 10]}])
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}])
assert_raises_rpc_error(-8, "Range specified as [begin,end] must not have begin after end", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [2, 1]}])
assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [0, 1000001]}])
self.log.info("Test extended key derivation.")
# Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset.
# Note that all amounts in the UTXO set are powers of 2 multiplied by 0.001 BTC, so each amounts uniquely identifies a subset.
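        # Worked example: a reported total of 0.024 can only decompose as
        # 0.008 + 0.016, i.e. exactly the outputs sent to m/0'/0'/0' and m/0'/0'/1' above.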
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'], Decimal("0.008"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'], Decimal("0.016"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'], Decimal("0.032"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'], Decimal("0.064"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'], Decimal("0.128"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'], Decimal("0.256"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'], Decimal("0.024"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'], Decimal("0.056"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'], Decimal("0.192"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'], Decimal("0.448"))
        assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'], Decimal("0.512"))
|
iandees/all-the-places
|
locations/spiders/petsmart.py
|
Python
|
mit
| 3,626
| 0.007722
|
import datetime
import re
import scrapy
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
day_mapping = {'MON': 'Mo','TUE': 'Tu','WED': 'We','THU': 'Th',
'FRI': 'Fr','SAT': 'Sa','SUN': 'Su'}
def convert_24hour(time):
"""
Takes 12 hour time as a string and converts it to 24 hour time.
"""
if len(time[:-2].split(':')) < 2:
hour = time[:-2]
minute = '00'
else:
hour, minute = time[:-2].split(':')
if time[-2:] == 'AM':
time_formatted = hour + ':' + minute
elif time[-2:] == 'PM':
time_formatted = str(int(hour)+ 12) + ':' + minute
if time_formatted in ['24:00','0:00','00:00']:
time_formatted = '23:59'
return time_formatted
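# Examples: convert_24hour('9:30AM') -> '9:30'; convert_24hour('9:30PM') -> '21:30';
# convert_24hour('12PM') -> '23:59' (the 24:00 edge case is clamped).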
class PetSmartSpider(scrapy.Spider):
download_delay = 0.2
name = "petsmart"
allowed_domains = ["petsmart.com"]
start_urls = (
'https://www.petsmart.com/store-locator/all/',
)
def parse(self, response):
state_urls = response.xpath('//li[@class="col-sm-12 col-md-4"]/a/@href').extract()
is_store_details_urls = response.xpath('//a[@class="store-details-link"]/@href').extract()
if not state_urls and is_store_details_urls:
for url in is_store_details_urls:
yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
else:
for url in state_urls:
yield scrapy.Request(response.urljoin(url))
def parse_store(self, response):
ref = re.search(r'.+/?\?(.+)', response.url).group(1)
properties = {
'name': response.xpath('//span[@itemprop="name"]/text()').extract_first().strip(),
'addr_full': response.xpath('//div[@itemprop="streetAddress"]/text()').extract_first(),
'city': response.xpath('//span[@itemprop="addressLocality"][1]/text()').extract_first().title(),
'state': response.xpath('//span[@itemprop="addressLocality"][2]/text()').extract_first(),
'postcode': response.xpath('//span[@itemprop="postalCode"]/text()').extract_first(),
'lat': float(response.xpath('//input[@name="storeLatitudeVal"]/@value').extract_first()),
'lon': float(response.xpath('//input[@name="storeLongitudeVal"]/@value').extract_first()),
'phone': response.xpath('//a[@class="store-contact-info"]/text()').extract_first(),
'ref': ref,
'website': response.url
}
hours = self.parse_hours(response.xpath('//div[@class="store-detail-address"]'))
if hours:
properties['opening_hours'] = hours
yield GeojsonPointItem(**properties)
def parse_hours(self, elements):
        opening_hours = OpeningHours()
days = elements.xpath('//span[@itemprop="dayOfWeek"]/text()').extract()
today = (set(day_mapping) - set(days)).pop()
days.remove('TODAY')
days.insert(0,today)
        open_hours = elements.xpath('//div[@class="store-hours"]/time[@itemprop="opens"]/@content').extract()
close_hours = elements.xpath('//div[@class="store-hours"]/time[@itemprop="closes"]/@content').extract()
store_hours = dict((z[0],list(z[1:])) for z in zip(days, open_hours, close_hours))
for day, hours in store_hours.items():
if 'CLOSED' in hours:
continue
opening_hours.add_range(day=day_mapping[day],
open_time=convert_24hour(hours[0]),
close_time=convert_24hour(hours[1]))
return opening_hours.as_opening_hours()
|
antoinecarme/pyaf
|
tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_Lag1Trend_Seasonal_Hour_ARX.py
|
Python
|
bsd-3-clause
| 165
| 0.048485
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['Lag1Trend'] , ['Seasonal_Hour'] , ['ARX'] );
|
rbian/tp-libvirt
|
libvirt/tests/src/numa/numa_capabilities.py
|
Python
|
gpl-2.0
| 1,889
| 0
|
import logging
from virttest import libvirt_xml
from virttest import utils_libvirtd
from virttest import utils_misc
from autotest.client.shared import error
def run(test, params, env):
"""
Test capabilities with host numa node topology
"""
libvirtd = utils_libvirtd.Libvirtd()
libvirtd.start()
try:
new_cap = libvirt_xml.CapabilityXML()
if not libvirtd.is_running():
raise error.TestFail("Libvirtd is not running")
topo = new_cap.cells_topology
logging.debug("topo xml is %s", topo.xmltreefile)
cell_list = topo.get_cell()
numa_info = utils_misc.NumaInfo()
for cell_num in range(len(cell_list)):
# check node distances
cell_distance = cell_list[cell_num].sibling
logging.debug("cell %s distance is %s", cell_num, cell_distance)
node_distance = numa_info.distances[cell_num]
for j in range(len(cell_list)):
if cell_distance[j]['value'] != node_distance[j]:
raise error.TestFail("cell distance value not expected.")
# check node cell cpu
            cell_xml = cell_list[cell_num]
cpu_list_from_xml = cell_xml.cpu
node_ = numa_info.nodes[cell_num]
cpu_list = node_.cpus
logging.debug("cell %s cpu list is %s", cell_num, cpu_list)
cpu_topo_list = []
            for cpu_id in cpu_list:
cpu_dict = node_.get_cpu_topology(cpu_id)
cpu_topo_list.append(cpu_dict)
logging.debug("cpu topology list from capabilities xml is %s",
cpu_list_from_xml)
if cpu_list_from_xml != cpu_topo_list:
raise error.TestFail("cpu list %s from capabilities xml not "
"expected.")
finally:
libvirtd.restart()
|
xuru/pyvisdk
|
pyvisdk/enums/host_disk_partition_info_partition_format.py
|
Python
|
mit
| 247
| 0
|
########################################
# Automatically generated, do not edit.
########################################
from pyvisdk.thirdparty import Enum
HostDiskPartitionInfoPartitionFormat = Enum(
'gpt',
'mbr',
'unknown',
)
|
kurdd/Oauth
|
app/models.py
|
Python
|
apache-2.0
| 1,109
| 0.012624
|
# Define a custom User class to work with django-social-auth
from django.db import models
from django.contrib.auth.models import User
class Task(models.Model):
name = models.CharField(max_length=200)
owner = models.ForeignKey(User)
finished = models.BooleanField(default=False)
shared = models.BooleanField(default=False)
class Viewer(models.Model):
name = models.ForeignKey(User)
tasks = models.ForeignKey(Task)
class Friends(models.Model):
created = models.DateTimeField(auto_now_add=True, editable=False)
creator = models.ForeignKey(User, related_name="friendship_creator_set")
    friend = models.ForeignKey(User, related_name="friend_set")
class CustomUserManager(models.Manager):
    def create_user(self, username, email):
return self.model._default_manager.create(username=username)
class CustomUser(models.Model):
username = models.CharField(max_length=128)
last_login = models.DateTimeField(blank=True, null=True)
objects = CustomUserManager()
def is_authenticated(self):
return True
|
omn0mn0m/JARVIS
|
jarvis/jarvis.py
|
Python
|
mit
| 3,867
| 0.006465
|
import nltk
import filemanager
import multiprocessing
import os
import ConfigParser
from assistant import Assistant, Messenger
from nltk.corpus import wordnet
resources_dir = 'resources\\'
login_creds = ConfigParser.SafeConfigParser()
if os.path.isfile(resources_dir + 'login_creds.cfg'):
login_creds.read(resources_dir + 'login_creds.cfg')
else:
print "No logins... creating now"
new_login_creds = open(resources_dir + 'login_creds.cfg', 'w')
login_creds.write(new_login_creds)
new_login_creds.close()
def fb_worker(email, password):
messenger = Messenger(email, password)
messenger.listen()
return
def check_for_word(word, verblist):
    if word in verblist:
return True
target = wordnet.synsets(word)
for synonyms in target:
new_list = [str(x) for x in synonyms.lemma_names()]
if any(i in new_list for i in verblist):
return True
return False
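# Illustrative (result depends on the installed WordNet corpus): with
# verblist = ['open'], check_for_word('launch', verblist) returns True only if
# WordNet lists 'open' among the lemma names of some synset of 'launch'.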
if __name__ == '__main__':
use_speech = False
nlp_debug = False
jarvis = Assistant(use_speech)
jarvis.say('I have been fully loaded')
input = ''
while (input != 'Goodbye JARVIS'):
try:
input = jarvis.get_input()
if not input == '':
words = nltk.word_tokenize(input)
tagged = nltk.pos_tag(words)
verbs = []
proper_nouns = []
pronouns = []
has_question_word = False
has_question = False
for word in tagged:
if 'VB' in word[1]:
verbs.append(word[0].lower())
elif word[1] == 'NNP':
proper_nouns.append(word[0].lower())
elif 'PRP' in word[1]:
pronouns.append(word[0].lower())
elif word[1][0] == 'W':
has_question_word = True
has_question = has_question_word and len(pronouns) == 0
if nlp_debug:
print 'Tags: {}'.format(tagged)
print 'Verbs: {}'.format(verbs)
if not has_question:
if check_for_word('open', verbs):
jarvis.say(filemanager.try_open_executable(words, tagged))
                    elif check_for_word('respond', verbs):
if "facebook" in proper_nouns:
if not login_creds.has_section('Facebook'):
login_creds.add_section('Facebook')
login_creds.set('Facebook', 'email', raw_input('Enter your FB email: '))
login_creds.set('Facebook', 'password', raw_input('Enter your FB password: '))
                            with open(resources_dir + 'login_creds.cfg', 'wb') as configfile:
login_creds.write(configfile)
fb_process = multiprocessing.Process(target = fb_worker, args = (login_creds.get('Facebook', 'email'), login_creds.get('Facebook', 'password')))
fb_process.daemon = True
fb_process.start()
jarvis.say('Answering your Facebook messages.')
else:
jarvis.respond(input)
else:
if not jarvis.search_wolfram(input):
jarvis.respond(input)
except Exception as e:
print e
try:
fb_process.terminate()
fb_process.join()
except NameError:
pass
break
|
mancoast/CPythonPyc_test
|
fail/332_test_codecs.py
|
Python
|
gpl-3.0
| 91,385
| 0.001609
|
import _testcapi
import codecs
import io
import locale
import sys
import unittest
import warnings
from test import support
if sys.platform == 'win32':
VISTA_OR_LATER = (sys.getwindowsversion().major >= 6)
else:
VISTA_OR_LATER = False
try:
import ctypes
except ImportError:
ctypes = None
SIZEOF_WCHAR_T = -1
else:
SIZEOF_WCHAR_T = ctypes.sizeof(ctypes.c_wchar)
def coding_checker(self, coder):
def check(input, expect):
self.assertEqual(coder(input), (expect, len(input)))
return check
class Queue(object):
"""
queue: write bytes at one end, read bytes from the other end
"""
def __init__(self, buffer):
self._buffer = buffer
def write(self, chars):
self._buffer += chars
def read(self, size=-1):
if size<0:
s = self._buffer
self._buffer = self._buffer[:0] # make empty
return s
else:
s = self._buffer[:size]
self._buffer = self._buffer[size:]
return s
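    # e.g. q = Queue(b""); q.write(b"ab"); q.read(1) == b"a"; q.read() == b"b"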
class MixInCheckStateHandling:
def check_state_handling_decode(self, encoding, u, s):
for i in range(len(s)+1):
d = codecs.getincrementaldecoder(encoding)()
part1 = d.decode(s[:i])
state = d.getstate()
self.assertIsInstance(state[1], int)
# Check that the condition stated in the documentation for
# IncrementalDecoder.getstate() holds
if not state[1]:
# reset decoder to the default state without anything buffered
d.setstate((state[0][:0], 0))
                # Feeding the previous input may not produce any output
self.assertTrue(not d.decode(state[0]))
# The decoder must return to the same state
self.assertEqual(state, d.getstate())
# Create a new decoder and set it to the state
# we extracted from the old one
d = codecs.getincrementaldecoder(encoding)()
d.setstate(state)
part2 = d.decode(s[i:], True)
self.assertEqual(u, part1+part2)
def check_state_handling_encode(self, encoding, u, s):
for i in range(len(u)+1):
d = codecs.getincrementalencoder(encoding)()
part1 = d.encode(u[:i])
state = d.getstate()
d = codecs.getincrementalencoder(encoding)()
d.setstate(state)
part2 = d.encode(u[i:], True)
self.assertEqual(s, part1+part2)
class ReadTest(MixInCheckStateHandling):
def check_partial(self, input, partialresults):
# get a StreamReader for the encoding and feed the bytestring version
# of input to the reader byte by byte. Read everything available from
# the StreamReader and check that the results equal the appropriate
# entries from partialresults.
q = Queue(b"")
r = codecs.getreader(self.encoding)(q)
result = ""
for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
q.write(bytes([c]))
result += r.read()
self.assertEqual(result, partialresult)
# check that there's nothing left in the buffers
self.assertEqual(r.read(), "")
self.assertEqual(r.bytebuffer, b"")
# do the check again, this time using a incremental decoder
d = codecs.getincrementaldecoder(self.encoding)()
result = ""
for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
result += d.decode(bytes([c]))
self.assertEqual(result, partialresult)
# check that there's nothing left in the buffers
self.assertEqual(d.decode(b"", True), "")
self.assertEqual(d.buffer, b"")
# Check whether the reset method works properly
d.reset()
result = ""
for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
result += d.decode(bytes([c]))
self.assertEqual(result, partialresult)
# check that there's nothing left in the buffers
self.assertEqual(d.decode(b"", True), "")
self.assertEqual(d.buffer, b"")
# check iterdecode()
encoded = input.encode(self.encoding)
self.assertEqual(
input,
"".join(codecs.iterdecode([bytes([c]) for c in encoded], self.encoding))
)
def test_readline(self):
def getreader(input):
stream = io.BytesIO(input.encode(self.encoding))
return codecs.getreader(self.encoding)(stream)
def readalllines(input, keepends=True, size=None):
reader = getreader(input)
lines = []
while True:
line = reader.readline(size=size, keepends=keepends)
if not line:
break
lines.append(line)
return "|".join(lines)
s = "foo\nbar\r\nbaz\rspam\u2028eggs"
sexpected = "foo\n|bar\r\n|baz\r|spam\u2028|eggs"
sexpectednoends = "foo|bar|baz|spam|eggs"
self.assertEqual(readalllines(s, True), sexpected)
self.assertEqual(readalllines(s, False), sexpectednoends)
self.assertEqual(readalllines(s, True, 10), sexpected)
self.assertEqual(readalllines(s, False, 10), sexpectednoends)
# Test long lines (multiple calls to read() in readline())
vw = []
vwo = []
for (i, lineend) in enumerate("\n \r\n \r \u2028".split()):
            vw.append((i*200)*"\u3042" + lineend)
            vwo.append((i*200)*"\u3042")
self.assertEqual(readalllines("".join(vw), True), "".join(vw))
self.assertEqual(readalllines("".join(vw), False),"".join(vwo))
# Test lines where the first read might end with \r, so the
# reader has to look ahead whether this is a lone \r or a \r\n
for size in range(80):
for lineend in "\n \r\n \r \u2028".split():
s = 10*(size*"a" + lineend + "xxx\n")
reader = getreader(s)
for i in range(10):
self.assertEqual(
reader.readline(keepends=True),
size*"a" + lineend,
)
reader = getreader(s)
for i in range(10):
self.assertEqual(
reader.readline(keepends=False),
size*"a",
)
def test_bug1175396(self):
s = [
'<%!--===================================================\r\n',
' BLOG index page: show recent articles,\r\n',
' today\'s articles, or articles of a specific date.\r\n',
'========================================================--%>\r\n',
'<%@inputencoding="ISO-8859-1"%>\r\n',
'<%@pagetemplate=TEMPLATE.y%>\r\n',
'<%@import=import frog.util, frog%>\r\n',
'<%@import=import frog.objects%>\r\n',
'<%@import=from frog.storageerrors import StorageError%>\r\n',
'<%\r\n',
'\r\n',
'import logging\r\n',
'log=logging.getLogger("Snakelets.logger")\r\n',
'\r\n',
'\r\n',
'user=self.SessionCtx.user\r\n',
'storageEngine=self.SessionCtx.storageEngine\r\n',
'\r\n',
'\r\n',
'def readArticlesFromDate(date, count=None):\r\n',
' entryids=storageEngine.listBlogEntries(date)\r\n',
' entryids.reverse() # descending\r\n',
' if count:\r\n',
' entryids=entryids[:count]\r\n',
' try:\r\n',
' return [ frog.objects.BlogEntry.load(storageEngine, date, Id) for Id in entryids ]\r\n',
' except StorageError,x:\r\n',
' log.error("Error loading articles: "+str(x))\r\n',
' self.abort("cannot load articles")\r\n',
'\r\n',
'showdate=None\r\n',
'\r\n',
'arg=self.Request.getArg()\r\n',
'if arg=="today":\r\n',
|
Blizzard/s2protocol
|
s2protocol/diff.py
|
Python
|
mit
| 2,237
| 0.002235
|
#
# Protocol diffing tool from http://github.com/dsjoerg/s2protocol
#
# Usage: s2_cli.py --diff 38215,38749
#
import sys
import argparse
import pprint
from s2protocol.versions import build
def diff_things(typeinfo_index, thing_a, thing_b):
if type(thing_a) != type(thing_b):
print(
"typeinfo {} diff types: {} {}".format(
typeinfo_index, type(thing_a), type(thing_b)
)
)
return
if type(thing_a) == dict:
thing_a = thing_a.items()
thing_b = thing_b.items()
if type(thing_a) == list or type(thing_a) == tuple:
if len(thing_a) != len(thing_b):
print(
"typeinfo {} diff len: {} {}".format(
typeinfo_index, len(thing_a), len(thing_b)
)
)
else:
for ix in range(len(thing_a)):
diff_things(typeinfo_index, thing_a[ix], thing_b[ix])
elif thing_a != thing_b:
if type(thing_a) == int:
if (thing_a < 55 or thing_a - 1 != thing_b):
print(
"typeinfo {} diff number: {} {}".format(
typeinfo_index, thing_a, thing_b
)
)
else:
print(
"typeinfo {} diff string: {} {}".format(
typeinfo_index, thing_a, thing_b
)
)
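# e.g. diff_things(5, (1, 'x'), (1, 'y')) recurses into the tuples and prints
# "typeinfo 5 diff string: x y" for the mismatched element.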
def diff(protocol_a_ver, protocol_b_ver):
print(
"Comparing {} to {}".format(
protocol_a_ver, protocol_b_ver
)
)
protocol_a = build(protocol_a_ver)
    protocol_b = build(protocol_b_ver)
count_a = len(protocol_a.typeinfos)
count_b = len(protocol_b.typeinfos)
print("C
|
ount of typeinfos: {} {}".format(count_a, count_b))
for index in range(max(count_a, count_b)):
if index >= count_a:
print("Protocol {} missing typeinfo {}".format(protocol_a_ver, index))
continue
if index >= count_b:
print("Protocol {} missing typeinfo {}".format(protocol_b_ver, index))
continue
a = protocol_a.typeinfos[index]
b = protocol_b.typeinfos[index]
diff_things(index, a, b)
|
OCA/server-tools
|
base_report_auto_create_qweb/wizard/report_duplicate.py
|
Python
|
agpl-3.0
| 755
| 0.001325
|
# Authors: See README.RST for Contributors
# Copyright 2015-2017
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class IrActionsReportDuplicate(models.TransientModel):
    _name = "ir.actions.report.duplicate"
_description = "Duplicate Qweb report"
suffix = fields.Char(
string="Suffix", help="This suffix will be added to the report"
)
def duplicate_report(self):
self.ensure_one()
active_id = self.env.context.get("active_id")
model = self.env.context.get("active_
|
model")
if model:
record = self.env[model].browse(active_id)
record.with_context(suffix=self.suffix, enable_duplication=True).copy()
return {}
|
vivaxy/algorithms
|
python/problems/construct_the_rectangle.py
|
Python
|
mit
| 598
| 0.001672
|
"""
https://leetcode.com/problems/construct-the-rectangle/
https://leetcode.com/submissions/detail/107452214/
"""
import math
class Solution(object):
def constructRectangle(self, area):
"""
:type area: int
:rtype: List[int]
"""
W = int(math.sqrt(area))
while area % W != 0:
W -= 1
return [int(area / W), W]
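    # Worked example: area=12 -> W starts at int(sqrt(12)) == 3; 12 % 3 == 0,
    # so the result is [4, 3] (the most square-like [L, W] with L >= W).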
import unittest
class Test(unittest.TestCase):
def test(self):
solution = Solution()
        self.assertEqual(solution.constructRectangle(4), [2, 2])
if __name__ == '__main__':
unittest.main()
|
cschenck/blender_sim
|
fluid_sim_deps/blender-2.69/2.69/scripts/presets/camera/Nikon_D7000.py
|
Python
|
gpl-3.0
| 150
| 0
|
import bpy
bpy.context.object.data.sensor_width = 23.6
bpy.context.object.data.sensor_height = 15.6
bpy.context.object.data.sensor_fit = 'HORIZONTAL'
|