Dataset columns:
  repo_name: string (lengths 5 to 100)
  path: string (lengths 4 to 231)
  language: string (1 class)
  license: string (15 classes)
  size: int64 (6 to 947k)
  score: float64 (0 to 0.34)
  prefix: string (lengths 0 to 8.16k)
  middle: string (lengths 3 to 512)
  suffix: string (lengths 0 to 8.17k)
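The columns above describe a fill-in-the-middle style code corpus: each record carries repository metadata plus one source file split into prefix, middle, and suffix strings. A minimal sketch of consuming such records follows; the Hugging Face `datasets` library and the `records.jsonl` file name are assumptions for illustration, not part of this page.

    # Hypothetical loader for records with the schema above; assumes the
    # Hugging Face `datasets` library and a local JSON-lines export.
    from datasets import load_dataset

    rows = load_dataset("json", data_files="records.jsonl", split="train")
    for row in rows:
        # Reassemble the original file from its prefix/middle/suffix split.
        source = row["prefix"] + row["middle"] + row["suffix"]
        print(row["repo_name"], row["path"], row["license"], row["size"])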
bigodines/api-health
tests/test_verifier.py
Python
mit
1,105
0.00181
# -*- coding: utf-8 -*-
import unittest
import json

from api_health.verifier import Verifier


class JsonVerifier(unittest.TestCase):

    def test_constructor_should_be_smart_about_params(self):
        simple_json = u'{ "foo": "bar" }'
        json_dict = json.loads(simple_json)
        try:
            v1 = Verifier(simple_json)
            v2 = Verifier(json_dict)
        except:
            self.fail('Verifier() constructor should deal with both '
                      'string and object json')
        self.assertTrue(v1.has_property('foo'))
        self.assertTrue(v2.has_property('foo'))

    def test_should_check_for_json_property(self):
        simple_json = u'{ "foo": "bar" }'
        verifier = Verifier(simple_json)
        self.assertTrue(verifier.has_property('foo'))
        self.assertTrue(verifier.does_not_have_property('bu'))
        self.assertFalse(verifier.has_property('bleh'))

    def test_should_check_arrays(self):
        array_json = u'{ "foo": "bar", "baz": [ 1, 2, 3] }'
        verifier = Verifier(array_json)
        self.assertTrue(verifier.has_property("baz[1]"))
kaye64/gem
content/player.py
Python
gpl-3.0
838
0.001193
# This file is part of Gem.
#
# Gem is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Gem is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gem. If not, see <http://www.gnu.org/licenses/>.

from gem.api import Location
from enum import Enum

LOG_TAG = "player"


def player_position_update(player, location, warped):
    profile = player.profile
    profile.location = location
namunu/MBS_Patent
parser_test/test.py
Python
bsd-2-clause
962
0.008316
from cStringIO import StringIO
from datetime import datetime
from unidecode import unidecode
from handler import Patobj, PatentHandler
import re
import uuid
import xml.sax
import xml_util
import xml_driver

xml_string = 'ipg050104.xml'

xh = xml_driver.XMLHandler()
parser = xml_driver.make_parser()
parser.setContentHandler(xh)
parser.setFeature(xml_driver.handler.feature_external_ges, False)
l = xml.sax.xmlreader.Locator()
xh.setDocumentLocator(l)
#parser.parse(StringIO(xml_string))
parser.parse(xml_string)

print "parsing done"

#print type(xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=False))
print xh.root.claims.contents_of('claim', '', as_string=True, upper=False)
#print type(xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=True))
#print xh.root.us_bibliographic_data_grant.publication_reference.contents_of('document_id', '', as_string=True)
mbrubeck/servo
tests/wpt/web-platform-tests/webdriver/tests/get_window_rect/user_prompts.py
Python
mpl-2.0
2,153
0.000464
from tests.support.asserts import assert_error, assert_dialog_handled
from tests.support.fixtures import create_dialog
from tests.support.inline import inline

alert_doc = inline("<script>window.alert()</script>")


def get_window_rect(session):
    return session.transport.send(
        "GET", "session/{session_id}/window/rect".format(**vars(session)))


def test_handle_prompt_dismiss_and_notify():
    """TODO"""


def test_handle_prompt_accept_and_notify():
    """TODO"""


def test_handle_prompt_ignore():
    """TODO"""


def test_handle_prompt_accept(new_session, add_browser_capabilites):
    _, session = new_session(
        {"capabilities": {
            "alwaysMatch": add_browser_capabilites(
                {"unhandledPromptBehavior": "accept"})}})
    session.url = inline("<title>WD doc title</title>")

    create_dialog(session)("alert", text="dismiss #1", result_var="dismiss1")
    response = get_window_rect(session)
    assert response.status == 200
    assert_dialog_handled(session, "dismiss #1")

    create_dialog(session)("confirm", text="dismiss #2", result_var="dismiss2")
    response = get_window_rect(session)
    assert response.status == 200
    assert_dialog_handled(session, "dismiss #2")

    create_dialog(session)("prompt", text="dismiss #3", result_var="dismiss3")
    response = get_window_rect(session)
    assert response.status == 200
    assert_dialog_handled(session, "dismiss #3")


def test_handle_prompt_missing_value(session, create_dialog):
    session.url = inline("<title>WD doc title</title>")

    create_dialog("alert", text="dismiss #1", result_var="dismiss1")
    response = get_window_rect(session)
    assert_error(response, "unexpected alert open")
    assert_dialog_handled(session, "dismiss #1")

    create_dialog("confirm", text="dismiss #2", result_var="dismiss2")
    response = get_window_rect(session)
    assert_error(response, "unexpected alert open")
    assert_dialog_handled(session, "dismiss #2")

    create_dialog("prompt", text="dismiss #3", result_var="dismiss3")
    response = get_window_rect(session)
    assert_error(response, "unexpected alert open")
    assert_dialog_handled(session, "dismiss #3")
rismalrv/edx-platform
lms/djangoapps/courseware/testutils.py
Python
agpl-3.0
7,642
0.002355
""" Common test utilities for courseware functionality """ from abc import ABCMeta, abstractmethod from datetime import datetime import ddt from mock import patch from urllib import urlencode from lms.djangoapps.courseware.url_helpers import get_redirect_url from student.tests.factories import AdminFactory, UserFactory, CourseEnrollmentFactory from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore.django import modulestore from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls @ddt.ddt class RenderXBlockTestMixin(object): """ Mixin for testing the courseware.render_xblock function. It can be used for testing any higher-level endpoint that calls this method. """ __metaclass__ = ABCMeta # DOM elements that appear in the LMS Courseware, # but are excluded from the xBlock-only rendering. COURSEWARE_CHROME_HTML_ELEMENTS = [ '<ol class="course-tabs"', '<footer id="footer-openedx"', '<div class="window-wrap"', '<div class="preview-menu"', '<div class="container"' ] # DOM elements that appear in an xBlock, # but are excluded from the xBlock-only rendering. XBLOCK_REMOVED_HTML_ELEMENTS = [ '<div class="wrap-instructor-info"', ] @abstractmethod def get_response(self, url_encoded_params=None): """ Abstract method to get the response from the endpoint that is being tested. Arguments: url_encoded_params - URL encoded parameters that should be appended to the requested URL. """ pass # pragma: no cover def login(self): """ Logs in the test user. """ self.client.login(username=self.user.username, password='test') def setup_course(self, default_store=None): """ Helper method to create the course. """ if not default_store: default_store = self.store.default_modulestore.get_modulestore_type() with self.store.default_store(default_store): self.course = CourseFactory.create() # pylint: disable=attribute-defined-outside-init chapter = ItemFactory.create(parent=self.course, category='chapter') self.html_block = ItemFactory.create( # pylint: disable=attribute-defined-outside-init parent=chapter, category='html', data="<p>Test HTML Content<p>" ) def setup_user(self, admin=False, enroll=False, login=False): """ Helper method to create the user. """ self.user = AdminFactory() if admin else UserFactory() # pylint: disable=attribute-defined-outside-init if enroll: CourseEnrollmentFactory(user=self.user, course_id=self.course.id) if login: self.login() def verify_response(self, expected_response_code=200, url_params=None): """ Helper method that calls the endpoint, verifies the expected response code, and returns the response. """ if url_params: url_params = urlencode(url_params) response = self.get_response(url_params) if expected_response_code == 200: self.assertContains(response, self.html_block.data, status_code=expected_response_code) for chrome_element in [self.COURSEWARE_CHROME_HTML_ELEMENTS + self.XBLOCK_REMOVED_HTML_ELEMENTS]: self.assertNotContains(response, chrome_element) else: self.assertNotContains(response, self.html_block.data, status_code=expected_response_code) return response @ddt.data( (ModuleStoreEnum.Type.mongo, 7), (ModuleStoreEnum.Type.split, 5), ) @ddt.unpack def test_courseware_html(self, default_store, mongo_calls): """ To verify that the removal of courseware chrome elements is working, we include this test here to make sure the chrome elements that should be removed actually exist in the full courseware page. 
If this test fails, it's probably because the HTML template for courseware has changed and COURSEWARE_CHROME_HTML_ELEMENTS needs to be updated. """ with self.store.default_store(default_store): self.setup_course(default_store) self.setup_user(admin=True, enroll=True, login=True) with check_mongo_calls(mongo_calls): url = get_redirect_url(self.course.id, self.html_block.location) response = self.client.get(url) for chrome_element in self.COURSEWARE_CHROME_HTML_ELEMENTS: self.assertContains(response, chrome_element) @ddt.data( (ModuleStoreEnum.Type.mongo, 5), (ModuleStoreEnum.Type.split, 5), ) @ddt.unpack def test_success_enrolled_staff(self, default_store, mongo_calls): with self.store.default_store(default_store): self.setup_course(default_store) self.setup_user(admin=True, enroll=True, login=True) # The 5 mongoDB calls include calls for # Old Mongo: # (1) fill_in_run # (2) get_course in get_course_with_access # (3) get_item for HTML block in get_module_by_usage_id # (4) get_parent when loading HTML block # (5) edx_notes descriptor call to get_course # Split: # (1) course_index - bulk_operation call # (2) structure - get_course_with_access # (3) definition - get_course_with_access # (4) definition - HTML block # (5) definition - edx_notes decorator (original_get_html) with check_mongo_calls(mongo_calls):
self.verify_response() def test_success_unenrolled_staff(self): self.setup_course() self.setup_user(admin=True, enroll=False, login=True) self.verify_response() def test_success_enrolled_
student(self): self.setup_course() self.setup_user(admin=False, enroll=True, login=True) self.verify_response() def test_unauthenticated(self): self.setup_course() self.setup_user(admin=False, enroll=True, login=False) self.verify_response(expected_response_code=404) def test_unenrolled_student(self): self.setup_course() self.setup_user(admin=False, enroll=False, login=True) self.verify_response(expected_response_code=404) @patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False}) def test_fail_block_unreleased(self): self.setup_course() self.setup_user(admin=False, enroll=True, login=True) self.html_block.start = datetime.max modulestore().update_item(self.html_block, self.user.id) self.verify_response(expected_response_code=404) def test_fail_block_nonvisible(self): self.setup_course() self.setup_user(admin=False, enroll=True, login=True) self.html_block.visible_to_staff_only = True modulestore().update_item(self.html_block, self.user.id) self.verify_response(expected_response_code=404) def test_student_view_param(self): self.setup_course() self.setup_user(admin=False, enroll=True, login=True) self.verify_response(url_params={'view': 'student_view'}) def test_unsupported_view_param(self): self.setup_course() self.setup_user(admin=False, enroll=True, login=True) self.verify_response(url_params={'view': 'author_view'}, expected_response_code=400)
pierky/ripe-atlas-tools
ripe/atlas/tools/settings/__init__.py
Python
gpl-3.0
7,761
0
# Copyright (c) 2016 RIPE NCC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import collections
import copy
import os
import re

import yaml


class Configuration(object):
    """
    A singleton configuration class that's smart enough to create a config
    out of defaults + yaml
    """

    USER_CONFIG_DIR = os.path.join(
        os.path.expanduser("~"), ".config", "ripe-atlas-tools")
    USER_RC = os.path.join(USER_CONFIG_DIR, "rc")

    DEFAULT = {
        "authorisation": {
            "fetch": "",
            "fetch_aliases": {},
            "create": "",
        },
        "specification": {
            "af": 4,
            "description": "",
            "source": {
                "type": "area",
                "value": "WW",
                "requested": 50,
            },
            "times": {
                "one-off": True,
                "interval": None,
                "start": None,
                "stop": None,
            },
            "types": {
                "ping": {
                    "packets": 3,
                    "packet-interval": 1000,
                    "size": 48
                },
                "traceroute": {
                    "packets": 3,
                    "size": 48,
                    "protocol": "ICMP",
                    "dont-fragment": False,
                    "paris": 0,
                    "first-hop": 1,
                    "max-hops": 255,
                    "port": 80,
                    "destination-option-size": None,
                    "hop-by-hop-option-size": None,
                    "timeout": 4000
                },
                "sslcert": {
                    "port": 443
                },
                "ntp": {
                    "packets": 3,
                    "timeout": 4000
                },
                "dns": {
                    "set-cd-bit": False,
                    "set-do-bit": False,
                    "protocol": "UDP",
                    "query-class": "IN",
                    "query-type": "A",
                    "query-argument": None,
                    "set-nsid-bit": False,
                    "udp-payload-size": 512,
                    "set-rd-bit": True,
                    "retry": 0
                },
                "http": {
                    "header-bytes": 0,
                    "version": "1.1",
                    "method": "GET",
                    "port": 80,
                    "path": "/",
                    "query-string": None,
                    "user-agent": "RIPE ATLAS: https://atlas.ripe.net/",
                    "body-bytes": None,
                    "timing-verbosity": 0,
                },
            },
            "tags": {
                "ipv4": {
                    "ping": {"include": [], "exclude": []},
                    "traceroute": {"include": [], "exclude": []},
                    "dns": {"include": [], "exclude": []},
                    "sslcert": {"include": [], "exclude": []},
                    "http": {"include": [], "exclude": []},
                    "ntp": {"include": [], "exclude": []},
                    "all": {"include": ["system-ipv4-works"], "exclude": []},
                },
                "ipv6": {
                    "ping": {"include": [], "exclude": []},
                    "traceroute": {"include": [], "exclude": []},
                    "dns": {"include": [], "exclude": []},
                    "sslcert": {"include": [], "exclude": []},
                    "http": {"include": [], "exclude": []},
                    "ntp": {"include": [], "exclude": []},
                    "all": {"include": ["system-ipv6-works"], "exclude": []}
                }
            }
        },
        "ripe-ncc": {
            "endpoint": "https://atlas.ripe.net",
            "version": 0,
        }
    }

    def get(self):
        r = copy.deepcopy(self.DEFAULT)
        if os.path.exists(self.USER_RC):
            with open(self.USER_RC) as y:
                custom = yaml.load(y)
                if custom:
                    r = self.deep_update(r, custom)
        return r

    @classmethod
    def deep_update(cls, d, u):
        """
        Updates a dictionary with another dictionary, only it goes deep.
        Stolen from http://stackoverflow.com/questions/3232943/
        """
        for k, v in u.items():
            if isinstance(v, collections.Mapping):
                r = cls.deep_update(d.get(k, {}), v)
                d[k] = r
            else:
                d[k] = u[k]
        return d

    @staticmethod
    def write(config):
        """
        PyYaml is incapable of preserving comments, or even specifying them as
        an argument to `.dump()` (http://pyyaml.org/ticket/114), so we have to
        do some regex gymnastics here to make sure that the config file
        remains easy for n00bs to read.
        """
        template = os.path.join(
            os.path.dirname(__file__), "templates", "base.yaml")

        authorisation = re.compile("^authorisation:$", re.MULTILINE)
        tags = re.compile("^    tags:$", re.MULTILINE)
        specification = re.compile("^specification:$", re.MULTILINE)
        ripe = re.compile("^ripe-ncc:$", re.MULTILINE)

        with open(template) as t:
            payload = str(t.read()).format(
                payload=yaml.dump(
                    config,
                    default_flow_style=False
                )
            )

        payload = ripe.sub(
            "\n# Don't mess with these, or Bad Things may happen\n"
            "ripe-ncc:",
            payload
        )
        payload = authorisation.sub(
            "# Authorisation\n"
            "authorisation:",
            payload
        )
        payload = specification.sub(
            "\n# Measurement Creation\n"
            "specification:",
            payload
        )
        payload = tags.sub(
            "    # Tags added to probes selection\n"
            "    tags:",
            payload
        )

        with open(Configuration.USER_RC, "w") as rc:
            rc.write(payload)


conf = Configuration().get()
jat255/hyperspy
hyperspy/learn/ornmf.py
Python
gpl-3.0
13,811
0.00029
# -*- coding: utf-8 -*-
# Copyright 2007-2022 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.

import logging
from itertools import chain

import numpy as np
from scipy.stats import halfnorm

from hyperspy.external.progressbar import progressbar
from hyperspy.misc.math_tools import check_random_state

_logger = logging.getLogger(__name__)


def _thresh(X, lambda1, vmax):
    """Soft-thresholding with clipping."""
    res = np.abs(X) - lambda1
    np.maximum(res, 0.0, out=res)
    res *= np.sign(X)
    np.clip(res, -vmax, vmax, out=res)
    return res


def _mrdivide(B, A):
    """Solves xB = A as per Matlab."""
    if isinstance(B, np.ndarray):
        if len(B.shape) == 2 and B.shape[0] == B.shape[1]:
            # square array
            return np.linalg.solve(A.T, B.T).T
        else:
            # Set rcond default value to match numpy 1.14 default value with
            # previous numpy version
            rcond = np.finfo(float).eps * max(A.shape)
            return np.linalg.lstsq(A.T, B.T, rcond=rcond)[0].T
    else:
        return B / A


def _project(W):
    newW = W.copy()
    np.maximum(newW, 0, out=newW)
    sumsq = np.sqrt(np.sum(W ** 2, axis=0))
    np.maximum(sumsq, 1, out=sumsq)
    return _mrdivide(newW, np.diag(sumsq))


def _solveproj(v, W, lambda1, kappa=1, h=None, e=None, vmax=None):
    m, n = W.shape
    v = v.T
    if vmax is None:
        vmax = v.max()

    if len(v.shape) == 2:
        batch_size = v.shape[1]
        eshape = (m, batch_size)
        hshape = (n, batch_size)
    else:
        eshape = (m,)
        hshape = (n,)

    if h is None or h.shape != hshape:
        h = np.zeros(hshape)
    if e is None or e.shape != eshape:
        e = np.zeros(eshape)

    eta = kappa / np.linalg.norm(W, "fro") ** 2

    maxiter = 1e6
    iters = 0

    while True:
        iters += 1

        # Solve for h
        htmp = h
        h = h - eta * W.T @ (W @ h + e - v)
        np.maximum(h, 0.0, out=h)

        # Solve for e
        etmp = e
        e = _thresh(v - W @ h, lambda1, vmax)

        # Stop conditions
        stoph = np.linalg.norm(h - htmp, 2)
        stope = np.linalg.norm(e - etmp, 2)
        stop = max(stoph, stope) / m

        if stop < 1e-5 or iters > maxiter:
            break

    return h, e


class ORNMF:
    """Performs Online Robust NMF with missing or corrupted data.

    The ORNMF code is based on a transcription of the online proximal
    gradient descent (PGD) algorithm MATLAB code obtained from the authors
    of [Zhao2016]_. It has been updated to also include L2-normalization
    cost function that is able to deal with sparse corruptions and/or
    outliers slightly faster (please see ORPCA implementation for details).

    A further modification has been made to allow for a changing subspace W,
    where X ~= WH^T + E in the ORNMF framework.

    Read more in the :ref:`User Guide <mva.rnmf>`.

    References
    ----------
    .. [Zhao2016] Zhao, Renbo, and Vincent YF Tan. "Online nonnegative
        matrix factorization with outliers." Acoustics, Speech and Signal
        Processing (ICASSP), 2016 IEEE International Conference on.
        IEEE, 2016.

    """

    def __init__(
        self,
        rank,
        store_error=False,
        lambda1=1.0,
        kappa=1.0,
        method="PGD",
        subspace_learning_rate=1.0,
        subspace_momentum=0.5,
        random_state=None,
    ):
        """Creates Online Robust NMF instance that can learn a representation.

        Parameters
        ----------
        rank : int
            The rank of the representation (number of components/factors)
        store_error : bool, default False
            If True, stores the sparse error matrix.
        lambda1 : float
            Nuclear norm regularization parameter.
        kappa : float
            Step-size for projection solver.
        method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD'
            * 'PGD' - Proximal gradient descent
            * 'RobustPGD' - Robust proximal gradient descent
            * 'MomentumSGD' - Stochastic gradient descent with momentum
        subspace_learning_rate : float
            Learning rate for the 'MomentumSGD' method. Should be a
            float > 0.0
        subspace_momentum : float
            Momentum parameter for 'MomentumSGD' method, should be
            a float between 0 and 1.
        random_state : None or int or RandomState instance, default None
            Used to initialize the subspace on the first iteration.

        """
        self.n_features = None
        self.iterating = False
        self.t = 0

        if store_error:
            self.E = []
        else:
            self.E = None

        self.rank = rank
        self.robust = False
        self.subspace_tracking = False
        self.lambda1 = lambda1
        self.kappa = kappa
        self.subspace_learning_rate = subspace_learning_rate
        self.subspace_momentum = subspace_momentum
        self.random_state = check_random_state(random_state)

        # Check options are valid
        if method not in ("PGD", "RobustPGD", "MomentumSGD"):
            raise ValueError("'method' not recognised")

        if method == "RobustPGD":
            self.robust = True

        if method == "MomentumSGD":
            self.subspace_tracking = True
            if subspace_momentum < 0.0 or subspace_momentum > 1:
                raise ValueError("'subspace_momentum' must be a float between 0 and 1")

    def _setup(self, X):
        self.h, self.e, self.v = None, None, None

        if isinstance(X, np.ndarray):
            n, m = X.shape
            avg = np.sqrt(X.mean() / m)
            iterating = False
        else:
            x = next(X)
            m = len(x)
            avg = np.sqrt(x.mean() / m)
            X = chain([x], X)
            iterating = True

        self.n_features = m
        self.iterating = iterating

        self.W = halfnorm.rvs(
            size=(self.n_features, self.rank), random_state=self.random_state
        )
        self.W = np.abs(avg * self.W / np.sqrt(self.rank))
        self.H = []

        if self.subspace_tracking:
            self.vnew = np.zeros_like(self.W)
        else:
            self.A = np.zeros((self.rank, self.rank))
            self.B = np.zeros((self.n_features, self.rank))

        return X

    def fit(self, X, batch_size=None):
        """Learn NMF components from the data.

        Parameters
        ----------
        X : {numpy.ndarray, iterator}
            [n_samples x n_features] matrix of observations
            or an iterator that yields samples, each with n_features elements.
        batch_size : {None, int}
            If not None, learn the data in batches, each of batch_size
            samples or less.

        """
        if self.n_features is None:
            X = self._setup(X)

        num = None
        prod = np.outer
        if batch_size is not None:
            if not isinstance(X, np.ndarray):
                raise ValueError("can't batch iterating data")
            else:
                prod = np.dot
                length = X.shape[0]
                num = max(length // batch_size, 1)
                X = np.array_split(X, num, axis=0)

        if isinstance(X, np.ndarray):
            num = X.shape[0]
            X = iter(X)

        h, e = self.h, self.e

        for v in progressbar(X, leave=False, total=num, disable=num == 1):
            h, e = _solveproj(v, self.W, self.lambda1, self.kappa, h=h, e=e)
            self.v = v
            self.e = e
            se
Tinkerforge/brickv
src/brickv/plugin_system/plugins/uv_light_v2/__init__.py
Python
gpl-2.0
931
0
# -*- coding: utf-8 -*- """ UV Light 2.0 Plugin Copyright (C) 2018 Ishraq Ibne Ashraf <ishraq@tinkerforge.com> __init__.py: Package initialization This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307
, USA. """ from brickv.plugin_system.plugins.uv_light_v2.uv_l
ight_v2 import UVLightV2 device_class = UVLightV2
sassoftware/conary
conary/local/schema.py
Python
apache-2.0
36,311
0.003883
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import errno
import fcntl
import os
import sys
import itertools

from conary import trove, deps, errors, files, streams
from conary.dbstore import idtable, migration, sqlerrors

# Stuff related to SQL schema maintenance and migration

TROVE_TROVES_BYDEFAULT = 1 << 0
TROVE_TROVES_WEAKREF = 1 << 1

VERSION = 20


def resetTable(cu, name):
    try:
        cu.execute("DELETE FROM %s" % name, start_transaction = False)
        return True
    except Exception, e:
        return False


def _createVersions(db, cu = None):
    if "Versions" in db.tables:
        return
    if cu is None:
        cu = db.cursor()
    if idtable.createIdTable(db, "Versions", "versionId", "version"):
        cu.execute("INSERT INTO Versions (versionId, version) VALUES (0, NULL)")
        db.commit()
        db.loadSchema()


# Schema creation functions
def _createFlavors(db):
    if "Flavors" in db.tables:
        return
    cu = db.cursor()
    idtable.createIdTable(db, "Flavors", "flavorId", "flavor")
    cu.execute("SELECT FlavorID from Flavors")
    if cu.fetchone() == None:
        # reserve flavor 0 for "no flavor information"
        cu.execute("INSERT INTO Flavors VALUES (0, NULL)")
    idtable.createMappingTable(db, "DBFlavorMap", "instanceId", "flavorId")
    db.commit()
    db.loadSchema()


def createDBTroveFiles(db):
    if "DBTroveFiles" in db.tables:
        return
    cu = db.cursor()
    _createVersions(db, cu)
    cu.execute("""
    CREATE TABLE DBTroveFiles(
        streamId        %(PRIMARYKEY)s,
        pathId          BINARY(16),
        versionId       INTEGER,
        path            %(STRING)s,
        fileId          BINARY(20),
        instanceId      INTEGER,
        isPresent       INTEGER,
        stream          BLOB
    )""" % db.keywords)
    cu.execute("CREATE INDEX DBTroveFilesIdx ON DBTroveFiles(fileId)")
    cu.execute("CREATE INDEX DBTroveFilesInstanceIdx2 ON DBTroveFiles(instanceId, pathId)")
    cu.execute("CREATE INDEX DBTroveFilesPathIdx ON DBTroveFiles(path)")

    idtable.createIdTable(db, "Tags", "tagId", "tag")
    cu.execute("""
    CREATE TABLE DBFileTags(
        streamId        INTEGER,
        tagId           INTEGER
    )""")
    db.commit()
    db.loadSchema()


def createInstances(db):
    if "Instances" in db.tables:
        return
    cu = db.cursor()
    _createVersions(db, cu)
    cu.execute("""
    CREATE TABLE Instances(
        instanceId      %(PRIMARYKEY)s,
        troveName       %(STRING)s,
        versionId       INTEGER,
        flavorId        INTEGER,
        timeStamps      %(STRING)s,
        isPresent       INTEGER,
        pinned          BOOLEAN
    )""" % db.keywords)
    cu.execute("CREATE INDEX InstancesNameIdx ON Instances(troveName)")
    cu.execute("CREATE UNIQUE INDEX InstancesIdx ON "
               "Instances(troveName, versionId, flavorId)")
    db.commit()
    db.loadSchema()


def _createTroveTroves(db):
    if "TroveTroves" in db.tables:
        return
    cu = db.cursor()
    cu.execute("""
    CREATE TABLE TroveTroves(
        instanceId      INTEGER NOT NULL,
        includedId      INTEGER NOT NULL,
        flags           INTEGER,
        inPristine      BOOLEAN
    )""")
    # this index is so we can quickly tell what troves are needed by another trove
    cu.execute("CREATE INDEX TroveTrovesIncludedIdx ON TroveTroves(includedId)")
    # This index is used to enforce that TroveTroves only contains
    # unique TroveTrove (instanceId, includedId) pairs.
    cu.execute("CREATE UNIQUE INDEX TroveTrovesInstanceIncluded_uq ON "
               "TroveTroves(instanceId,includedId)")
    db.commit()
    db.loadSchema()


def createTroveInfo(db):
    if "TroveInfo" in db.tables:
        return
    cu = db.cursor()
    cu.execute("""
    CREATE TABLE TroveInfo(
        instanceId      INTEGER NOT NULL,
        infoType        INTEGER NOT NULL,
        data            %(MEDIUMBLOB)s
    )""" % db.keywords)
    cu.execute("CREATE INDEX TroveInfoIdx ON TroveInfo(instanceId)")
    cu.execute("CREATE INDEX TroveInfoTypeIdx ON TroveInfo(infoType, data)")
    cu.execute("CREATE INDEX TroveInfoInstTypeIdx ON TroveInfo(instanceId, infoType)")
    db.commit()
    db.loadSchema()


def createMetadata(db):
    commit = False
    cu = db.cursor()
    _createVersions(db, cu)
    if 'Metadata' not in db.tables:
        cu.execute("""
        CREATE TABLE Metadata(
            metadataId      %(PRIMARYKEY)s,
            itemId          INTEGER NOT NULL,
            versionId       INTEGER NOT NULL,
            branchId        INTEGER NOT NULL,
            timeStamp       NUMERIC(13,3) NOT NULL
        )""" % db.keywords)
        commit = True
    if 'MetadataItems' not in db.tables:
        cu.execute("""
        CREATE TABLE MetadataItems(
            metadataId      INTEGER NOT NULL,
            class           INTEGER NOT NULL,
            data            TEXT NOT NULL,
            language        VARCHAR(254) NOT NULL DEFAULT 'C'
        )""")
        cu.execute("CREATE INDEX MetadataItemsIdx ON MetadataItems(metadataId)")
        commit = True
    if commit:
        db.commit()
        db.loadSchema()


def createDataStore(db):
    if "DataStore" in db.tables:
        return
    cu = db.cursor()
    cu.execute("""
    CREATE TABLE DataStore(
        hash    BINARY(20) NOT NULL,
        count   INTEGER,
        data    BLOB
    )""")
    cu.execute("CREATE INDEX DataStoreIdx ON DataStore(hash)")
    db.commit()
    db.loadSchema()


def createDatabaseAttributes(db):
    if "DatabaseAttributes" in db.tables:
        return
    cu = db.cursor()
    cu.execute("""
    CREATE TABLE DatabaseAttributes(
        id      %(PRIMARYKEY)s,
        name    %(STRING)s,
        value   %(STRING)s
    )""" % db.keywords)
    cu.execute("CREATE UNIQUE INDEX DatabaseAttributesNameIdx "
               "ON DatabaseAttributes(name)")
    cu.execute("INSERT INTO DatabaseAttributes (name, value) "
               "VALUES ('transaction counter', '0')")
    db.commit()
    db.loadSchema()


def _createDepTable(db, cu, name, isTemp):
    d = {"tmp" : "", "name" : name}
    startTrans = not isTemp
    if isTemp:
        if name in db.tempTables:
            resetTable(cu, name)
            return False
        d['tmp'] = 'TEMPORARY'

    cu.execute("""
    CREATE %(tmp)s TABLE %(name)s(
        depId       %%(PRIMARYKEY)s,
        class       INTEGER NOT NULL,
        name        VARCHAR(254) NOT NULL,
        flag        VARCHAR(254) NOT NULL
    ) %%(TABLEOPTS)s""" % d % db.keywords, start_transaction = (not isTemp))
    cu.execute("CREATE UNIQUE INDEX %sIdx ON %s(class, name, flag)" % (name, name),
               start_transaction = startTrans)
    if isTemp:
        db.tempTables[name] = True


def _createRequiresTable(db, cu, name, isTemp):
    d = {"tmp" : "", "name" : name, "constraint" : "", "tmpCol" : ""}
    startTrans = not isTemp
    if isTemp:
        if name in db.tempTables:
            resetTable(cu, name)
            return False
        d['tmp'] = 'TEMPORARY'
        d['tmpCol'] = ',satisfied INTEGER DEFAULT 0'
    else:
        d['constraint'] = """,
        CONSTRAINT %(name)s_instanceId_fk
            FOREIGN KEY (instanceId) REFERENCES Instances(instanceId)
            ON DELETE RESTRICT ON UPDATE CASCADE,
        CONSTRAINT %(name)s_depId_fk
            FOREIGN KEY (depId) REFERENCES Dependencies(depId)
            ON DELETE RESTRICT ON UPDATE CASCADE
        """ % d
    cu.execute("""
    CREATE %(tmp)s TABL
JulienMcJay/eclock
windows/Python27/Lib/site-packages/pywin32-218-py2.7-win32.egg/win32service.py
Python
gpl-2.0
283
0.035336
def __bootstrap__():
    global __bootstrap__, __loader__, __file__
    import sys, pkg_resources, imp
    __file__ = pkg_resources.resource_filename(__name__, 'win32service.pyd')
    __loader__ = None
    del __bootstrap__, __loader__
    imp.load_dynamic(__name__, __file__)


__bootstrap__()
reyoung/Paddle
python/paddle/fluid/layers/ops.py
Python
apache-2.0
3,701
0.00027
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
from .layer_function_generator import generate_layer_fn, generate_layer_fn_noattr
from .. import core
from ..framework import convert_np_dtype_to_dtype_

__activations_noattr__ = [
    'sigmoid',
    'logsigmoid',
    'exp',
    'tanh',
    'tanh_shrink',
    'softshrink',
    'sqrt',
    'abs',
    'ceil',
    'floor',
    'cos',
    'sin',
    'round',
    'reciprocal',
    'square',
    'softplus',
    'softsign',
]

__all__ = []

for _OP in set(__all__):
    globals()[_OP] = generate_layer_fn(_OP)

# It is a hot fix in some unittest using:
#   fluid.layers.scale(x=x, scale=10.0, out=out_var)
# e.g.: test_program_code.py, test_dist_train.py
globals()['_scale'] = generate_layer_fn('scale')

globals()['_elementwise_div'] = generate_layer_fn('elementwise_div')

__all__ += __activations_noattr__

for _OP in set(__activations_noattr__):
    globals()[_OP] = generate_layer_fn_noattr(_OP)

__all__ += ["uniform_random"]

_uniform_random_ = generate_layer_fn('uniform_random')


def uniform_random(shape, dtype=None, min=None, max=None, seed=None):
    locals_var = locals().keys()
    if not isinstance(dtype, core.VarDesc.VarType):
        dtype = convert_np_dtype_to_dtype_(dtype)
    kwargs = dict()
    for name in locals_var:
        val = locals()[name]
        if val is not None:
            kwargs[name] = val
    return _uniform_random_(**kwargs)

uniform_random.__doc__ = _uniform_random_.__doc__ + """
Examples:

    >>> result = fluid.layers.uniform_random(shape=[32, 784])
"""

__all__ += ['hard_shrink']

_hard_shrink_ = generate_layer_fn('hard_shrink')


def hard_shrink(x, threshold=None):
    locals_var = locals().keys()
    kwargs = dict()
    for name in locals_var:
        val = locals()[name]
        if val is not None:
            kwargs[name] = val
    return _hard_shrink_(**kwargs)

hard_shrink.__doc__ = _hard_shrink_.__doc__ + """
Examples:

    >>> data = fluid.layers.data(name="input", shape=[784])
    >>> result = fluid.layers.hard_shrink(x=data, threshold=0.3)
"""

__all__ += ['cumsum']

_cum_sum_ = generate_layer_fn('cumsum')


def cumsum(x, axis=None, exclusive=None, reverse=None):
    locals_var = locals().keys()
    kwargs = dict()
    for name in locals_var:
        val = locals()[name]
        if val is not None:
            kwargs[name] = val
    return _cum_sum_(**kwargs)

cumsum.__doc__ = _cum_sum_.__doc__ + """
Examples:

    >>> data = fluid.layers.data(name="input", shape=[32, 784])
    >>> result = fluid.layers.cumsum(data, axis=0)
"""

__all__ += ['thresholded_relu']

_thresholded_relu_ = generate_layer_fn('thresholded_relu')


def thresholded_relu(x, threshold=None):
    locals_var = locals().keys()
    kwargs = dict()
    for name in locals_var:
        val = locals()[name]
        if val is not None:
            kwargs[name] = val
    return _thresholded_relu_(**kwargs)

thresholded_relu.__doc__ = _thresholded_relu_.__doc__ + """
Examples:

    >>> data = fluid.layers.data(name="input", shape=[1])
    >>> result = fluid.layers.thresholded_relu(data, threshold=0.4)
"""
Huyuwei/tvm
tests/webgl/test_static_webgl_library.py
Python
apache-2.0
2,490
0.001606
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Create a static WebGL library and run it in the browser."""

from __future__ import absolute_import, print_function

import os, shutil, SimpleHTTPServer, SocketServer
import tvm
from tvm.contrib import emscripten, util
import numpy as np

def try_static_webgl_library():
    curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))

    # Change to lib/ which contains "libtvm_runtime.bc".
    os.chdir(os.path.join(curr_path, "../../lib"))

    # Create OpenGL module.
    n = tvm.var("n")
    A = tvm.placeholder((n,), name='A', dtype="float")
    B = tvm.compute((n,), lambda *i: A[i], name="B")
    s = tvm.create_schedule(B.op)
    s[B].opengl()

    target_host = "llvm -target=asmjs-unknown-emscripten -system-lib"
    f = tvm.build(s, [A, B], name="identity", target="opengl",
                  target_host=target_host)

    # Create a JS library that contains both the module and the tvm runtime.
    path_dso = "identity_static.js"
    f.export_library(path_dso, emscripten.create_js, options=[
        "-s", "USE_GLFW=3",
        "-s", "USE_WEBGL2=1",
        "-lglfw",
    ])

    # Create "tvm_runtime.js" and "identity_static.html" in lib/
    shutil.copyfile(os.path.join(curr_path, "../../web/tvm_runtime.js"),
                    "tvm_runtime.js")
    shutil.copyfile(os.path.join(curr_path, "test_static_webgl_library.html"),
                    "identity_static.html")

    port = 8080
    handler = SimpleHTTPServer.SimpleHTTPRequestHandler
    httpd = SocketServer.TCPServer(("", port), handler)
    print("Please open http://localhost:" + str(port) + "/identity_static.html")
    httpd.serve_forever()

if __name__ == "__main__":
    try_static_webgl_library()
plotly/plotly.py
packages/python/plotly/plotly/validators/scatter/marker/colorbar/_xpad.py
Python
mit
460
0.002174
import _plotly_utils.basevalidators


class XpadValidator(_plotly_utils.basevalidators.NumberValidator):
    def __init__(
        self, plotly_name="xpad", parent_name="scatter.marker.colorbar", **kwargs
    ):
        super(XpadValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "colorbars"),
            min=kwargs.pop("min", 0),
            **kwargs
        )
lmittmann/clr
style/__init__.py
Python
mit
533
0
import sys

import pkg_resources

from style.styled_string_builder import _StyledStringBuilder

try:
    __version__ = pkg_resources.get_distribution('style').version
except Exception:
    __version__ = 'unknown'

_enabled = sys.stdout.isatty()
if '--color' in sys.argv:
    _enabled = True
elif '--no-color' in sys.argv:
    _enabled = False

styled_string_builder = _StyledStringBuilder([], True)
styled_string_builder.enabled = _enabled
styled_string_builder.__version__ = __version__

sys.modules[__name__] = styled_string_builder
jpetto/olympia
src/olympia/addons/tests/test_decorators.py
Python
bsd-3-clause
5,023
0
from django import http

import mock
from nose.tools import eq_

from olympia.amo.tests import TestCase
from olympia.addons import decorators as dec
from olympia.addons.models import Addon


class TestAddonView(TestCase):

    def setUp(self):
        super(TestAddonView, self).setUp()
        self.addon = Addon.objects.create(slug='x', type=1)
        self.func = mock.Mock()
        self.func.return_value = mock.sentinel.OK
        self.func.__name__ = 'mock_function'
        self.view = dec.addon_view(self.func)
        self.request = mock.Mock()
        self.slug_path = '/addon/%s/reviews' % self.addon.slug
        self.request.path = self.id_path = '/addon/%s/reviews' % self.addon.id
        self.request.GET = {}

    def test_301_by_id(self):
        res = self.view(self.request, str(self.addon.id))
        self.assert3xx(res, self.slug_path, 301)

    def test_slug_replace_no_conflict(self):
        self.request.path = '/addon/{id}/reviews/{id}345/path'.format(
            id=self.addon.id)
        res = self.view(self.request, str(self.addon.id))
        self.assert3xx(res, '/addon/{slug}/reviews/{id}345/path'.format(
            id=self.addon.id, slug=self.addon.slug), 301)

    def test_301_with_querystring(self):
        self.request.GET = mock.Mock()
        self.request.GET.urlencode.return_value = 'q=1'
        res = self.view(self.request, str(self.addon.id))
        self.assert3xx(res, self.slug_path + '?q=1', 301)

    def test_200_by_slug(self):
        res = self.view(self.request, self.addon.slug)
        eq_(res, mock.sentinel.OK)

    def test_404_by_id(self):
        with self.assertRaises(http.Http404):
            self.view(self.request, str(self.addon.id * 2))

    def test_404_by_slug(self):
        with self.assertRaises(http.Http404):
            self.view(self.request, self.addon.slug + 'xx')

    def test_alternate_qs_301_by_id(self):
        def qs():
            return Addon.objects.filter(type=1)

        view = dec.addon_view_factory(qs=qs)(self.func)
        res = view(self.request, str(self.addon.id))
        self.assert3xx(res, self.slug_path, 301)

    def test_alternate_qs_200_by_slug(self):
        def qs():
            return Addon.objects.filter(type=1)

        view = dec.addon_view_factory(qs=qs)(self.func)
        res = view(self.request, self.addon.slug)
        eq_(res, mock.sentinel.OK)

    def test_alternate_qs_404_by_id(self):
        def qs():
            return Addon.objects.filter(type=2)

        view = dec.addon_view_factory(qs=qs)(self.func)
        with self.assertRaises(http.Http404):
            view(self.request, str(self.addon.id))

    def test_alternate_qs_404_by_slug(self):
        def qs():
            return Addon.objects.filter(type=2)

        view = dec.addon_view_factory(qs=qs)(self.func)
        with self.assertRaises(http.Http404):
            view(self.request, self.addon.slug)

    def test_addon_no_slug(self):
        app = Addon.objects.create(type=1, name='xxxx')
        res = self.view(self.request, app.slug)
        eq_(res, mock.sentinel.OK)

    def test_slug_isdigit(self):
        app = Addon.objects.create(type=1, name='xxxx')
        app.update(slug=str(app.id))
        r = self.view(self.request, app.slug)
        eq_(r, mock.sentinel.OK)
        request, addon = self.func.call_args[0]
        eq_(addon, app)


class TestAddonViewWithUnlisted(TestAddonView):

    def setUp(self):
        super(TestAddonViewWithUnlisted, self).setUp()
        self.view = dec.addon_view_factory(
            qs=Addon.with_unlisted.all)(self.func)

    @mock.patch('olympia.access.acl.check_unlisted_addons_reviewer',
                lambda r: False)
    @mock.patch('olympia.access.acl.check_addon_ownership',
                lambda *args, **kwargs: False)
    def test_unlisted_addon(self):
        """Return a 404 for non authorized access."""
        self.addon.update(is_listed=False)
        with self.assertRaises(http.Http404):
            self.view(self.request, self.addon.slug)

    @mock.patch('olympia.access.acl.check_unlisted_addons_reviewer',
                lambda r: False)
    @mock.patch('olympia.access.acl.check_addon_ownership',
                lambda *args, **kwargs: True)
    def test_unlisted_addon_owner(self):
        """Addon owners have access."""
        self.addon.update(is_listed=False)
        assert self.view(self.request, self.addon.slug) == mock.sentinel.OK
        request, addon = self.func.call_args[0]
        assert addon == self.addon

    @mock.patch('olympia.access.acl.check_unlisted_addons_reviewer',
                lambda r: True)
    @mock.patch('olympia.access.acl.check_addon_ownership',
                lambda *args, **kwargs: False)
    def test_unlisted_addon_unlisted_admin(self):
        """Unlisted addon reviewers have access."""
        self.addon.update(is_listed=False)
        assert self.view(self.request, self.addon.slug) == mock.sentinel.OK
        request, addon = self.func.call_args[0]
        assert addon == self.addon
PrzemekBurczyk/dalvik-compiler
src/items/string_id_item.py
Python
mit
347
0.005764
'''
Created on 3 cze 2014

@author: Przemek
'''
from src.items.bytes import Bytes
from src.parser.measurable import Measurable


class StringIdItem(Measurable):
    '''
    classdocs
    '''

    def __init__(self, parent):
        '''
        Constructor
        '''
        Measurable.__init__(self, parent)
        self._data = Bytes(self, 4)
superstack/nova
nova/tests/api/openstack/__init__.py
Python
apache-2.0
3,850
0.000519
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import webob.dec

from nova import test
from nova import context
from nova import flags
from nova.api.openstack.limits import RateLimitingMiddleware
from nova.api.openstack.common import limited
from nova.tests.api.openstack import fakes
from webob import Request

FLAGS = flags.FLAGS


@webob.dec.wsgify
def simple_wsgi(req):
    return ""


class RateLimitingMiddlewareTest(test.TestCase):

    def test_get_action_name(self):
        middleware = RateLimitingMiddleware(simple_wsgi)

        def verify(method, url, action_name):
            req = Request.blank(url)
            req.method = method
            action = middleware.get_action_name(req)
            self.assertEqual(action, action_name)

        verify('PUT', '/servers/4', 'PUT')
        verify('DELETE', '/servers/4', 'DELETE')
        verify('POST', '/images/4', 'POST')
        verify('POST', '/servers/4', 'POST servers')
        verify('GET', '/foo?a=4&changes-since=never&b=5', 'GET changes-since')
        verify('GET', '/foo?a=4&monkeys-since=never&b=5', None)
        verify('GET', '/servers/4', None)
        verify('HEAD', '/servers/4', None)

    def exhaust(self, middleware, method, url, username, times):
        req = Request.blank(url, dict(REQUEST_METHOD=method),
                            headers={'X-Auth-User': username})
        req.environ['nova.context'] = context.RequestContext(username,
                                                             username)
        for i in range(times):
            resp = req.get_response(middleware)
            self.assertEqual(resp.status_int, 200)
        resp = req.get_response(middleware)
        self.assertEqual(resp.status_int, 413)
        self.assertTrue('Retry-After' in resp.headers)

    def test_single_action(self):
        middleware = RateLimitingMiddleware(simple_wsgi)
        self.exhaust(middleware, 'DELETE', '/servers/4', 'usr1', 100)
        self.exhaust(middleware, 'DELETE', '/servers/4', 'usr2', 100)

    def test_POST_servers_action_implies_POST_action(self):
        middleware = RateLimitingMiddleware(simple_wsgi)
        self.exhaust(middleware, 'POST', '/servers/4', 'usr1', 10)
        self.exhaust(middleware, 'POST', '/images/4', 'usr2', 10)
        self.assertTrue(set(middleware.limiter._levels) ==
                        set(['usr1:POST', 'usr1:POST servers', 'usr2:POST']))

    def test_POST_servers_action_correctly_ratelimited(self):
        middleware = RateLimitingMiddleware(simple_wsgi)
        # Use up all of our "POST" allowance for the minute, 5 times
        for i in range(5):
            self.exhaust(middleware, 'POST', '/servers/4', 'usr1', 10)
            # Reset the 'POST' action counter.
            del middleware.limiter._levels['usr1:POST']
        # All 50 daily "POST servers" actions should be all used up
        self.exhaust(middleware, 'POST', '/servers/4', 'usr1', 0)

    def test_proxy_ctor_works(self):
        middleware = RateLimitingMiddleware(simple_wsgi)
        self.assertEqual(middleware.limiter.__class__.__name__, "Limiter")
        middleware = RateLimitingMiddleware(simple_wsgi,
                                            service_host='foobar')
        self.assertEqual(middleware.limiter.__class__.__name__, "WSGIAppProxy")
Micronaet/micronaet-quality
quality/etl/errata_corrige.py
Python
agpl-3.0
1,215
0.014815
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Modules used for ETL - Create User
# Modules required:
import os
import xmlrpclib, sys, csv, ConfigParser
from datetime import datetime

# Set up parameters (for connection to Open ERP Database) *********************
config = ConfigParser.ConfigParser()
file_config = os.path.expanduser('~/ETL/generalfood/openerp.cfg')
config.read([file_config])
dbname = config.get('dbaccess', 'dbname')
user = config.get('dbaccess', 'user')
pwd = config.get('dbaccess', 'pwd')
server = config.get('dbaccess', 'server')
port = config.get('dbaccess', 'port')  # verify if it's necessary: getint
separator = eval(config.get('dbaccess', 'separator'))  # test

# XMLRPC connection for autentication (UID) and proxy
sock = xmlrpclib.ServerProxy(
    'http://%s:%s/xmlrpc/common' % (server, port), allow_none=True)
uid = sock.login(dbname, user, pwd)
sock = xmlrpclib.ServerProxy(
    'http://%s:%s/xmlrpc/object' % (server, port), allow_none=True)

if len(sys.argv) != 2:
    print "Use: errata_corrige parameters\n parameters: partner"
    sys.exit()

if sys.argv[1] == 'partner':
    result = sock.execute(
        dbname, uid, pwd, "quality.claim", "correct_parent_partner")
    print "Partner updated"
Cheaterman/kivy
examples/3Drendering/main.py
Python
mit
2,405
0
'''
3D Rotating Monkey Head
========================

This example demonstrates using OpenGL to display a rotating monkey head.
This includes loading a Blender OBJ file, shaders written in OpenGL's Shading
Language (GLSL), and using scheduled callbacks.

The monkey.obj file is an OBJ file output from the Blender free 3D creation
software. The file is text, listing vertices and faces and is loaded using a
class in the file objloader.py. The file simple.glsl is a simple vertex and
fragment shader written in GLSL.
'''

from kivy.app import App
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.uix.widget import Widget
from kivy.resources import resource_find
from kivy.graphics.transformation import Matrix
from kivy.graphics.opengl import *
from kivy.graphics import *
from objloader import ObjFile


class Renderer(Widget):
    def __init__(self, **kwargs):
        self.canvas = RenderContext(compute_normal_mat=True)
        self.canvas.shader.source = resource_find('simple.glsl')
        self.scene = ObjFile(resource_find("monkey.obj"))
        super(Renderer, self).__init__(**kwargs)
        with self.canvas:
            self.cb = Callback(self.setup_gl_context)
            PushMatrix()
            self.setup_scene()
            PopMatrix()
            self.cb = Callback(self.reset_gl_context)
        Clock.schedule_interval(self.update_glsl, 1 / 60.)

    def setup_gl_context(self, *args):
        glEnable(GL_DEPTH_TEST)

    def reset_gl_context(self, *args):
        glDisable(GL_DEPTH_TEST)

    def update_glsl(self, delta):
        asp = self.width / float(self.height)
        proj = Matrix().view_clip(-asp, asp, -1, 1, 1, 100, 1)
        self.canvas['projection_mat'] = proj
        self.canvas['diffuse_light'] = (1.0, 1.0, 0.8)
        self.canvas['ambient_light'] = (0.1, 0.1, 0.1)
        self.rot.angle += delta * 100

    def setup_scene(self):
        Color(1, 1, 1, 1)
        PushMatrix()
        Translate(0, 0, -3)
        self.rot = Rotate(1, 0, 1, 0)
        m = list(self.scene.objects.values())[0]
        UpdateNormalMatrix()
        self.mesh = Mesh(
            vertices=m.vertices,
            indices=m.indices,
            fmt=m.vertex_format,
            mode='triangles',
        )
        PopMatrix()


class RendererApp(App):
    def build(self):
        return Renderer()


if __name__ == "__main__":
    RendererApp().run()
alexgorban/models
official/nlp/xlnet/xlnet_config.py
Python
apache-2.0
6,110
0.003764
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions used in XLNet model."""

from __future__ import absolute_import
from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function

import json
import os

import tensorflow as tf


def create_run_config(is_training, is_finetune, flags):
  """Helper function for creating RunConfig."""
  kwargs = dict(
      is_training=is_training,
      use_tpu=flags.use_tpu,
      dropout=flags.dropout,
      dropout_att=flags.dropout_att,
      init_method=flags.init_method,
      init_range=flags.init_range,
      init_std=flags.init_std,
      clamp_len=flags.clamp_len)

  if not is_finetune:
    kwargs.update(dict(
        mem_len=flags.mem_len,
        reuse_len=flags.reuse_len,
        bi_data=flags.bi_data,
        clamp_len=flags.clamp_len,
        same_length=flags.same_length))

  return RunConfig(**kwargs)


# TODO(hongkuny): refactor XLNetConfig and RunConfig.
class XLNetConfig(object):
  """Configs for XLNet model.

  XLNetConfig contains hyperparameters that are specific to a model
  checkpoint; i.e., these hyperparameters should be the same between
  pretraining and finetuning.

  The following hyperparameters are defined:
    n_layer: int, the number of layers.
    d_model: int, the hidden size.
    n_head: int, the number of attention heads.
    d_head: int, the dimension size of each attention head.
    d_inner: int, the hidden size in feed-forward layers.
    ff_activation: str, "relu" or "gelu".
    untie_r: bool, whether to untie the biases in attention.
    n_token: int, the vocab size.
  """

  def __init__(self, FLAGS=None, json_path=None, args_dict=None):
    """Constructing an XLNetConfig.

    One of FLAGS or json_path should be provided.

    Args:
      FLAGS: An FLAGS instance.
      json_path: A path to a json config file.
      args_dict: A dict for args.
    """

    assert FLAGS is not None or json_path is not None or args_dict is not None

    self.keys = ['n_layer', 'd_model', 'n_head', 'd_head', 'd_inner',
                 'ff_activation', 'untie_r', 'n_token']

    if FLAGS is not None:
      self.init_from_flags(FLAGS)

    if json_path is not None:
      self.init_from_json(json_path)

    if args_dict is not None:
      self.init_from_dict(args_dict)

  def init_from_dict(self, args_dict):
    """Constructs a `BertConfig` from a Python dictionary of parameters."""
    for key in self.keys:
      setattr(self, key, args_dict[key])

  def init_from_flags(self, flags):
    for key in self.keys:
      setattr(self, key, getattr(flags, key))

  def init_from_json(self, json_path):
    with tf.io.gfile.GFile(json_path) as f:
      json_data = json.load(f)
      self.init_from_dict(json_data)

  def to_json(self, json_path):
    """Save XLNetConfig to a json file."""
    json_data = {}
    for key in self.keys:
      json_data[key] = getattr(self, key)

    json_dir = os.path.dirname(json_path)
    if not tf.io.gfile.exists(json_dir):
      tf.io.gfile.makedirs(json_dir)
    with tf.io.gfile.GFile(json_path, 'w') as f:
      json.dump(json_data, f, indent=4, sort_keys=True)


class RunConfig(object):
  """Class of RunConfig.

  RunConfig contains hyperparameters that could be different between
  pretraining and finetuning. These hyperparameters can also be changed from
  run to run. We store them separately from XLNetConfig for flexibility.
  """

  def __init__(self,
               is_training,
               use_tpu,
               dropout,
               dropout_att,
               init_method='normal',
               init_range=0.1,
               init_std=0.02,
               mem_len=None,
               reuse_len=None,
               bi_data=False,
               clamp_len=-1,
               same_length=False,
               use_cls_mask=True):
    """Initializes RunConfig.

    Args:
      is_training: bool, whether in training mode.
      use_tpu: bool, whether TPUs are used.
      dropout: float, dropout rate.
      dropout_att: float, dropout rate on attention probabilities.
      init_method: str, the initialization scheme, either "normal" or
        "uniform".
      init_range: float, initialize the parameters with a uniform
        distribution in [-init_range, init_range]. Only effective when
        init="uniform".
      init_std: float, initialize the parameters with a normal distribution
        with mean 0 and stddev init_std. Only effective when init="normal".
      mem_len: int, the number of tokens to cache.
      reuse_len: int, the number of tokens in the current batch to be cached
        and reused in the future.
      bi_data: bool, whether to use bidirectional input pipeline.
        Usually set to True during pretraining and False during finetuning.
      clamp_len: int, clamp all relative distances larger than clamp_len.
        -1 means no clamping.
      same_length: bool, whether to use the same attention length for each
        token.
      use_cls_mask: bool, whether to introduce cls mask.
    """

    self.init_method = init_method
    self.init_range = init_range
    self.init_std = init_std
    self.is_training = is_training
    self.dropout = dropout
    self.dropout_att = dropout_att
    self.use_tpu = use_tpu
    self.mem_len = mem_len
    self.reuse_len = reuse_len
    self.bi_data = bi_data
    self.clamp_len = clamp_len
    self.same_length = same_length
    self.use_cls_mask = use_cls_mask
sanchopanca/my-long-term-memory
app.py
Python
apache-2.0
269
0
#!/usr/bin/env python3
import sys

from mltm.cli import add_entry, show_entries

if __name__ == '__main__':
    n = len(sys.argv[1:])
    if n == 0:
        show_entries()
    elif sys.argv[1] == 'add':
        add_entry()
    else:
        show_entries(sys.argv[1])
biomodels/BIOMD0000000102
BIOMD0000000102/model.py
Python
cc0-1.0
427
0.009368
import os

path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000102.xml')
with open(sbmlFilePath, 'r') as f:
    sbmlString = f.read()

def module_exists(module_name):
    try:
        __import__(module_name)
    except ImportError:
        return False
    else:
        return True

if module_exists('libsbml'):
    import libsbml
    sbml = libsbml.readSBMLFromString(sbmlString)
testbhearsum/balrog
src/auslib/web/admin/views/releases.py
Python
mpl-2.0
26,017
0.003306
import difflib import json import connexion from flask import Response, abort, jsonify from auslib.blobs.base import BlobValidationError, createBlob from auslib.db import OutdatedDataError, ReadOnlyError from auslib.global_state import dbo from auslib.web.admin.views.base import AdminView, requirelogin, serialize_signoff_requirements from auslib.web.admin.views.problem import problem from auslib.web.admin.views.scheduled_changes import ( EnactScheduledChangeView, ScheduledChangeHistoryView, ScheduledChangesView, ScheduledChangeView, SignoffsView, ) from auslib.web.common.releases import release_list, serialize_releases __all__ = ["SingleReleaseView", "SingleLocaleView"] def createRelease(release, product, changed_by, transaction, releaseData): blob = createBlob(releaseData) dbo.releases.insert(changed_by=changed_by, transaction=transaction, name=release, product=product, data=blob) return dbo.releases.getReleases(name=release, transaction=transaction)[0] # TODO: certain cases here can return a 400 while still modifying the database # https://bugzilla.mozilla.org/show_bug.cgi?id=1246993 has more details def changeRelease(release, changed_by, transaction, existsCallback, commitCallback, log): """Generic function to change an aspect of a release. It relies on a PartialReleaseForm existing and does some upfront work and checks before doing anything. It will, for the named release and any found in the 'copyTo' field of the PartialReleaseForm: - Create the release if it doesn't already exist. - return a 400 Response if the release exists and old_data_version doesn't. - return a 400 Response if the product name in the form doesn't match the existing one. - update the version column of the release table if the one in the form doesn't match it. - if the release already exists, 'existsCallback' will be called. If that function returns True, a 201 Response will be returned upon successful completion. If that function returns False, a 200 Response will be returned instead. @type release: string @param release: The primary release to update. Additional releases found in the 'copyTo' field of the PartialReleaseForm will also be updated. @type changed_by: string @param changed_by: The username making the change. @type transaction: AUSTransaction object @param transaction: The transaction object to be used for all database operations. @type existsCallback: callable @param existsCallback: The callable to call to determine whether to consider this a "new" change or not. It must receive 3 positional arguments: - the name of the release - the product name from the PartialReleaseForm - the version from the PartialReleaseForm @type commitCallback: callable @param commitCallback: The callable to call after all prerequisite checks and updates are done. 
It must receive 6 positional arguments: - the name of the release - the product name from the PartialReleaseForm - the version from the PartialReleaseForm - the data from the PartialReleaseForm - the most recent version of the data for the release from the database - the old_data_version from the PartialReleaseForm """ new = True product = connexion.request.get_json().get("product") incomingData = json.loads(connexion.request.get_json().get("data")) copyTo = list() if connexion.request.get_json().get("copyTo"): copyTo = json.loads(connexion.request.get_json().get("copyTo")) alias = list() if connexion.request.get_json().get("alias"): alias = json.loads(connexion.request.get_json().get("alias")) old_data_version = connexion.request.get_json().get("data_version") # schema_version is an attribute at the root level of a blob. # Endpoints that receive an entire blob can find it there. # Those that don't have to pass it as a form element instead. if connexion.request.get_json().get("schema_version"): schema_version = connexion.request.get_json().get("schema_version") elif incomingData.get("schema_version"): schema_version = incomingData.get("schema_version") else: return problem(400, "Bad Request", "schema_version is required") if connexion.request.get_json().get("hashFunction"): hashFunction = connexion.request.get_json().get("hashFunction") elif incomingData.get("hashFunction"): hashFunction = incomingData.get("hashFunction") else: hashFunction = None allReleases = [release] if copyTo: allReleases += copyTo for rel in allReleases: try: releaseInfo = dbo.releases.getReleases(name=rel, transaction=transaction)[0] if existsCallback(rel, product): new = False # "release" is the one named in the URL (as opposed to the # ones that can be provided in copyTo), and we treat it as # the "primary" one if rel == release: # Make sure that old_data_version is provided, because we need to verify it when updating. if not old_data_version: msg = "Release exists, data_version must be provided" log.warning("Bad input: %s", rel) return problem(400, "Bad Request", msg) # If the product we're given doesn't match the one in the DB, panic. if product != releaseInfo["product"]: msg = "Product name '%s' doesn't match the one on the release object ('%s')
for release '%s'" % (product, releaseInfo["product"], rel) log.warning("Bad input: %s", rel) return problem(400, "Bad Request", msg) if "hashFunction" in releaseInfo["data"] and hashFunction and hashFunction != releaseInfo["data"]["hashFunction"]: msg = "hashFunction '{0}' doesn't match the one on the relea
se " "object ('{1}') for release '{2}'".format( hashFunction, releaseInfo["data"]["hashFunction"], rel ) log.warning("Bad input: %s", rel) return problem(400, "Bad Request", msg) # If this isn't the release in the URL... else: # Use the data_version we just grabbed from the dbo. old_data_version = releaseInfo["data_version"] except IndexError: # If the release doesn't already exist, create it, and set old_data_version appropriately. newReleaseData = dict(name=rel, schema_version=schema_version) if hashFunction: newReleaseData["hashFunction"] = hashFunction try: releaseInfo = createRelease(rel, product, changed_by, transaction, newReleaseData) except BlobValidationError as e: msg = "Couldn't create release: %s" % e log.warning("Bad input: %s", rel) return problem(400, "Bad Request", msg, ext={"exception": e.errors}) except ValueError as e: msg = "Couldn't create release: %s" % e log.warning("Bad input: %s", rel) return problem(400, "Bad Request", msg, ext={"exception": e.args}) old_data_version = 1 extraArgs = {} if alias: extraArgs["alias"] = alias try: commitCallback(rel, product, incomingData, releaseInfo["data"], old_data_version, extraArgs) except BlobValidationError as e: msg = "Couldn't update release: %s" %
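The excerpt above is truncated mid-function, but the callback contract its docstring describes can still be illustrated. A hedged sketch, shaped after the call sites visible above (existsCallback(rel, product) and commitCallback(rel, product, incomingData, releaseInfo["data"], old_data_version, extraArgs)); note the call sites pass two arguments to existsCallback even though the docstring mentions a version argument. The bodies below are placeholders, not auslib code.

# Hypothetical callbacks (placeholders, not auslib code), shaped after the
# call sites inside changeRelease above.
def existsCallback(name, product):
    # Treat a release as existing if it is already in the releases table;
    # getReleases is used the same way inside changeRelease itself.
    return len(dbo.releases.getReleases(name=name)) > 0

def commitCallback(name, product, incoming, current, old_data_version, extraArgs):
    # Merge the partial update over the current blob; the exact auslib.db
    # persistence call is not shown in this excerpt, so it is omitted here.
    current.update(incoming)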
openstack/tempest
tempest/test_discover/test_discover.py
Python
apache-2.0
1,891
0
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import unittest

from tempest.test_discover import plugins


def load_tests(loader, tests, pattern):
    ext_plugins = plugins.TempestTestPluginManager()

    suite = unittest.TestSuite()
    base_path = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
    base_path = os.path.split(base_path)[0]
    # Load local tempest tests
    for test_dir in ['api', 'scenario']:
        full_test_dir = os.path.join(base_path, 'tempest', test_dir)
        if not pattern:
            suite.addTests(loader.discover(full_test_dir,
                                           top_level_dir=base_path))
        else:
            suite.addTests(loader.discover(full_test_dir, pattern=pattern,
                                           top_level_dir=base_path))

    plugin_load_tests = ext_plugins.get_plugin_load_tests_tuple()
    if not plugin_load_tests:
        return suite

    # Load any installed plugin tests
    for plugin in plugin_load_tests:
        test_dir, top_path = plugin_load_tests[plugin]
        if not pattern:
            suite.addTests(loader.discover(test_dir, top_level_dir=top_path))
        else:
            suite.addTests(loader.discover(test_dir, pattern=pattern,
                                           top_level_dir=top_path))
    return suite
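A hedged sketch of exercising load_tests directly; in practice the stdlib test loader invokes it via the "load_tests protocol" during discovery, and running this requires tempest (and any plugins) to be installed.

# Hypothetical direct call (not in the original file); unittest normally
# calls load_tests itself during discovery.
loader = unittest.TestLoader()
suite = load_tests(loader, None, None)   # pattern=None -> discover everything
print(suite.countTestCases())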
sahdman/Plane
Servo.py
Python
gpl-3.0
804
0.039801
import Adafruit_BBIO.PWM as PWM


class Servo:
    def __init__(self, pin):
        self.servo_pin = pin
        self.duty_min = 3
        self.duty_max = 14.5
        self.duty_span = self.duty_max - self.duty_min

    def StartServo(self):
        print("Starting servo")
        print(self.servo_pin)
        PWM.start(self.servo_pin, (100 - self.duty_min), 60.0, 1)
        self.current_angle = 90.0
        self.SetAngle(self.current_angle)

    def SetAngle(self, angle):
        angle_f = float(angle)
        duty = 100 - ((angle_f / 180) * self.duty_span + self.duty_min)
        PWM.set_duty_cycle(self.servo_pin, duty)

    def IncreaseAngle(self, angle):
        self.current_angle += angle
        self.SetAngle(self.current_angle)

    def DecreaseAngle(self, angle):
        self.current_angle -= angle
        self.SetAngle(self.current_angle)

    def StopServo(self):
        PWM.stop(self.servo_pin)
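A hedged usage sketch, not part of the original file; it assumes a BeagleBone with Adafruit_BBIO available, and "P9_14" is an assumed PWM-capable pin name, not one taken from this repo.

# Hypothetical usage; "P9_14" is an assumed BeagleBone PWM pin.
servo = Servo("P9_14")
servo.StartServo()        # starts 60 Hz PWM and centres the servo at 90 degrees
servo.SetAngle(45)
servo.IncreaseAngle(10)   # now at 55 degrees
servo.StopServo()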
atvcaptain/enigma2
lib/python/Components/Renderer/Progress.py
Python
gpl-2.0
1,048
0.02958
from __future__ import absolute_import
from Components.VariableValue import VariableValue
from Components.Renderer.Renderer import Renderer
from enigma import eSlider


class Progress(VariableValue, Renderer):
    def __init__(self):
        Renderer.__init__(self)
        VariableValue.__init__(self)
        self.__start = 0
        self.__end = 100

    GUI_WIDGET = eSlider

    def changed(self, what):
        if what[0] == self.CHANGED_CLEAR:
            (self.range, self.value) = ((0, 1), 0)
            return

        range = self.source.range or 100
        value = self.source.value
        if value is None:
            value = 0
        if range > 2**31 - 1:
            range = 2**31 - 1
        if value > range:
            value = range
        if value < 0:
            value = 0
        (self.range, self.value) = ((0, range), value)

    def postWidgetCreate(self, instance):
        instance.setRange(self.__start, self.__end)

    def setRange(self, range):
        (self.__start, self.__end) = range
        if self.instance is not None:
            self.instance.setRange(self.__start, self.__end)

    def getRange(self):
        return self.__start, self.__end

    range = property(getRange, setRange)
1mentat/card
setup.py
Python
gpl-2.0
417
0.002398
from distutils.core import setup

setup(name="card",
      author="Benoit Michau",
      author_email="michau.benoit@gmail.com",
      url="http://michau.benoit.free.fr/codes/smartcard/",
      description="A library to manipulate smartcards used in "
                  "telecommunications systems (SIM, USIM)",
      long_description=open("README.txt", "r").read(),
      version="0.1.0",
      license="GPLv2",
      packages=["card"])
cheral/orange3
Orange/widgets/visualize/tests/test_owscatterplot.py
Python
bsd-2-clause
3,898
0.000257
# Test methods with long descriptive names can omit docstrings # pylint: disable=missing-docstring from unittest.mock import MagicMock import numpy as np from AnyQt.QtCore import QRectF from Orange.data import Table, Domain, ContinuousVariable, DiscreteVariable from Orange.widgets.tests.base import WidgetTest, WidgetOutputsTestMixin from Orange.widgets.visualize.owscatterplot import \ OWScatterPlot, ScatterPlotVizRank class TestOWScatterPlot(WidgetTest, WidgetOutputsTestMixin): @classmethod def setUpClass(cls): super().setUpClass() WidgetOutputsTestMixin.init(cls) cls.signal_name = "Data" cls.signal_data = cls.data def setUp(self): self.widget = self.create_widget(OWScatterPlot) def test_set_data(self): # Connect iris to scatter plot self.send_signal("Data", self.data) # First two attribute should be selected as x an y self.assertEqual(self.widget.attr_x, self.data.domain[0]) self.assertEqual(self.widget.attr_y, self.data.domain[1]) # Class var should be selected as color self.assertIs(self.widget.graph.attr_color, self.data.domain.class_var) # Change which attributes are displayed self.widget.attr_x = self.data.domain[2] self.widget.attr_y = self.data.domain[3] # Disconnect the data self.send_signal("Data", None) # removing data should have cleared attributes
self.assertEqual(self.widget.attr_x, None) self.assertEqual(self.widget.attr_y, None) self.assertEqual(self.widget.graph.attr_color, None) # and remove the legend self.assertEqual(self.widget.graph.legend, None) # Connect iris again # same attributes t
hat were used last time should be selected self.send_signal("Data", self.data) self.assertIs(self.widget.attr_x, self.data.domain[2]) self.assertIs(self.widget.attr_y, self.data.domain[3]) def test_score_heuristics(self): domain = Domain([ContinuousVariable(c) for c in "abcd"], DiscreteVariable("c", values="ab")) a = np.arange(10).reshape((10, 1)) data = Table(domain, np.hstack([a, a, a, a]), a >= 5) self.send_signal("Data", data) vizrank = ScatterPlotVizRank(self.widget) self.assertEqual([x.name for x in vizrank.score_heuristic()], list("abcd")) def test_optional_combos(self): domain = self.data.domain d1 = Domain(domain.attributes[:2], domain.class_var, [domain.attributes[2]]) t1 = Table(d1, self.data) self.send_signal("Data", t1) self.widget.graph.attr_size = domain.attributes[2] d2 = Domain(domain.attributes[:2], domain.class_var, [domain.attributes[3]]) t2 = Table(d2, self.data) self.send_signal("Data", t2) def _select_data(self): self.widget.graph.select_by_rectangle(QRectF(4, 3, 3, 1)) return self.widget.graph.get_selection() def test_error_message(self): """Check if error message appears and then disappears when data is removed from input""" data = self.data.copy() data.X[:, 0] = np.nan self.send_signal("Data", data) self.assertTrue(self.widget.Warning.missing_coords.is_shown()) self.send_signal("Data", None) self.assertFalse(self.widget.Warning.missing_coords.is_shown()) def test_report_on_empty(self): self.widget.report_plot = MagicMock() self.widget.report_caption = MagicMock() self.widget.report_items = MagicMock() self.widget.send_report() # Essentially, don't crash self.widget.report_plot.assert_not_called() self.widget.report_caption.assert_not_called() self.widget.report_items.assert_not_called()
stack-of-tasks/rbdlpy
tutorial/lib/python2.7/site-packages/OpenGL/raw/GLX/OML/sync_control.py
Python
lgpl-3.0
1,569
0.045252
'''Autogenerated by xml_generate script, do not edit!''' from OpenGL import platform as _p, arrays # Code generation uses this from OpenGL.raw.GLX import _types as _cs # End users want this... from OpenGL.raw.GLX._types import * from OpenGL.raw.GLX import _errors from OpenGL.constant import Constant as _C import ctypes _EXTENSION_NAME = 'GLX_OML_sync_control' def _f( function ): return _p.createFunction( function,_p.PLATFORM.GLX,'GLX_OML_sync_control',error_checker=_errors._error_checker) @_f @_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,ctypes.POINTER(_cs.int32_t),ctypes.POINTER(_cs.int32_t)) def glXGetMscRateOML(dpy,drawable,numerator,denominator):pass @_f @_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t)) def glXGetSyncValuesOML(dpy,drawable,ust,msc,sbc):pass @_f @_p.types(_cs.int64_t,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,_cs.int64_t,_cs.int64_t) def glXSwapBuffersMscOML(dpy,drawable,target_msc,divisor,remainder):pass @_f @_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,_cs.int64_t,_cs.int64_t,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t)) def glXWaitForMscOML(dpy,drawable,target_msc,divisor,remainder,ust,msc,sbc):pass @_f @_p.
types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int6
4_t)) def glXWaitForSbcOML(dpy,drawable,target_sbc,ust,msc,sbc):pass
rosspalmer/bitQuant
bitquant/data/clss.py
Python
mit
3,127
0.011193
import conv
import tools
from ..api.clss import api
from ..sql.clss import sql
from pandas import DataFrame
import time as tm


class data(object):

    def __init__(self):
        self.a = api()
        self.s = sql()
        self.jobs = []
        self.trd = DataFrame()
        self.prc = DataFrame()

    def add_trades(self, exchange, symbol, limit='', since='',
                   auto_since='no', ping_limit=1.0):
        job = {'exchange': exchange, 'symbol': symbol}
        self.a.add_job(exchange, symbol, 'trades', limit=limit, since=since,
                       auto_since=auto_since, ping_limit=ping_limit)
        self.jobs.append(job)

    def get_trades(self, exchange='', symbol='', start=''):
        trd = self.s.select('trades', exchange=exchange,
                            symbol=symbol, start=start)
        self.trd = self.trd.append(trd)
        self.trd = self.trd.drop_duplicates(['tid', 'exchange'])

    def run_trades(self, exchange, symbol):
        self.trd = self.trd.append(self.a.run(exchange, symbol, 'trades'))
        self.trd = self.trd.drop_duplicates(['tid', 'exchange'])

    def run_loop(self, time, to_sql=60, log='no'):
        dump = tm.time() + to_sql
        end = tm.time() + time
        while tm.time() < end:
            for job in self.jobs:
                self.run_trades(job['exchange'], job['symbol'])
            if tm.time() > dump:
                dump = tm.time() + to_sql
                self.to_sql(log)

    def get_price(self, exchange='', symbol='', freq='', start=''):
        prc = self.s.select('price', exchange=exchange, symbol=symbol,
                            freq=freq, start=start)
        self.prc = self.prc.append(prc)
        self.prc = self.prc.drop_duplicates(['timestamp', 'exchange',
                                             'symbol', 'freq'])
        return prc

    def run_price(self, exchange, symbol, freq, label='left',
                  from_sql='no', start=''):
        if from_sql == 'yes':
            self.get_trades(exchange, symbol, start=start)
            # get_trades already applied the exchange/symbol filters
            trd = self.trd
        else:
            trd = self.trd
            if exchange != '':
                trd = self.trd[self.trd.exchange == exchange]
            if symbol != '':
                trd = self.trd[self.trd.symbol == symbol]
        trd = tools.date_index(trd)
        if len(trd.index) > 0:
            prc = conv.olhcv(trd, freq, label=label)
            self.prc = self.prc.append(prc)
            self.prc = self.prc.drop_duplicates(['timestamp', 'exchange',
                                                 'symbol', 'freq'])

    def to_sql(self, log='no'):
        if 'sent' in self.trd:
            trd = self.trd[self.trd['sent'] != 'yes']
        else:
            trd = self.trd
        if 'sent' in self.prc:
            prc = self.prc[self.prc['sent'] != 'yes']
        else:
            prc = self.prc
        self.s.insert('trades', trd)
        self.s.insert('price', prc)
        if log == 'yes':
            print trd
            print prc
        self.trd['sent'] = 'yes'
        self.prc['sent'] = 'yes'
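A hedged usage sketch, assuming the bitquant package is importable; the exchange and symbol strings are placeholders, not values taken from this codebase.

# Hypothetical usage; exchange/symbol strings are placeholders.
from bitquant.data.clss import data

d = data()
d.add_trades('bitstamp', 'btcusd', auto_since='yes')
d.run_loop(60 * 10, to_sql=60)             # poll for 10 minutes, flush every 60 s
d.run_price('bitstamp', 'btcusd', '5min')  # build OHLCV bars from the trades
d.to_sql(log='yes')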
pydsigner/taskit
setup.py
Python
lgpl-3.0
1,455
0.003436
#! /usr/bin/env python

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import sys

import taskit

long_description = '''TaskIt -- A light-weight task management library.

TaskIt is a light-weight library to turn a function into a full-featured,
threadable task. It is completely X-Python Compatible and has no external
dependencies. The simple version is completely self-contained, whereas the
distributed version has a simple, obvious way to connect with the backends.'''


def main():
    setup(script_args=sys.argv[1:] if len(sys.argv) > 1 else ['install'],
          name='taskit',
          version=taskit.__version__,
          description='TaskIt -- A light-weight task management library.',
          long_description=long_description,
          author='Daniel Foerster/pydsigner',
          author_email='pydsigner@gmail.com',
          packages=['taskit'],
          package_data={'taskit': ['doc/*.md']},
          license='LGPLv3',
          url='http://github.com/pydsigner/taskit',
          classifiers=['Development Status :: 5 - Production/Stable',
                       'Intended Audience :: Developers',
                       'Operating System :: MacOS :: MacOS X',
                       'Operating System :: Microsoft :: Windows',
                       'Operating System :: POSIX',
                       'Programming Language :: Python'])


if __name__ == '__main__':
    main()
wavicles/pycode-browser
Code/Physics/spring2.py
Python
gpl-3.0
1,784
0.040919
""" spring2.py The rk4_two() routine in this program does a two step integration using an array method. The current x and xprime values are kept in a global list named 'val'. val[0] = current positi
on; val[1] = current velocity (the global list itself is named 'xxp'; the helper functions receive it as their 'val' parameter). The results are compared with analytically calculated values. """

from pylab import *

def accn(t, val):
    force = -spring_const * val[0] - damping * val[1]
    return force / mass

def vel(t, val):
    return val[1]

def rk4_two(t, h):                # Time and step value
    global xxp                    # x and xprime values kept in 'xxp'
    k1 = [0, 0]                   # initialize five 2-element work lists
    k2 = [0, 0]
    k3 = [0, 0]
    k4 = [0, 0]
    tmp = [0, 0]

    k1[0] = vel(t, xxp)
    k1[1] = accn(t, xxp)
    for i in range(2):            # value of functions at t + h/2
        tmp[i] = xxp[i] + k1[i] * h/2

    k2[0] = vel(t + h/2, tmp)
    k2[1] = accn(t + h/2, tmp)
    for i in range(2):            # value of functions at t + h/2
        tmp[i] = xxp[i] + k2[i] * h/2

    k3[0] = vel(t + h/2, tmp)
    k3[1] = accn(t + h/2, tmp)
    for i in range(2):            # value of functions at t + h
        tmp[i] = xxp[i] + k3[i] * h

    k4[0] = vel(t + h, tmp)
    k4[1] = accn(t + h, tmp)
    for i in range(2):            # weighted RK4 update to t + h
        xxp[i] = xxp[i] + (k1[i] + 2.0*k2[i] + 2.0*k3[i] + k4[i]) * h / 6.0

t = 0.0                  # Starting time
h = 0.01                 # Runge-Kutta step size, time increment
xxp = [2.0, 0.0]         # initial position & velocity
spring_const = 100.0     # spring constant
mass = 2.0               # mass of the oscillating object
damping = 0.0

tm = [0.0]               # Lists to store time, position & velocity
x = [xxp[0]]
xp = [xxp[1]]
xth = [xxp[0]]

while t < 5:
    rk4_two(t, h)        # Do one step of RK integration
    t = t + h
    tm.append(t)
    xp.append(xxp[1])
    x.append(xxp[0])
    th = 2.0 * cos(sqrt(spring_const/mass) * t)
    xth.append(th)

plot(tm, x)
plot(tm, xth, '+')
show()
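Since the script already builds both the numerical trajectory (x) and the analytic one (xth), a one-line accuracy check is a natural follow-on; this sketch is an addition, not part of the original program.

# Hypothetical accuracy check on the arrays built above (not in the original).
import numpy as np
err = np.abs(np.array(x) - np.array(xth)).max()
print("max |RK4 - analytic| =", err)   # small for h = 0.01 with damping = 0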
tridge/DavisSi1000
Firmware/tools/davis_log.py
Python
bsd-2-clause
715
0.004196
#!/usr/bin/env python
# log Davis serial input to a file, echoing each line to stdout as it goes

import serial, sys, optparse, time

parser = optparse.OptionParser("davis_log")
parser.add_option("--baudrate", type='int',
                  default=57600, help='baud rate')

opts, args = parser.parse_args()

if len(args) != 2:
    print("usage: davis_log.py <DEVICE> <logfile>")
    sys.exit(1)

device = args[0]
logfile = args[1]

port = serial.Serial(device, opts.baudrate, timeout=5,
                     dsrdtr=False, rtscts=False, xonxoff=False)

log = open(logfile, mode="a")

while True:
    line = port.readline()
    line = line.rstrip()
    out = "%s %.2f\n" % (line, time.time())
    log.write(out)
    log.flush()
    sys.stdout.write(out)
    sys.stdout.flush()
ywangd/stash
bin/crypt.py
Python
mit
2,462
0.001219
# -*- coding: utf-8 -*-
'''
File encryption for stash
Uses AES in CTR mode (via pyaes).

usage: crypt.py [-h] [-k KEY] [-d] infile [outfile]

positional arguments:
  infile             File to encrypt/decrypt.
  outfile            Output file.

optional arguments:
  -h, --help         show this help message and exit
  -k KEY, --key KEY  Encrypt/Decrypt Key.
  -d, --decrypt      Flag to decrypt.
'''
from __future__ import print_function

import argparse
import base64
import os

_stash = globals()['_stash']

try:
    import pyaes
except ImportError:
    print('Installing Required packages...')
    _stash('pip install pyaes')
    import pyaes


class Crypt(object):
    def __init__(self, in_filename, out_filename=None):
        self.in_filename = in_filename
        self.out_filename = out_filename

    def aes_encrypt(self, key=None, chunksize=64 * 1024):
        self.out_filename = self.out_filename or self.in_filename + '.enc'
        if key is None:
            # Derive a printable 32-byte key (AES-256) from random data.
            key = base64.b64encode(os.urandom(32))[:32]
        aes = pyaes.AESModeOfOperationCTR(key)
        with open(self.in_filename, 'rb') as infile:
            with open(self.out_filename, 'wb') as outfile:
                pyaes.encrypt_stream(aes, infile, outfile)
        return key

    def aes_decrypt(self, key, chunksize=64 * 1024):
        self.out_filename = self.out_filename or os.path.splitext(self.in_filename)[0]
        aes = pyaes.AESModeOfOperationCTR(key)
        with open(self.in_filename, 'rb') as infile:
            with open(self.out_filename, 'wb') as outfile:
                pyaes.decrypt_stream(aes, infile, outfile)


if __name__ == '__main__':
    ap = argparse.ArgumentParser()
    ap.add_argument(
        '-k',
        '--key',
        action='store',
        default=None,
        help='Encrypt/Decrypt Key.',
    )
    ap.add_argument(
        '-d',
        '--decrypt',
        action='store_true',
        default=False,
        help='Flag to decrypt.',
    )
    #ap.add_argument('-t','--type',action='store',choices={'aes','rsa'},default='aes')
    ap.add_argument('infile', action='store', help='File to encrypt/decrypt.')
    ap.add_argument('outfile', action='store', nargs='?', help='Output file.')
    args = ap.parse_args()

    crypt = Crypt(args.infile, args.outfile)

    if args.decrypt:
        crypt.aes_decrypt(args.key.encode())
    else:
        nk = crypt.aes_encrypt(args.key)
        if args.key is None:
            print("Key: %s" % nk.decode())
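A hedged round-trip sketch; it only works inside StaSh (the module reads the _stash global at import time), and the file names are placeholders.

# Hypothetical round trip with the Crypt class; file names are placeholders.
c = Crypt('notes.txt')                 # writes notes.txt.enc
key = c.aes_encrypt()                  # random 32-byte key is returned
Crypt('notes.txt.enc', 'notes.out').aes_decrypt(key)
# CTR decryption reuses the same key; the counter restarts because a fresh
# AESModeOfOperationCTR object is created inside aes_decrypt.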
operepo/ope
laptop_credential/winsys/tests/test_event_logs.py
Python
mit
6,794
0.002944
# -*- coding: utf-8 -*- from __future__ import unicode_literals import sys from winsys._compat import unittest import uuid import winerror import win32api import win32con import win32evtlog import win32security import pywintypes from winsys.tests import utils as testutils from winsys import event_logs, registry, utils LOG_NAME = event_logs.DEFAULT_LOG_NAME GUID = "_winsys-%s" % uuid.uuid1() # # Utility functions # def yield_logs(computer=None, log_name=LOG_NAME): hLog = win32evtlog.OpenEventLog(computer, log_name) try: while True: entries = win32evtlog.ReadEventLog(
hLog, win32evtlog.EVENTLOG_BACKWARDS_READ | w
in32evtlog.EVENTLOG_SEQUENTIAL_READ, 0 ) if entries: for entry in entries: yield entry else: break finally: win32evtlog.CloseEventLog(hLog) # # TESTS # @unittest.skipUnless(testutils.i_am_admin(), "These tests must be run as Administrator") class TestEventLogs(unittest.TestCase): # # Fixtures # def setUp(self): event_logs.EventSource.create(GUID, LOG_NAME) self.registry_root = registry.registry(r"HKLM\SYSTEM\CurrentControlSet\Services\Eventlog") def tearDown(self): event_logs.event_source(r"%s\%s" %(LOG_NAME, GUID)).delete() # # Event Source # def test_create_source(self): log_name = "System" guid = "_winsys-test_create_source-%s" % uuid.uuid1() try: source = event_logs.EventSource.create(guid, log_name) self.assertTrue(self.registry_root + log_name + guid) except: raise else: source.delete() self.assertFalse(bool(self.registry_root + log_name + guid)) def test_create_source_at_default(self): guid = "_winsys-test_create_source_at_default-%s" % uuid.uuid1() try: source = event_logs.EventSource.create(guid) self.assertTrue(self.registry_root + event_logs.DEFAULT_LOG_NAME + guid) except: raise else: source.delete() self.assertFalse(bool(self.registry_root + event_logs.DEFAULT_LOG_NAME + guid)) def test_event_sources(self): log_name = "System" self.assertEqual( set(s.name for s in event_logs.event_sources(log_name)), set(r.name for r in self.registry_root + log_name) ) self.assertTrue(all(isinstance(s, event_logs.EventSource) for s in event_logs.event_sources(log_name))) def test_event_source_from_event_source(self): for s in event_logs.event_sources(): self.assertTrue(isinstance(s, event_logs.EventSource)) self.assertTrue(event_logs.event_source(s) is s) break def test_event_source_from_none(self): self.assertTrue(event_logs.event_source(None) is None) def test_event_source_from_bad_string(self): with self.assertRaises(event_logs.x_event_logs): event_logs.event_source("") def test_event_source_from_good_string(self): self.assertTrue( isinstance( event_logs.event_source(r"%s\%s" %(LOG_NAME, GUID)), event_logs.EventSource ) ) def test_event_source_from_good_string_default_log(self): self.assertTrue( isinstance( event_logs.event_source(GUID), event_logs.EventSource ) ) def test_event_source_as_string(self): self.assertTrue(event_logs.event_source(GUID).as_string()) def test_event_source_log_event(self): data = str(GUID).encode("utf8") event_logs.event_source(GUID).log_event(data=data) for event in yield_logs(): if event.SourceName == GUID and event.Data == data: self.assertTrue(True) break else: self.assertTrue(False) # # Event logs # def test_event_logs(self): self.assertEqual( set(s.name for s in event_logs.event_logs()), set(r.name for r in self.registry_root.keys()) ) self.assertTrue(all(isinstance(s, event_logs.EventLog) for s in event_logs.event_logs())) def test_event_log_from_event_log(self): for l in event_logs.event_logs(): self.assertTrue(isinstance(l, event_logs.EventLog)) self.assertTrue(event_logs.event_log(l) is l) break def test_event_log_from_none(self): self.assertTrue(event_logs.event_log(None) is None) def test_event_log_from_bad_string(self): with self.assertRaises(event_logs.x_event_logs): event_logs.event_log ("") def test_event_log_from_good_string(self): self.assertTrue( isinstance( event_logs.event_log(LOG_NAME), event_logs.EventLog ) ) def test_event_log_clear_no_save(self): log_name = "Internet Explorer" source_name = "_winsys-%s" % uuid.uuid1() source = event_logs.EventSource.create(source_name, log_name) log = event_logs.event_log(log_name) hLog = 
win32evtlog.OpenEventLog(None, log_name) try: log.log_event(source, message="hello") self.assertNotEquals(win32evtlog.GetNumberOfEventLogRecords(hLog), 0) log.clear() self.assertEqual(win32evtlog.GetNumberOfEventLogRecords(hLog), 0) finally: win32evtlog.CloseEventLog(hLog) source.delete() def test_event_log_clear_with_save(self): log_name = "Internet Explorer" source_name = "_winsys-%s" % uuid.uuid1() source = event_logs.EventSource.create(source_name, log_name) log = event_logs.event_log(log_name) hLog = win32evtlog.OpenEventLog(None, log_name) try: log.log_event(source, message="hello") self.assertNotEquals(win32evtlog.GetNumberOfEventLogRecords(hLog), 0) log.clear() self.assertEqual(win32evtlog.GetNumberOfEventLogRecords(hLog), 0) finally: win32evtlog.CloseEventLog(hLog) source.delete() # # Module-level functions # def test_log_event(self): data = str(GUID).encode("utf8") event_logs.log_event("%s\\%s" %(LOG_NAME, GUID), data=data) for event in yield_logs(): if event.SourceName == GUID and event.Data == data: self.assertTrue(True) break else: self.assertTrue(False) if __name__ == "__main__": unittest.main() if sys.stdout.isatty(): raw_input("Press enter...")
jntkym/rappers
utils.py
Python
mit
1,664
0.001202
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# from __future__ import unicode_literals

import codecs
import sys
reload(sys)
sys.setdefaultencoding('utf8')

# ranges of ordinals of unicode ideographic characters
ranges = [
    {"from": ord(u"\u3300"), "to": ord(u"\u33ff")},  # compatibility ideographs
    {"from": ord(u"\ufe30"), "to": ord(u"\ufe4f")},  # compatibility ideographs
    {"from": ord(u"\uf900"), "to": ord(u"\ufaff")},  # compatibility ideographs
    # {"from": ord(u"\U0002f800"), "to": ord(u"\U0002fa1f")},  # compatibility ideographs
    {"from": ord(u"\u30a0"), "to": ord(u"\u30ff")},  # Japanese Kana
    {"from": ord(u"\u2e80"), "to": ord(u"\u2eff")},  # cjk radicals supplement
    {"from": ord(u"\u4e00"), "to": ord(u"\u9fff")},
    {"from": ord(u"\u3400"), "to": ord(u"\u4dbf")},
    # {"from": ord(u"\U00020000"), "to": ord(u"\U0002a6df")},
    # {"from": ord(u"\U0002a700"), "to": ord(u"\U0002b73f")},
    # {"from": ord(u"\U0002b740"), "to": ord(u"\U0002b81f")},
    # {"from": ord(u"\U0002b820"), "to": ord(u"\U0002ceaf")}  # included as of Unicode 8.0
]


def is_cjk(char):
    return any([range["from"] <= ord(char) <= range["to"] for range in ranges])


def load_csv_to_dict(filepath, delimiter=','):
    u"""Load a .csv file to dictionary

    Args:
        - filepath: path to the csv file
        - delimiter: delimiter in the csv file

    Input: csv file
        key,val
        ...

    Return: dictionary
    """
    table = {}
    with codecs.open(filepath, 'r', encoding='utf-8') as f:
        for line in f:
            key, val = line.strip().split(delimiter)
            table[key] = val
    return table
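A few hedged spot checks of the helpers above; the sample characters and CSV contents are made up for illustration.

# Hypothetical quick checks (not in the original file).
print(is_cjk(u'漢'))   # True  -- CJK unified ideograph range (U+4E00-U+9FFF)
print(is_cjk(u'カ'))   # True  -- Japanese Kana range
print(is_cjk(u'a'))    # False
# load_csv_to_dict expects "key,val" lines; e.g. a file containing
#   tokyo,東京
# yields {'tokyo': u'東京'}.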
xinruobingqing/robotChat
vector.py
Python
apache-2.0
1,599
0.012203
# -*- coding:utf-8 -*-
import os
import random
import sys
import importlib
importlib.reload(sys)

UNK_ID = 3

train_encode_file = 'data/middle_data/train.enc'
train_decode_file = 'data/middle_data/train.dec'
test_encode_file = 'data/middle_data/test.enc'
test_decode_file = 'data/middle_data/test.dec'

train_encode_vocabulary_file = 'data/voca_data/train_encode_vocabulary'
train_decode_vocabulary_file = 'data/voca_data/train_decode_vocabulary'

print("Converting dialogue to vectors...")


# Convert dialogue strings to vector form
def convert_to_vector(input_file, vocabulary_file, output_file):
    tmp_vocab = []
    with open(vocabulary_file, "r", encoding="utf8") as f:
        tmp_vocab.extend(f.readlines())
    tmp_vocab = [line.strip() for line in tmp_vocab]
    vocab = dict([(x, y) for (y, x) in enumerate(tmp_vocab)])
    # e.g. {'硕': 3142, 'v': 577, 'I': 4789, '拖': 1333, '疤': 2201, ...}
    output_f = open(output_file, 'w', encoding="utf8")
    with open(input_file, 'r', encoding="utf8") as f:
        for line in f:
            line_vec = []
            for words in line.strip():
                line_vec.append(vocab.get(words, UNK_ID))
            output_f.write(" ".join([str(num) for num in line_vec]) + "\n")
    output_f.close()


convert_to_vector(train_encode_file, train_encode_vocabulary_file, 'data/vector_data/train_encode.vec')
convert_to_vector(train_decode_file, train_decode_vocabulary_file, 'data/vector_data/train_decode.vec')
convert_to_vector(test_encode_file, train_encode_vocabulary_file, 'data/vector_data/test_encode.vec')
convert_to_vector(test_decode_file, train_decode_vocabulary_file, 'data/vector_data/test_decode.vec')
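A hedged mini-example of the per-character lookup convert_to_vector performs; the sample vocabulary is made up.

# Hypothetical mini-example of the lookup above; vocabulary is made up.
vocab = {'你': 4, '好': 5}          # char -> id
line = '你好吗'
ids = [vocab.get(ch, UNK_ID) for ch in line]
print(ids)  # [4, 5, 3] -- unknown chars fall back to UNK_ID (3)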
lukw00/spaCy
tests/spans/test_span.py
Python
mit
579
0.001727
from __future__ import unicode_literals

import pytest


@pytest.fixture
def doc(EN):
    return EN('This is a sentence. This is another sentence. And a third.')


@pytest.mark.models
def test_sent_spans(doc):
    sents = list(doc.sents)
    assert sents[0].start == 0
    assert sents[0].end == 5
    assert len(sents) == 3
    assert sum(len(sent) for sent in sents) == len(doc)


@pytest.mark.models
def test_root(doc):
    np = doc[2:4]
    assert len(np) == 2
    assert np.orth_ == 'a sentence'
    assert np.root.orth_ == 'sentence'
    assert np.root.head.orth_ == 'is'
luke0922/celery_learning
manage.py
Python
apache-2.0
1,290
0.000775
#!/usr/bin/env python
# encoding: utf-8

import sys
import subprocess

from flask_script import Manager
from flask_script.commands import ShowUrls
from flask_migrate import MigrateCommand

from application import create_app
from application.extensions import db
from utils.commands import GEventServer, ProfileServer

manager = Manager(create_app)
manager.add_option('-c', '--config', dest='mode', required=False)
manager.add_command("showurls", ShowUrls())
manager.add_command("gevent", GEventServer())
manager.add_command("profile", ProfileServer())
manager.add_command('db', MigrateCommand)


@manager.option('-c', '--config', help='environment config')
def simple_run(config):
    app = create_app(config)
    app.run(host="0.0.0.0", port=9192, debug=True)


@manager.command
def lint():
    """Runs code linter."""
    lint = subprocess.call(['flake8', '--ignore=E402,F403,E501',
                            'application/', 'manage.py', 'tests/']) == 0
    if lint:
        print('OK')
    sys.exit(lint)


@manager.command
def test():
    """Runs unit tests."""
    tests = subprocess.call(['python', '-c', 'import tests; tests.run()'])
    sys.exit(tests)


@manager.command
def create_db():
    """create tables"""
    db.create_all()


if __name__ == "__main__":
    manager.run()
dmacvicar/spacewalk
client/solaris/smartpm/smart/sorter.py
Python
gpl-2.0
22,108
0.001357
# # Copyright (c) 2004 Conectiva, Inc. # # Written by Gustavo Niemeyer <niemeyer@conectiva.com> # # This file is part of Smart Package Manager. # # Smart Package Manager is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as published # by the Free Software Foundation; either version 2 of the License, or (at # your option) any later version. # # Smart Package Manager is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Smart Package Manager; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # from smart.const import ENFORCE, OPTIONAL, INSTALL, REMOVE, RECURSIONLIMIT from smart.cache import PreRequires from smart import * import os, sys MAXSORTERDEPTH = RECURSIONLIMIT-50 class LoopError(Error): pass class ElementGroup(object): def __init__(self): self._relations = {} # (pred, succ) -> True def getRelations(self): return self._relations.keys() def addPredecessor(self, succ, pred): self._relations[(pred, succ)] = True def addSuccessor(self, pred, succ): self._relations[(pred, succ)] = True class ElementOrGroup(ElementGroup): pass class ElementAndGroup(ElementGroup): pass class ElementSorter(object): def __init__(self): self._successors = {} # pred -> {(succ, kind): True} self._predcount = {} # succ -> n self._groups = {} # (pred, succ, kind) -> [group, ...] self._disabled = {} # (pred, succ, kind) -> True def reset(self): self._successors.clear() self._groups.clear() def _getLoop(self, start, end=None): if end is None: end = start successors = self._successors path = [start] done = {}
loop = {} while path:
head = path[-1] dct = successors.get(head) if dct: for succ, kind in dct: if (head, succ, kind) not in self._disabled: if succ in loop or succ == end: loop.update(dict.fromkeys(path, True)) loop[end] = True # If end != start elif succ not in done: done[succ] = True path.append(succ) break else: path.pop() else: path.pop() return loop def _checkLoop(self, start, end=None): if end is None: end = start successors = self._successors queue = [start] done = {} while queue: elem = queue.pop() dct = successors.get(elem) if dct: for succ, kind in dct: if (elem, succ, kind) not in self._disabled: if succ == end: return True elif succ not in done: done[succ] = True queue.append(succ) return False def getLoops(self): successors = self._successors predcount = self._predcount loops = {} for elem in successors: if predcount.get(elem) and elem not in loops: loop = self._getLoop(elem) if loop: loops.update(loop) return loops def getLoopPaths(self, loops): if not loops: return [] successors = self._successors paths = [] done = {} for elem in loops: if elem not in done: path = [elem] while path: head = path[-1] dct = successors.get(head) if dct: for succ, kind in dct: if (succ in loops and (head, succ, kind) not in self._disabled): done[succ] = True if succ == elem: paths.append(path+[elem]) else: headsucc = (head, succ) if headsucc not in done: done[headsucc] = True path.append(succ) break else: path.pop() else: path.pop() return paths def _breakLoops(self, elem, loops, rellock, reclock, depth=0): if depth > MAXSORTERDEPTH: return False result = True dct = self._successors.get(elem) if dct: for succ, kind in dct.keys(): # Should we care about this relation? if succ not in loops: continue tup = (elem, succ, kind) if tup in self._disabled: continue # Check if the loop for this specific relation is still alive. if not self._checkLoop(succ, elem): continue # Some upper frame is already checking this. Protect # from infinite recursion. if tup in reclock: result = False break # If this relation is locked, our only chance is breaking # it forward. if tup in rellock: reclock[tup] = True loop = self._getLoop(succ, elem) broke = self._breakLoops(succ, loop, rellock, reclock, depth+1) del reclock[tup] if not broke: result = False continue # If this relation is optional, break it now. if kind is OPTIONAL: self._breakRelation(*tup) continue # We have an enforced relation. Let's check if we # have OR groups that could satisfy it. groups = self._groups.get(tup) if groups: # Any enforced AND groups tell us we can't # break this relation. for group in groups: if type(group) is ElementAndGroup: groups = None break if groups: # Check if we can remove the relation from all groups. reenable = {} for group in groups: reenable[group] = [] active = 0 for gpred, gsucc in group._relations: gtup = (gpred, gsucc, kind) if gtup in self._disabled: if gtup not in rellock: reenable[group].append(gtup) else: active += 1 if active > 1: break if active > 1: del reenable[group] elif not reenable[group]: break else: # These relations must not be reenabled in # the loop breaking steps below. relations = self._breakRelation(*tup) for rtup in relations: rellock[rtup] = True # Reenable the necessary relations, if possible.
sixtyfive/pcsc-ctapi-wrapper
PCSC/UnitaryTests/control_get_firmware.py
Python
lgpl-2.1
2,053
0.001461
#! /usr/bin/env python
"""
# control_get_firmware.py: get firmware version of Gemalto readers
# Copyright (C) 2009-2012 Ludovic Rousseau
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, see <http://www.gnu.org/licenses/>.

from smartcard.System import readers
from smartcard.pcsc.PCSCPart10 import (SCARD_SHARE_DIRECT,
    SCARD_LEAVE_CARD, SCARD_CTL_CODE, getTlvProperties)

for reader in readers():
    cardConnection = reader.createConnection()
    cardConnection.connect(mode=SCARD_SHARE_DIRECT,
        disposition=SCARD_LEAVE_CARD)

    print "Reader:", reader

    # properties returned by IOCTL_FEATURE_GET_TLV_PROPERTIES
    properties = getTlvProperties(cardConnection)

    # Gemalto devices support a control code to get the firmware
    key = 'PCSCv2_PART10_PROPERTY_wIdVendor'
    if key in properties:
        if properties[key] == 0x08E6:
            get_firmware = [0x02]
            IOCTL_SMARTCARD_VENDOR_IFD_EXCHANGE = SCARD_CTL_CODE(1)
            res = cardConnection.control(IOCTL_SMARTCARD_VENDOR_IFD_EXCHANGE,
                get_firmware)
            print " Firmware:", "".join([chr(x) for x in res])
        else:
            print " Not a Gemalto reader"
            key = 'PCSCv2_PART10_PROPERTY_sFirmwareID'
            if key in properties:
                firmware = properties[key]
                print " Firmware:", firmware
            else:
                print " %s not supported" % key
    else:
        print " %s not supported" % key
WING-NUS/corpSearch
system/systems.py
Python
lgpl-3.0
2,889
0
from corpsearchsystem import CorpSearchSystem from modules.editdistance.normalized import \ NormalizedEditDistanceQueryToHandle,\ NormalizedEditDistanceQueryToDisplayName from modules.editdistance.lengths import \ LengthOfQuery, LengthOfHandle, LengthOfDisplayName from modules.editdistance.stopwords import \ NormalizedEditDistanceStopwordsQueryToHandle,\ NormalizedEditDistanceStopwordsQueryToDisplayName from modules.description.counts import OccurrencesOfQueryInDescCaseInsensitive from modules.description.cosinesimilarity import \ CosineSimilarityDescriptionAndQuery, \ CosineSimilarityDescriptionAndDDG from modules.languagemodels.bigram import \ DescriptionLanguageModel, \ PostContentLanguageModel Baseline = CorpSearchSystem('Baseline', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName ]) # Baseline plus x. PlusLengths = CorpSearchSystem('+ Lengths', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, LengthOfQuery, LengthOfHandle, LengthOfDisplayName ]) PlusQueryOccurrences = CorpSearchSystem('+ Query Occurrences', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, OccurrencesOfQueryInDescCaseInsensitive ]) PlusDescriptionCosineSimilarity = CorpSearchSystem( '+ Description-Query Cosine Similarity', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, CosineSimilarityDescriptionAndQuery ]) PlusDescriptionDDGCosineSimilarity = CorpSearchSystem( '+ Description-DDG Cosine Similarity', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, CosineSimilarityDescriptionAndDDG ]) PlusDescriptionLanguageModels = CorpSearchSystem( '+ Description Language Models', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, DescriptionLanguageModel ]) PlusPostContentLanguageModels = CorpSearchSystem( '+ Post Content Language Models', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, PostContentLanguageModel ]) # End Baseline plus x. Final = CorpSearchSystem('Production System', [ NormalizedEditDistanceStopwordsQueryToHandle, NormalizedEditDistanceStopwordsQueryToDisplayName, LengthOfQuery, LengthOfHandle, LengthOfDisp
layName, OccurrencesOfQueryInDescCaseInsensitive, CosineSimilarityDescriptionAndQuery, CosineSimilarityDescripti
onAndDDG, DescriptionLanguageModel, PostContentLanguageModel ])
ottodietz/rough-q1d
q1d/q1d_step.py
Python
gpl-3.0
2,411
0.014932
#!/usr/bin/env python

from __future__ import division

from math import *
import numpy as np
from numpy.fft import fft, fftshift, fftfreq


def fermi(x, smearing):
    """Return Fermi function"""
    return 1./(1. + np.exp(x/smearing))


def step(x, delta, smearing):
    """Return smoothed step-function Fermi(x-Delta)-Fermi(x)"""
    return fermi(x-delta, smearing) - fermi(x, smearing)


def build_wire(x, heights, delta, smearing):
    """Return sum_(n=-N,N) alpha_n step(x-n*Delta)"""
    N_module = heights.size
    wire = np.zeros(x.size)
    for n in np.arange(N_module):
        wire = (wire + heights[n] *
                step(x - (n - N_module/2.)*delta, delta, smearing))
    return wire


def powerspectrum(data, dx):
    """Return power-spectrum of input signal"""
    powerspec = np.abs(fftshift(fft(data))*dx)**2
    freq = 2.*pi*fftfreq(data.size, dx)
    return freq, powerspec


def AGS(k, heights, delta, smearing):
    """Return roughness-height power spectrum W(k)"""
    N_module = np.arange(heights.size)
    # Take correlation of alphas into account:
    omega = ([np.exp(-1j*n*k*delta)*heights[n] for n in N_module])
    omega = np.sum(omega, axis=0)
    omega = np.abs(omega)**2 / heights.size
    return (1./delta * (2.*pi*smearing *
            np.sinh(k*pi*smearing)**(-1)*np.sin(k*delta/2.))**2) * omega


def SGS(k, heights, delta, smearing):
    """Return roughness-height power spectrum S(k)"""
    N_module = np.arange(heights.size)
    # a[n-1] and a[n+1] = 0 for n=N and n=0
    a = np.concatenate([[0], heights, [0]])
    # Take correlation of alphas into account:
    omega = ([np.exp(-1j*n*k*delta) *
              (a[n]*(a[n]-a[n+1])*np.exp(-1j*k*delta) +
               a[n]*(a[n]-a[n-1])) for n in N_module])
    omega = np.sum(omega, axis=0)
    omega = np.abs(omega)**2 / heights.size
    return (1./delta / 72. * (k*pi*(1.+k**2*smearing**2) *
            np.sinh(k*pi*smearing)**(-1))**2) * omega


def transmission(n, d, L, sigma, k, heights, delta, smearing):
    """Return transmission T based on analytical expressions for W and S"""
    # only symmetric wire geometry considered yet
    invLbAGS = (4.*sigma**2 / d**6 * (n*pi)**4 / k**2 *
                AGS(2*k, heights, delta, smearing))
    invLbSGS = (0.5*(sigma/d*pi*n)**4 / k**2 * (1./3. + 1./(pi*n)**2)**2 *
                SGS(2*k, heights, delta, smearing))
    return np.exp(-L*(invLbAGS+invLbSGS)), invLbAGS, invLbSGS
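A hedged smoke test of the building blocks above; the grid, the random step heights, and the parameter values are arbitrary illustrative choices, not values from this repo.

# Hypothetical smoke test (not in the original file); parameters are arbitrary.
x = np.linspace(-10., 10., 1001)
heights = np.random.uniform(-1., 1., 50)   # random step heights alpha_n
wire = build_wire(x, heights, delta=1.0, smearing=0.05)
k, P = powerspectrum(wire, x[1] - x[0])
print(fermi(0.0, 0.1))   # 0.5 exactly at the step centre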
ipernet/RatticWeb
ratticweb/context_processors.py
Python
gpl-2.0
1,262
0.001585
from cred.models import CredChangeQ

from django.conf import settings
from django.utils import timezone


def base_template_reqs(request):
    cntx = {
        'pageurl': request.path,
        'LDAP_ENABLED': settings.LDAP_ENABLED,
        'GOAUTH2_ENABLED': settings.GOAUTH2_ENABLED,
        'EXPORT_ENABLED': not settings.RATTIC_DISABLE_EXPORT,
        'TEMPLATE_DEBUG': settings.TEMPLATE_DEBUG,
        'ALLOWPWCHANGE': not (settings.LDAP_ENABLED and
                              not settings.AUTH_LDAP_ALLOW_PASSWORD_CHANGE),
        'rattic_icon': 'rattic/img/rattic_icon_normal.png',
        'rattic_logo': 'rattic/img/rattic_logo_normal.svg',
    }

    if settings.HELP_SYSTEM_FILES:
        cntx['helplinks'] = True
    else:
        cntx['helplinks'] = False

    if request.user.is_authenticated():
        cntx['changeqcount'] = CredChangeQ.objects.for_user(request.user).count()

    return cntx


def logo_selector(request):
    cntx = {}

    tz = timezone.get_current_timezone()
    time = tz.normalize(timezone.now())

    if ((time.hour > 20 and time.hour < 24) or
            (time.hour >= 0 and time.hour < 6)):
        cntx['rattic_icon'] = 'rattic/img/rattic_icon_sleeping.png'
        cntx['rattic_logo'] = 'rattic/img/rattic_logo_sleeping.svg'

    return cntx
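A hedged sketch of how such context processors get registered; TEMPLATE_CONTEXT_PROCESSORS is the pre-Django-1.8 setting style this era of RatticWeb appears to use (it still references TEMPLATE_DEBUG), and the surrounding entries are illustrative, not copied from the project.

# Hypothetical settings.py wiring (not in the original file).
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'ratticweb.context_processors.base_template_reqs',
    'ratticweb.context_processors.logo_selector',
)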
allisson/django-tiny-rest
testproject/blog/tests/test_views.py
Python
mit
10,142
0
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase from django.contrib.auth import get_user_model from django.core.urlresolvers import reverse import json from model_mommy import mommy from io import BytesIO from PIL import Image from tiny_rest.tests import Client import status from blog.models import Post, Comment User = get_user_model() class TestPostAPIView(TestCase): def setUp(self): self.client = Client() self.user = User.objects.create_user( 'user', 'user@email.com', '123456' ) self.list_url = reverse('blog:post_api') self.posts = mommy.make(Post, user=self.user, _quantity=20) self.detail_url = reverse('blog:post_api', args=[self.posts[0].pk]) self.file_obj = BytesIO() image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0)) image.save(self.file_obj, 'PNG') self.file_obj.name = 'test.jpg' self.file_obj.seek(0) self.client.login(username='user', password='123456') def tearDown(self): for post in Post.objects.all(): post.image.delete() post.delete() def test_list(self): self.client.logout() response = self.client.get(self.list_url) data = json.loads(response.content.decode()) self.assertEqual(len(data['data']), 10) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get(self.list_url, {'page': 'invalid'}) data = json.loads(response.content.decode()) self.assertEqual(len(data['data']), 10) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get(self.list_url, {'page': 100}) data = json.loads(response.content.decode()) self.assertEqual(len(data['data']), 10) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_detail(self): self.client.logout() response = self.client.get(self.detail_url) data = json.loads(response.content.decode()) self.assertEqual(data['id'], self.posts[0].pk) self.assertEqual(response.status_code, status.HTTP_200_OK) self.posts[0].delete() response = self.client.get(self.detail_url) data = json.loads(response.content.decode()) self.assertEqual(data['error'], 'Resource Not Found') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_create(self): response = self.client.post(self.list_url, {}) data = json.loads(response.content.decode()) self.assertEqual(data['error']['body'][0], 'This field is required.') self.assertEqual(data['error']['image'][0], 'This field is required.') self.assertEqual(data['error']['slug'][0], 'This field is required.') self.assertEqual(data['error']['title'][0], 'This field is required.') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) response = self.client.post( self.list_url, { 'title': 'my post', 'slug': 'my-post', 'body': 'my body', 'image': self.file_obj } ) data = json.loads(response.content.dec
ode()) self.assertEqual(data['title'], 'my post') self.assertEqual(data['slug'], 'my-post') self.assertEqual(data['body'], 'my body') self.assertEqual(data['user']['id'], self.user.pk)
self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_update(self): response = self.client.put(self.detail_url, {}) data = json.loads(response.content.decode()) self.assertEqual(data['error']['body'][0], 'This field is required.') self.assertEqual(data['error']['slug'][0], 'This field is required.') self.assertEqual(data['error']['title'][0], 'This field is required.') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) response = self.client.put( self.detail_url, { 'title': 'my post', 'slug': 'my-post', 'body': 'my body', 'image': self.file_obj }, ) data = json.loads(response.content.decode()) self.assertEqual(data['title'], 'my post') self.assertEqual(data['slug'], 'my-post') self.assertEqual(data['body'], 'my body') self.assertEqual(data['user']['id'], self.user.pk) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_partial_update(self): post = self.posts[0] response = self.client.patch( self.detail_url, { 'title': 'my post', }, ) data = json.loads(response.content.decode()) self.assertEqual(data['title'], 'my post') self.assertEqual(data['slug'], post.slug) self.assertEqual(data['body'], post.body) self.assertEqual(data['user']['id'], post.user.pk) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_destroy(self): post = self.posts[0] response = self.client.delete(self.detail_url) self.assertFalse(Post.objects.filter(pk=post.pk).exists()) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) response = self.client.delete(self.detail_url) data = json.loads(response.content.decode()) self.assertEqual(data['error'], 'Resource Not Found') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) class TestCommentAPIView(TestCase): def setUp(self): self.client = Client() self.user = User.objects.create_user( 'user', 'user@email.com', '123456' ) self.post = mommy.make(Post, user=self.user) self.comments = mommy.make( Comment, post=self.post, user=self.user, _quantity=20 ) self.list_url = reverse('blog:comment_api', args=[self.post.pk]) self.detail_url = reverse( 'blog:comment_api', args=[self.post.pk, self.comments[0].pk] ) self.client.login(username='user', password='123456') def test_list(self): self.client.logout() response = self.client.get(self.list_url) data = json.loads(response.content.decode()) self.assertEqual(len(data['data']), 10) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get(self.list_url, {'page': 'invalid'}) data = json.loads(response.content.decode()) self.assertEqual(len(data['data']), 10) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get(self.list_url, {'page': 100}) data = json.loads(response.content.decode()) self.assertEqual(len(data['data']), 10) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_detail(self): self.client.logout() response = self.client.get(self.detail_url) data = json.loads(response.content.decode()) self.assertEqual(data['id'], self.comments[0].pk) self.assertEqual(response.status_code, status.HTTP_200_OK) self.comments[0].delete() response = self.client.get(self.detail_url) data = json.loads(response.content.decode()) self.assertEqual(data['error'], 'Resource Not Found') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_create(self): response = self.client.post(self.list_url, {}) data = json.loads(response.content.decode()) self.assertEqual( data['error']['comment'][0], 'This field is required.' 
) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) response = self.client.post( self.list_url, { 'comment': 'my comment' } ) data = json.loads(response.content.decode()) self.assertEqual(data['post'], self.post.pk) self.assertEqual(data['comment'],
sarojaerabelli/HVGS
CareerTinderServer/CareerTinder/migrations/0005_auto_20160918_0221.py
Python
mit
573
0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-18 06:21
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('CareerTinder', '0004_auto_20160918_0152'),
    ]

    operations = [
        migrations.RenameField(
            model_name='hiree',
            old_name='first_name',
            new_name='email',
        ),
        migrations.RenameField(
            model_name='hiree',
            old_name='last_name',
            new_name='name',
        ),
    ]
fjpena/sword-of-ianna-zx
python_src/ianna_score.py
Python
apache-2.0
6,072
0.049407
import pygame import time import scripts """ Score class Handles all the score area package: ianna """ class IannaScore(): def __init__ (self, buffer, screen, game_entities): self.score_image = pygame.image.load('artwork/marcador.png').convert() self.font = pygame.image.load('artwork/font.png').convert() self.chars = [] self.buffer = buffer self.screen = screen self.game_entities = game_entities self.weapons = [] self.weapons.append(pygame.image.load('artwork/marcador_armas_sword.png').convert()) self.weapons
.append(pygame.image.load('artwork/marcador_armas_eclipse.png').convert()) self.weapons.append(pygame.image.load('artwork/marcador_armas_axe.png').convert()) self.weapons.append(pygame.image.load('artwork/marcador_armas_blade.png').convert()) self.first_object_in_inventory = 0 # We
have 64 chars, in ASCII order starting by BLANK (32) # There are some special chars, look at the font! for tile_x in range (0,32): rect = (tile_x*8, 0, 8, 8) self.chars.append(self.font.subsurface(rect)) for tile_x in range (0,32): rect = (tile_x*8, 8, 8, 8) self.chars.append(self.font.subsurface(rect)) def clean_text_area(self): for y in range(0,3): for x in range(0,30): self.buffer.blit(self.chars[0],(8+x*8,168+y*8)) def print_string(self,string): fpsClock = pygame.time.Clock() y=0 x=0 i=0 while i < len(string): word = "" # Find the word while string[i] != ',' and string[i] != '.' and string[i] != ' ': word = word + string[i] i = i + 1 # Add the punctuation character word = word + string[i] i = i + 1 # Now print it if x + len(word) > 30: y = y + 1 x = 0 if y == 3: # We need to wait until the player presses any key self.buffer.blit(self.chars[32],(240,184)) pygame.transform.scale(self.buffer,(256*3,192*3),self.screen) pygame.display.flip() self.wait_for_keypress() y = 0 self.clean_text_area() j = 0 while j < len(word): char = ord(word[j]) - 32 self.buffer.blit(self.chars[char],(8+x*8,168+y*8)) x = x + 1 j = j + 1 pygame.transform.scale(self.buffer,(256*3,192*3),self.screen) pygame.display.flip() fpsClock.tick(25) # run at 10 fps self.buffer.blit(self.chars[32],(240,184)) pygame.transform.scale(self.buffer,(256*3,192*3),self.screen) pygame.display.flip() self.wait_for_keypress() def print_char(self,char,x,y): char = ord(str(char)) - 32 self.buffer.blit(self.chars[char],(x,y)) def wait_for_keypress(self): ''' Silly function, just wait for a keypress to happen In the Spectrum version, it should be way better ''' keypressed = False keyreleased = False key = None while (not keypressed) and (not keyreleased): events = pygame.event.get() for event in events: if event.type == pygame.KEYDOWN: # keypressed, wait until it is released key = event.key keypressed = True if event.type == pygame.KEYUP: # keypressed, wait until it is released if key == event.key: keyreleased = True def print_meter(self,x,value, color): ''' Display an entity health, on X ''' y=191 value = value*23/100 rect = [x+2,y-value,5,value] pygame.draw.rect(self.buffer,color,rect) def print_inventory(self,player): ''' Display the inventory ''' currentx = 24 x = 0 if player.current_object > self.first_object_in_inventory + 2: self.first_object_in_inventory = self.first_object_in_inventory + 1 elif player.current_object < self.first_object_in_inventory: self.first_object_in_inventory = self.first_object_in_inventory - 1 for item in player.inventory[self.first_object_in_inventory:]: if x == 3: break self.buffer.blit(player.map.tile_table[self.tiles_per_pickable_object[item]], (currentx,168)) currentx = currentx + 24 x = x + 1 # Use a marker for the current selected object self.buffer.blit(self.chars[63],(24+(player.current_object-self.first_object_in_inventory)*24,184)) def draw(self): self.buffer.set_clip(pygame.Rect(0,160,256,192)) # set clipping area for game, should then set clipping for score area self.buffer.blit(self.score_image,(0,160)) # Print barbarian energy self.print_meter(168,(self.game_entities[0].energy*100) / self.game_entities[0].get_entity_max_energy(),(255,0,0)) # Print barbarian level self.print_meter(176,(self.game_entities[0].experience*100) / self.game_entities[0].get_player_max_exp(),(0,255,255)) # Print current weapon self.buffer.blit(self.weapons[self.game_entities[0].weapon-1],(112,168)) if self.game_entities[1] and self.game_entities[1].enemy_type != "OBJECT_ENEMY_ROCK": entity = self.game_entities[1] energy 
= (entity.energy*100) / entity.enemy_energy[entity.enemy_type][entity.level] self.print_meter(192,energy,(0,255,0)) # Print energy in numbers if entity.energy > 99: print "WARNING: enemy energy is > 100" else: self.print_char(entity.energy/10,200,176) self.print_char(entity.energy%10,208,176) self.print_char(entity.level,208,184) if self.game_entities[2] and self.game_entities[2].enemy_type not in ('OBJECT_ENEMY_ROCK','OBJECT_ENEMY_SECONDARY'): entity = self.game_entities[2] energy = (entity.energy*100) / entity.enemy_energy[entity.enemy_type][entity.level] self.print_meter(216,energy,(0,255,0)) if entity.energy > 99: print "WARNING: enemy energy is > 100" else: self.print_char(entity.energy/10,224,176) self.print_char(entity.energy%10,232,176) self.print_char(entity.level,232,184) self.print_inventory(self.game_entities[0]) # Remember to copy this from scripts.py when new objects are created tiles_per_pickable_object = { "OBJECT_KEY_GREEN": 217, "OBJECT_KEY_BLUE": 218, "OBJECT_KEY_YELLOW": 219, "OBJECT_BREAD": 220, "OBJECT_MEAT": 221, "OBJECT_HEALTH": 222, "OBJECT_KEY_RED": 223, "OBJECT_KEY_WHITE": 224, "OBJECT_KEY_PURPLE": 225, }
srjoglekar246/sympy
sympy/printing/tests/test_jscode.py
Python
bsd-3-clause
8,432
0.010911
from sympy.core import pi, oo, symbols, Function, Rational, Integer, GoldenRatio, EulerGamma, Catalan, Lambda, Dummy from sympy.functions import Piecewise, sin, cos, Abs, exp, ceiling, sqrt from sympy.utilities.pytest import raises from sympy.printing.jscode import JavascriptCodePrinter from sympy.utilities.lambdify import implemented_function from sympy.tensor import IndexedBase, Idx # import test from sympy import jscode x, y, z = symbols('x,y,z') g = Function('g') def test_printmethod(): assert jscode(Abs(x)) == "Math.abs(x)" def test_jscode_sqrt(): assert jscode(sqrt(x)) == "Math.sqrt(x)" assert jscode(x**0.5) == "Math.sqrt(x)" assert jscode(sqrt(x)) == "Math.sqrt(x)" def test_jscode_Pow(): assert jscode(x**3) == "Math.pow(x, 3)" assert jscode(x**(y**3)) == "Math.pow(x, Math.pow(y, 3))" assert jscode(1/(g(x)*3.5)**(x - y**x)/(x**2 + y)) == \ "Math.pow(3.5*g(x), -x + Math.pow(y, x))/(Math.pow(x, 2) + y)" assert jscode(x**-1.0) == '1/x' def test_jscode_constants_mathh(): assert jscode(exp(1)) == "Math.E" assert jscode(pi) == "Math.PI" assert jscode(oo) == "Number.POSITIVE_INFINITY" assert jscode(-oo) == "Number.NEGATIVE_INFINITY" def test_jscode_constants_other(): assert jscode(2*GoldenRatio) == "var GoldenRatio = 1.61803398874989;\n2*GoldenRatio" assert jscode(2*Catalan) == "var Catalan = 0.915965594177219;\n2*Catalan" assert jscode(2*EulerGamma) == "var EulerGamma = 0.577215664901533;\n2*EulerGamma" def test_jscode_Rational(): assert jscode(Rational(3,7)) == "3/7" assert jscode(Rational(18,9)) == "2" assert jscode(Rational(3,-7)) == "-3/7" assert jscode(Rational(-3,-7)) == "3/7" def test_jscode_Integer(): assert jscode(Integer(67)) == "67" assert jscode(Integer(-1)) == "-1" def test_jscode_functions(): assert jscode(sin(x) ** cos(x)) == "Math.pow(Math.sin(x), Math.cos(x))" def test_jscode_inline_function(): x = symbols('x') g = implemented_function('g', Lambda(x, 2*x)) assert jscode(g(x)) == "2*x" g = implemented_function('g', Lambda(x, 2*x/Catalan)) assert jscode(g(x)) == "var Catalan = %s;\n2*x/Catalan" %Catalan.n() A = IndexedBase('A') i = Idx('i', symbols('n', integer=True)) g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x))) assert jscode(g(A[i]), assign_to=A[i]) == ( "for (var i=0; i<n; i++){\n" " A[i] = (1 + A[i])*(2 + A[i])*A[i];\n" "}" ) def test_jscode_exceptions(): assert jscode(ceiling(x)) == "Math.ceil(x)" assert jscode(Abs(x)) == "Math.abs(x)" def test_jscode_boolean(): assert jscode(x & y) == "x && y" assert jscode(x | y) == "x || y" assert jscode(~x) == "!x" assert jscode(x & y & z) == "x && y && z" assert jscode(x | y | z) == "x || y || z" assert jscode((x & y) | z) == "z || x && y" assert jscode((x | y) & z) == "z && (x || y)" def test_jscode_Piecewise(): p = jscode(Piecewise((x, x<1), (x**2, True))) s = \ """\ if (x < 1) { x } else { Math.pow(x, 2) }\ """ assert p == s def test_jscode_Piecewise_deep(): p = jscode(2*Piecewise((x, x<1),(x**2, True))) s = \ """\ 2*if (x < 1) { x } else { Math.pow(x, 2) }\ """ assert p == s def test_jscode_settings(): raises(TypeError, lambda : jscode(sin(x),method="garbage")) def test_jscode_Indexed(): from sympy.tensor import IndexedBase, Idx from sympy import symbols i,j,k,n,m,o = symbols('i j k n m o', integer=True) p = JavascriptCodePrinter() p._not_c = set() x = IndexedBase('x')[Idx(j, n)] assert p._print_Indexed(x) == 'x[j]' A = IndexedBase('A')[Idx(i, m), Idx(j, n)] assert p._print_Indexed(A) == 'A[%s]'% str(j + n*i) B = IndexedBase('B')[Idx(i, m), Idx(j, n), Idx(k, o)] assert p._print_Indexed(B) == 'B[%s]'% str(k + i*n*o 
+ j*o) assert p._not_c == set() def test_jscode_loops_matrix_vector(): n,m = symbols('n m', integer=True) A = IndexedBase('A') x = IndexedBase('x') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) s = ( 'for (var i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' 'for (var i=0; i<m; i++){\n' ' for (var j=0; j<n; j++){\n' ' y[i] = y[i] + A[i*n + j]*x[j];\n' ' }\n' '}' ) c = jscode(A[i, j]*x[j], assign_to=y[i]) assert c == s def test_dummy_loops(): # the following line could also be # [Dummy(s, integer=True) for s in 'im'] # or [Dummy(integer=True) for s in 'im'] i, m = symbols('i m', integer=True, cls=Dummy) x = IndexedBase('x') y = IndexedBase('y') i = Idx(i, m) expected = ( 'for (var i_%(icount)i=0; i_%(icount)i<m_%(mcount)i; i_%(icount)i++){\n' ' y[i_%(icount)i] = x[i_%(icount)i];\n' '}' ) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index} code = jscode(x[i], assign_to=y[i]) assert code == expected def test_jscode_loops_add(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m = symbols('n m', integer=True) A = IndexedBase('A') x = IndexedBase('x') y = IndexedBase('y') z = IndexedBase('z') i = Idx('i', m) j = Idx('j', n) s = ( 'for (var i=0; i<m; i++){\n' ' y[i] = x[i] + z[i];\n' '}\n' 'for (var i=0; i<m; i++){\n' ' for (var j=0; j<n; j++){\n' ' y[i] = y[i] + A[i*n + j]*x[j];\n' ' }\n' '}' ) c = jscode(A[i, j]*x[j] + x[i] + z[i], assign_to=y[i]) assert c == s def test_jscode_loops_multiple_contractions(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o, p = symbols('n m o p', integer=True) a = IndexedBase('a') b = IndexedBase('b') y = IndexedBase('y') i = Idx('i', m
) j = Idx('j', n) k = Idx('k', o) l = Idx('l', p) s = ( 'for (var i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' 'for (var i=0; i<m; i++){\n' ' for (var j=0; j<n; j++){\n' ' for (var k=0; k<o; k++){\n' ' for (var l=0; l<p; l++){\n' ' y[i] = y[i] + b[j*o*p + k*p + l]*a[i*n*o*p + j*o*p + k*p + l];\n' ' }\n' ' }\n' ' }\n' '}' ) c = jscode(b[j, k, l]*a[i, j, k, l], assign_to=y[i]) assert c == s def test_js
code_loops_addfactor(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o, p = symbols('n m o p', integer=True) a = IndexedBase('a') b = IndexedBase('b') c = IndexedBase('c') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) k = Idx('k', o) l = Idx('l', p) s = ( 'for (var i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' 'for (var i=0; i<m; i++){\n' ' for (var j=0; j<n; j++){\n' ' for (var k=0; k<o; k++){\n' ' for (var l=0; l<p; l++){\n' ' y[i] = (a[i*n*o*p + j*o*p + k*p + l] + b[i*n*o*p + j*o*p + k*p + l])*c[j*o*p + k*p + l] + y[i];\n' ' }\n' ' }\n' ' }\n' '}' ) c = jscode((a[i, j, k, l] + b[i, j, k, l])*c[j, k, l], assign_to=y[i]) assert c == s def test_jscode_loops_multiple_terms(): from sympy.tensor import IndexedBase, Idx from sympy import symbols n, m, o, p = symbols('n m o p', integer=True) a = IndexedBase('a') b = IndexedBase('b') c = IndexedBase('c') y = IndexedBase('y') i = Idx('i', m) j = Idx('j', n) k = Idx('k', o) s0 = ( 'for (var i=0; i<m; i++){\n' ' y[i] = 0;\n' '}\n' ) s1 = ( 'for (var i=0; i<m; i++){\n' ' for (var j=0; j<n; j++){\n' ' for (var k=0; k<o; k++){\n' ' y[i] = b[j]*b[k]*c[i*n*o + j*o + k] + y[i];\n' ' }\n' ' }\n' '}\n' ) s2 = ( 'for (var i=0; i<m; i++){\n' ' for (var k=0; k<o; k++){\n' ' y[i] = b[k]*a[i*o + k] + y[i];\n' ' }\n' '}\n' ) s3 = ( 'for (var i=0; i<m; i++){\n' ' for (var j=0; j<n; j++){\n' ' y[i] = b[j]*a[i*n + j] + y[i];\n' ' }\n' '}\n' ) c = jscode(b[j]*a[i, j] + b[k]*a[i, k] + b[j]*b[k]*c[i, j, k], assign_to=y[i])
shuoli84/gevent_socketio2
socketio/__init__.py
Python
mit
583
0.003431
import logging

log = logging.getLogger(__name__)


def has_bin(arg):
    """
    Helper function that checks whether arg contains binary data
    :param arg: list | tuple | bytearray | dict
    :return: (bool)
    """
    if type(arg)
is list or type(arg) is tuple: return reduce(lambda has_binary, item: has_binary or has_bin(item), arg, False) if type(arg) is bytearray or hasattr(arg, 'read'):
return True if type(arg) is dict: return reduce(lambda has_binary, item: has_binary or has_bin(item), [v for k, v in arg.items()], False) return False
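
# Illustrative behaviour (a sketch; these sample values are assumptions, not
# from the original module):
#
#     has_bin([1, 'a'])                         # -> False
#     has_bin({'img': bytearray(b'\x00\x01')})  # -> True, bytearray is binary
#     has_bin((open('f.bin'),))                 # -> True, file-like, has read()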
allisson/python-vindi
setup.py
Python
mit
1,845
0
import codecs import os import re from setuptools import Command, find_packages, setup here = os.path.abspath(os.path.dirname(__file__)) version = "0.0.0" changes = os.path.join(here, "CHANGES.rst") match = r"^#*\s*(?P<version>[0-9]+\.[0-9]+(\.[0-9]+)?)$" with codecs.open(changes, encoding="utf-8") as changes: for line in changes: res = re.match(match, line) if res: version = res.group("version") break # Get the long description with codecs.open(os.path.join(here, "README.rst"), encoding="utf-8") as f: long_description = f.read() # Get version with codecs.open(os.path.join(here, "CHANGES.rst"), encoding="utf-8") as f: changelog = f.read() install_requirements = ["simple-rest-client>=1.0.0"] tests_requirements = ["pytest", "pytest-cov", "coveralls"] class VersionCommand(Command): description = "print library version" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): print(version) setup( name="vindi", version=version, description="Integração com API da Vindi (Python 3.6+)", long_description=long_description, url="https://github.com/allisson/python-vindi", author="Allisson Azevedo", author_email="allisson@gmail.com", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Software Development :: Lib
raries", ], keywords=
"rest client http vindi", packages=find_packages(exclude=["docs", "tests*"]), setup_requires=["pytest-runner"], install_requires=install_requirements, tests_require=tests_requirements, cmdclass={"version": VersionCommand}, )
factorlibre/odoo-addons-cpo
purchase_compute_order_product_filter_season/models/__init__.py
Python
agpl-3.0
197
0
# -*- coding: utf-8 -*- # © 2016
FactorLibre - Hugo Santos <hugo.santos@factorlibre.com> # License AGPL-3.0 or later (http://w
ww.gnu.org/licenses/agpl.html). from . import computed_purchase_order
elfnor/sverchok
nodes/vector/interpolation_mk2.py
Python
gpl-3.0
7,301
0.003013
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hop
e that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See th
e # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### import bisect import numpy as np import bpy from bpy.props import EnumProperty, FloatProperty, BoolProperty from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import updateNode, dataCorrect, repeat_last # spline function modifed from # from looptools 4.5.2 done by Bart Crouch # calculates natural cubic splines through all given knots def cubic_spline(locs, tknots): knots = list(range(len(locs))) n = len(knots) if n < 2: return False x = tknots[:] result = [] for j in range(3): a = [] for i in locs: a.append(i[j]) h = [] for i in range(n-1): if x[i+1] - x[i] == 0: h.append(1e-8) else: h.append(x[i+1] - x[i]) q = [False] for i in range(1, n-1): q.append(3/h[i]*(a[i+1]-a[i]) - 3/h[i-1]*(a[i]-a[i-1])) l = [1.0] u = [0.0] z = [0.0] for i in range(1, n-1): l.append(2*(x[i+1]-x[i-1]) - h[i-1]*u[i-1]) if l[i] == 0: l[i] = 1e-8 u.append(h[i] / l[i]) z.append((q[i] - h[i-1] * z[i-1]) / l[i]) l.append(1.0) z.append(0.0) b = [False for i in range(n-1)] c = [False for i in range(n)] d = [False for i in range(n-1)] c[n-1] = 0.0 for i in range(n-2, -1, -1): c[i] = z[i] - u[i]*c[i+1] b[i] = (a[i+1]-a[i])/h[i] - h[i]*(c[i+1]+2*c[i])/3 d[i] = (c[i+1]-c[i]) / (3*h[i]) for i in range(n-1): result.append([a[i], b[i], c[i], d[i], x[i]]) splines = [] for i in range(len(knots)-1): splines.append([result[i], result[i+n-1], result[i+(n-1)*2]]) return(splines) def eval_spline(splines, tknots, t_in): out = [] for t in t_in: n = bisect.bisect(tknots, t, lo=0, hi=len(tknots))-1 if n > len(splines)-1: n = len(splines)-1 if n < 0: n = 0 pt = [] for i in range(3): ax, bx, cx, dx, tx = splines[n][i] x = ax + bx*(t-tx) + cx*(t-tx)**2 + dx*(t-tx)**3 pt.append(x) out.append(pt) return out class SvInterpolationNodeMK2(bpy.types.Node, SverchCustomTreeNode): '''Vector Interpolate''' bl_idname = 'SvInterpolationNodeMK2' bl_label = 'Vector Interpolation mk2' bl_icon = 'OUTLINER_OB_EMPTY' t_in_x = FloatProperty(name="tU", default=.5, min=0, max=1, precision=5, update=updateNode) t_in_y = FloatProperty(name="tV", default=.5, min=0, max=1, precision=5, update=updateNode) defgrid = BoolProperty(name='default_grid', default=True, update=updateNode) regimes = [('P', 'Pattern', "Pattern", 0), ('G', 'Grid', "Grid", 1)] regime = EnumProperty(name='regime', default='G', items=regimes, update=updateNode) directions = [('UV', 'UV', "Two directions", 0), ('U', 'U', "One direction", 1)] direction = EnumProperty(name='Direction', default='U', items=directions, update=updateNode) modes = [('SPL', 'Cubic', "Cubic Spline", 0), ('LIN', 'Linear', "Linear Interpolation", 1)] mode = EnumProperty(name='Mode', default="SPL", items=modes, update=updateNode) def sv_init(self, context): self.inputs.new('VerticesSocket', 'Vertices') self.inputs.new('StringsSocket', 'IntervalX').prop_name = 't_in_x' self.inputs.new('StringsSocket', 'IntervalY').prop_name = 't_in_y' self.outputs.new('VerticesSocket', 'Vertices') def draw_buttons(self, context, layout): #pass col = layout.column(align=True) row = col.row(align=True) row.prop(self, 'mode', expand=True) row = col.row(align=True) row.prop(self, 'regime', expand=True) if self.regime == 'G': row = col.row(align=True) row.prop(self, 'direction', expand=True) col.prop(self, 'defgrid') def 
interpol(self, verts, t_ins): verts_out = [] for v, t_in in zip(verts, repeat_last(t_ins)): pts = np.array(v).T tmp = np.apply_along_axis(np.linalg.norm, 0, pts[:, :-1]-pts[:, 1:]) t = np.insert(tmp, 0, 0).cumsum() t = t/t[-1] t_corr = [min(1, max(t_c, 0)) for t_c in t_in] # this should also be numpy if self.mode == 'LIN': out = [np.interp(t_corr, t, pts[i]) for i in range(3)] verts_out.append(list(zip(*out))) else: # SPL spl = cubic_spline(v, t) out = eval_spline(spl, t, t_corr) verts_out.append(out) return verts_out def process(self): if not any(s.is_linked for s in self.outputs): return if self.inputs['Vertices'].is_linked: verts = self.inputs['Vertices'].sv_get() verts = dataCorrect(verts) t_ins_x = self.inputs['IntervalX'].sv_get() t_ins_y = self.inputs['IntervalY'].sv_get() if self.regime == 'P' and self.direction == 'U': self.direction = 'UV' if self.defgrid: t_ins_x = [[i/10 for i in range(11)]] t_ins_y = [[i/10 for i in range(11)]] if self.regime == 'G': vertsX = self.interpol(verts, t_ins_x) if self.direction == 'UV': verts_T = np.swapaxes(np.array(vertsX),0,1).tolist() verts_out = self.interpol(verts_T, t_ins_y) else: verts_out = vertsX else: verts_out_ = [] for x,y in zip(t_ins_x[0],t_ins_y[0]): vertsX = self.interpol(verts, [[x]]) verts_T = np.swapaxes(np.array(vertsX),0,1).tolist() vertsY = self.interpol(verts_T, [[y]]) verts_out_.extend(vertsY) verts_out = [[i[0] for i in verts_out_]] self.outputs['Vertices'].sv_set(verts_out) def register(): bpy.utils.register_class(SvInterpolationNodeMK2) def unregister(): bpy.utils.unregister_class(SvInterpolationNodeMK2)
Fahrenholz/maat-analyzer
functions/debugging_functions.py
Python
gpl-3.0
537
0.005587
from functions import global_functions def debug_step_here(): ""
" Creates a Breakpoint when executed in debug-mode :return: """ if global_functions.get_config_key("debug"): try: input("Press any key to continue") except SyntaxError: pass def print_out_var(variable): """ Prints out a variable when executed in debug-mode :param variable: :return: """ if global_functions.get_config_key("debug"): print(variable) debug_step_here()
Tigge/trello-to-web
markdown_imaged.py
Python
mit
1,141
0.003506
import os.path import urllib.parse import requests import rfc6266 import settings import utilities from
markdown import Extension from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE class ImageDownloadPattern(ImagePattern): def handleMatch(self, match): el = super(ImageDownloadPattern, self).handleMatch(match) urlparts = urllib.parse.urlparse(el.attrib["src"]) if urlparts.netloc: response = requests.get(urlparts.geturl
()) response.raise_for_status() filename = rfc6266.parse_requests_response(response).filename_unsafe with open(os.path.join(settings.get("folder"), filename), "wb") as f: f.write(response.content) el.attrib["src"] = filename utilities.fix_image(os.path.join(settings.get("folder"), filename), settings.get("features")["width"]) return el class ImageDownload(Extension): def extendMarkdown(self, md, md_globals): md.inlinePatterns['image_link'] = ImageDownloadPattern(IMAGE_LINK_RE, md) def makeExtension(configs={}): return ImageDownload(configs=configs)
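
# Usage sketch (assumed call pattern for python-markdown extensions; the
# `text` variable below is hypothetical):
#
#     import markdown
#     html = markdown.markdown(text, extensions=[makeExtension()])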
pidydx/grr
grr/checks/rsyslog_test.py
Python
apache-2.0
1,801
0.004442
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for rsyslog state checks.""" from grr.lib import flags from grr.lib import test_lib from grr.lib.checks import checks_test_lib from grr.parsers import config_file class RsyslogCheckTests(checks_test_lib.HostChe
ckTest): """Test the rsyslog checks.""" @classmethod def setUpClass(cls): cls.LoadCheck("rsyslog.yaml") cls.parser = config_file.RsyslogParser() def testLoggingAuthRemoteOK(self): chk_id = "CIS-LOGGING-AUTH-REMOTE" test_data = { "/etc/rsyslog.conf": "*.* @@tcp.example.com.:514;RSYSLOG_ForwardFormat" } host_data = self.GenFileData("LinuxRsyslogConfigs", test_data, self.parser) results = self.RunChecks(host_data) self.assertCheckUndetected(chk_id,
results) def testLoggingAuthRemoteFail(self): chk_id = "CIS-LOGGING-AUTH-REMOTE" test_data = {"/etc/rsyslog.conf": "*.* /var/log/messages"} host_data = self.GenFileData("LinuxRsyslogConfigs", test_data, self.parser) sym = "Missing attribute: No remote destination for auth logs." found = ["Expected state was not found"] results = self.RunChecks(host_data) self.assertCheckDetectedAnom(chk_id, results, sym, found) def testLoggingFilePermissions(self): chk_id = "CIS-LOGGING-FILE-PERMISSIONS" ro = self.CreateStat("/test/ro", 0, 0, 0o0100640) rw = self.CreateStat("/test/rw", 0, 0, 0o0100666) sym = "Found: Log configurations can be modified by non-privileged users." found = ["/test/rw user: 0, group: 0, mode: -rw-rw-rw-"] results = self.GenResults(["LinuxRsyslogConfigs"], [[ro, rw]]) self.assertCheckDetectedAnom(chk_id, results, sym, found) def main(argv): test_lib.GrrTestProgram(argv=argv) if __name__ == "__main__": flags.StartMain(main)
PeytonXu/learn-python
cases/errbot/config.py
Python
mit
761
0.011827
import logging

# This is a minimal configuration to get you started with the Text mode.
# If you want to connect Errbot to chat services, check out
# the options i
n the more complete config-template.py from here:
# https://raw.githubusercontent.com/errbotio/errbot/master/errbot/config-template.py

BACKEND = 'Text'  # Errbot will start in text mode (console only mode) and will answer commands from there.

BOT_DATA_DIR = r'D:\Work\Python\learn-python\cases\errbot\data'
BOT_EXTRA_PLUGIN_DIR = r'D:\Work\Python\learn-python\cases\errbot\plugins'

BOT_LOG_FILE = r'D:\Work\Python\learn-python\cases\errbot\errbot.log'
BOT_LOG_LEVEL = logging.DEBUG

BOT_ADMINS = ('CHANGE ME', )  # !! Don't leave that to "CHANGE ME" if you connect your errbot to a chat system !!
wendlers/usherpa-pysherpa
setup.py
Python
lgpl-2.1
1,459
0.023304
#!/usr/bin/env python ## # This file is part of the uSherpa Python Library project # # Copyright (C) 2012 Stefan Wendler <sw@kaltpost.de> # # The uSherpa Python Library is free software; you can redistribute # it and/or modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # uSherpa Python Library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with the JSherpa firmware; if not, write to the Free # Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA # 02111-1307 US
A. ## ''' uSherpa Python Library setup-script. To install this library use: sudo python setup.py install ''' from distutils.core import setup setup(name='pysherpa', version='0.1', description='uSherpa Python Library', long_description='Client library for Python to use MCU running uSherpa Firmware. Depends on pyserial.', author='Stefan Wendler', author_email='sw@usherpa.org', url='http
://www.usherpa.org/', license='LGPL 2.1', packages=['usherpa'], platforms=['Linux'], package_dir = {'': 'src'}, requires = ['serial(>=2.4)'] )
maurizi/otm-core
opentreemap/treemap/views/misc.py
Python
agpl-3.0
8,998
0
# -*- coding: utf-8 -*- from __future__ import print_function from __future__ import unicode_literals from __future__ import division import string import re import sass import json from django.utils.translation import ugettext as _ from django.core.
urlresolvers import reverse from django.conf import settings from django.contrib.gis.geos import Polygon from django.core.exceptions import ValidationError from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render, get_object_or_404 from stormwater.models
import PolygonalMapFeature from treemap.models import User, Species, StaticPage, Instance, Boundary from treemap.plugin import get_viewable_instances_filter from treemap.lib.user import get_audits, get_audits_params from treemap.lib import COLOR_RE from treemap.lib.perms import map_feature_is_creatable from treemap.units import get_unit_abbreviation, get_units from treemap.util import leaf_models_of_class _SCSS_VAR_NAME_RE = re.compile('^[_a-zA-Z][-_a-zA-Z0-9]*$') def edits(request, instance): """ Request a variety of different audit types. Params: - models Comma separated list of models (only Tree and Plot are supported) - model_id The ID of a specfici model. If specified, models must also be defined and have only one model - user Filter by a specific user - exclude (default: true) Set to false to ignore edits that are currently pending - page_size Size of each page to return (up to PAGE_MAX) - page The page to return """ params = get_audits_params(request) user_id = request.GET.get('user', None) user = None if user_id is not None: user = User.objects.get(pk=user_id) return get_audits(request.user, instance, request.GET.copy(), user, **params) def index(request, instance): return HttpResponseRedirect(reverse('map', kwargs={ 'instance_url_name': instance.url_name})) def get_map_view_context(request, instance): if request.user and not request.user.is_anonymous(): iuser = request.user.get_effective_instance_user(instance) resource_classes = [resource for resource in instance.resource_classes if map_feature_is_creatable(iuser, resource)] else: resource_classes = [] context = { 'fields_for_add_tree': [ (_('Tree Height'), 'Tree.height') ], 'resource_classes': resource_classes, 'only_one_resource_class': len(resource_classes) == 1, 'polygon_area_units': get_unit_abbreviation( get_units(instance, 'greenInfrastructure', 'area')), 'q': request.GET.get('q'), } add_map_info_to_context(context, instance) return context def add_map_info_to_context(context, instance): all_polygon_types = {c.map_feature_type for c in leaf_models_of_class(PolygonalMapFeature)} my_polygon_types = set(instance.map_feature_types) & all_polygon_types context['has_polygons'] = len(my_polygon_types) > 0 context['has_boundaries'] = instance.boundaries.exists() def static_page(request, instance, page): static_page = StaticPage.get_or_new(instance, page) return {'content': static_page.content, 'title': static_page.name} def boundary_to_geojson(request, instance, boundary_id): boundary = get_object_or_404(Boundary.all_objects, pk=boundary_id) geom = boundary.geom # Leaflet prefers to work with lat/lng so we do the transformation # here, since it way easier than doing it client-side geom.transform('4326') return HttpResponse(geom.geojson) def add_anonymous_boundary(request): request_dict = json.loads(request.body) srid = request_dict.get('srid', 4326) polygon = Polygon(request_dict.get('polygon', []), srid=srid) if srid != 3857: polygon.transform(3857) b = Boundary.anonymous(polygon) b.save() return {'id': b.id} def boundary_autocomplete(request, instance): max_items = request.GET.get('max_items', None) boundaries = instance.boundaries \ .filter(searchable=True) \ .order_by('sort_order', 'name')[:max_items] return [{'name': boundary.name, 'category': boundary.category, 'id': boundary.pk, 'value': boundary.name, 'tokens': boundary.name.split(), 'sortOrder': boundary.sort_order} for boundary in boundaries] def species_list(request, instance): max_items = request.GET.get('max_items', None) species_qs = instance.scope_model(Species)\ 
.order_by('common_name')\ .values('common_name', 'genus', 'species', 'cultivar', 'other_part_of_name', 'id') if max_items: species_qs = species_qs[:max_items] # Split names by space so that "el" will match common_name="Delaware Elm" def tokenize(species): names = (species['common_name'], species['genus'], species['species'], species['cultivar'], species['other_part_of_name']) tokens = set() for name in names: if name: tokens = tokens.union(name.split()) # Names are sometimes in quotes, which should be stripped return {token.strip(string.punctuation) for token in tokens} def annotate_species_dict(sdict): sci_name = Species.get_scientific_name(sdict['genus'], sdict['species'], sdict['cultivar'], sdict['other_part_of_name']) display_name = "%s [%s]" % (sdict['common_name'], sci_name) tokens = tokenize(species) sdict.update({ 'scientific_name': sci_name, 'value': display_name, 'tokens': tokens}) return sdict return [annotate_species_dict(species) for species in species_qs] def compile_scss(request): """ Reads key value pairs from the query parameters and adds them as scss variables with color values, then imports the main entry point to our scss file. Any variables provided will be put in the scss file, but only those which override variables with '!default' in our normal .scss files should have any effect """ # Webpack and libsass have different opinions on how url(...) works scss = "$staticUrl: '/static/';\n" # We can probably be a bit looser with what we allow here in the future if # we need to, but we must do some checking so that libsass doesn't explode for key, value in request.GET.items(): if _SCSS_VAR_NAME_RE.match(key) and COLOR_RE.match(value): scss += '$%s: #%s;\n' % (key, value) elif key == 'url': # Ignore the cache-buster query parameter continue else: raise ValidationError("Invalid SCSS values %s: %s" % (key, value)) scss += '@import "%s";' % settings.SCSS_ENTRY scss = scss.encode('utf-8') return sass.compile(string=scss, include_paths=[settings.SCSS_ROOT]) def public_instances_geojson(request): def instance_geojson(instance): return { 'type': 'Feature', 'geometry': { 'type': 'Point', 'coordinates': [instance.center_lat_lng.x, instance.center_lat_lng.y] }, 'properties': { 'name': instance.name, 'url': reverse( 'instance_index_view', kwargs={'instance_url_name': instance.url_name}), 'plot_count': instance.plot_count() } } instances = (Instance.objects .filter(is_public=True) .filter(get_viewable_instances_filter())) return [instance_geojson(instance) for instance in
robotic-ultrasound-image-system/ur5
build/ur5-master/universal_robot-kinetic-devel/ur_description/catkin_generated/pkg.installspace.context.pc.py
Python
apache-2.0
380
0
# generated from catkin/cmake/templat
e/pkg.context.pc.in CATKIN_PACKAGE_PREFIX = "" PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else [] PROJECT_CATKIN_DEPENDS = "".replace(';', ' ') PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else [] PROJECT_NAME =
"ur_description" PROJECT_SPACE_DIR = "/home/us-robot/catkin_ws/install" PROJECT_VERSION = "1.2.0"
jjdmol/LOFAR
LCS/Tools/src/makeClass.py
Python
gpl-3.0
17,838
0.029432
#! /usr/bin/env python # Copyright (C) 2005 # ASTRON (Netherlands Institute for Radio Astr
onomy) # P.O.Box 2, 7990 AA Dwingeloo, The Netherlands # # This file is part of the LOFAR software suite. # The LOFAR software suite is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # The LOFAR software suite is distributed in the hop
e that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. # # $Id$ # makeClass.py: Script to make default class files in a Package/srcdir in the # LOFAR development tree. normal class files, main program and templates # are covered # # Usage: # ./makeClass [-h] [-t list [-d] | -m] [ClassName] # Args: # ClassName The name of the Class that will be created # h,--help usage # t,--templated list This is an automated templated class, # list can contain a comma seperated list # with the template parameters. Example: # makeClass -t T,U className # d,--diy Do it yourself (manual template instanciation) # Only together with -t # m,--main This is a main program for a class # # Revisions: # # 26-01-2005 Initial Release. # # import all packages we need # import os import sys import getopt import re from datetime import date def openFile(name,mode): try: file = open (name,mode) except IOError, message: sys.exit("Error opening file: %s" % message) return file def replacePackageAndClassName(readFile,writeFile,packageName, className,subDirName): aLine=readFile.readline() year=`date.today().year` while aLine != "": #set start of copyright year if aLine.find("%YEAR%") > -1: aLine = str.replace(aLine,"%YEAR%",year) # replace SUB with Subdir when needed if aLine.find("%SUB%") > -1: if subDirName != "": aLine = str.replace(aLine,"%SUB%",subDirName+"/") else: aLine = str.replace(aLine,"%SUB%",subDirName) # replace SUBUPPER with Subdir in uppercase when needed if aLine.find("%SUBUPPER%") > -1: if subDirName != "": aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper()+"_") else: aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper()) # replace PACKAGE with real name if aLine.find("%PACKAGE%") > -1: aLine = str.replace(aLine,"%PACKAGE%",packageName) # replace PACKAGEUPPER with uppercase Package name if aLine.find("%PACKAGEUPPER%") > -1: aLine = str.replace(aLine,"%PACKAGEUPPER%",packageName.upper()) # replace CLASS with real name if aLine.find("%CLASS%") > -1: aLine = str.replace(aLine,"%CLASS%",className) # replace CLASSUPPER with uppercase classname if aLine.find("%CLASSUPPER%") > -1: aLine = str.replace(aLine,"%CLASSUPPER%",className.upper()) writeFile.write(aLine) aLine=readFile.readline() def addTemplates(type,readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName): aLine=readFile.readline() year=`date.today().year` while aLine != "": #set start of copyright year if aLine.find("%YEAR%") > -1: aLine = str.replace(aLine,"%YEAR%",year) # replace SUB with Subdir when needed if aLine.find("%SUB%") > -1: if subDirName != "": aLine = str.replace(aLine,"%SUB%",subDirName+"/") else: aLine = str.replace(aLine,"%SUB%",subDirName) # replace SUBUPPER with Subdir in uppercase when needed if aLine.find("%SUBUPPER%") > -1: if subDirName != "": aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper()+"_") else: aLine = str.replace(aLine,"%SUBUPPER%",subDirName.upper()) # replace PACKAGE with real name if aLine.find("%PACKAGE%") > -1: aLine= str.replace(aLine,"%PACKAGE%",packageName) # replace PACKAGEUPPER with uppercase Package name if aLine.find("%PACKAGEUPPER%") > -1: aLine = str.replace(aLine,"%PACKAGEUPPER%",packageName.upper()) # replace CLASS with real name if aLine.find("%CLASS%") > -1: aLine = 
str.replace(aLine,"%CLASS%",className) # replace CLASSUPPER with uppercase classname if aLine.find("%CLASSUPPER%") > -1: aLine = str.replace(aLine,"%CLASSUPPER%",className.upper()) tmpltype = "<" tmplparm = "<" i=0 while i < len(templateList): if i > 0: tmpltype += ", " tmplparm += "," tmpltype += "typename " + templateList[i] tmplparm += templateList[i] i+=1 tmpltype += ">" tmplparm += ">" # replace TEMPLATETYPE and TEMPLATEPARAM if aLine.find("%TEMPLATETYPE%") > -1: aLine = str.replace(aLine,"%TEMPLATETYPE%",tmpltype) if aLine.find("%TEMPLATEPARAM%") > -1: aLine = str.replace(aLine,"%TEMPLATEPARAM%",tmplparm) # Check if !diy, template and .h file, if so include tcc in header file if aLine.find("%INCLUDETCC%") > -1: incstr = "" if autoTemplate == 1: if subDirName != "": incstr = "#include <"+packageName+"/"+subDirName+"/"+className+".tcc>" else: incstr = "#include <"+packageName+"/"+className+".tcc>" aLine = str.replace(aLine,"%INCLUDETCC%",incstr) writeFile.write(aLine) aLine=readFile.readline() def makeDefaultClass(lofarDir,className,packageName,srcDir,incDir,subDirName): # default.h file readFile=openFile(lofarDir+"/LCS/Tools/src/templates/header.h_template","r") incHDir=incDir if subDirName != "": incHDir = incDir+"/"+subDirName writeFile=openFile(incHDir+"/"+className+".h","w") replacePackageAndClassName(readFile,writeFile,packageName,className,subDirName) writeFile.close() readFile.close() addToMakefile("h",packageName,className,incDir,subDirName) #default.cc file readFile=openFile(lofarDir+"/LCS/Tools/src/templates/header.cc_template","r") writeFile=openFile(className+".cc","w") replacePackageAndClassName(readFile,writeFile,packageName,className,subDirName) writeFile.close() readFile.close() addToMakefile("cc",packageName,className,srcDir,subDirName) def makeTemplatedClass(lofarDir,className,packageName,templateList,autoTemplate,srcDir,incDir,subDirName): #default h file readFile=openFile(lofarDir+"/LCS/Tools/src/templates/templated_header.h_template","r") incHDir=incDir if subDirName != "": incHDir = incDir+"/"+subDirName writeFile=openFile(incHDir+"/"+className+".h","w") addTemplates("h",readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName) writeFile.close() readFile.close() addToMakefile("h",packageName,className,incDir,subDirName) #default tcc template file readFile=openFile(lofarDir+"/LCS/Tools/src/templates/templated_header.tcc_template","r") writeFile=openFile(incHDir+"/"+className+".tcc","w") addTemplates("tcc",readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName) writeFile.close() readFile.close() addToMakefile("tcc",packageName,className,incDir,subDirName) if autoTemplate==0: #default diy-cc template file readFile=openFile(lofarDir+"/LCS/Tools/src/templates/templated_header.cc_template","r") writeFile=openFile(className+".cc","w") addTemplates("diy",readFile,writeFile,className,packageName,templateList,autoTemplate,subDirName) writeFile.close() readFile.close() addToMakefile("diy",packageName,className,srcDir,subDirName) def makeMainClass(lofarDir,className,packageName,srcDir,subDirName): readFile=openFile(lofarDir+"/LCS/Tools/src/templates/main.cc_template","r") writeFile=openFile(className+"Main.cc","w") replacePac
pelson/biggus
biggus/tests/unit/init/__init__.py
Python
gpl-3.0
882
0
# (C) British Crown Copyright 2016, Met Office # # This file is part of Biggus. # # Biggus is free software: you can redistribute it and/or modify it under # the terms of
the GNU Lesser General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Biggus is distributed in the hope that it will be useful, # but WITHOU
T ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Biggus. If not, see <http://www.gnu.org/licenses/>. """Unit tests for `biggus._init`""" from __future__ import absolute_import, division, print_function from six.moves import (filter, input, map, range, zip) # noqa
endlessm/chromium-browser
tools/web_dev_style/presubmit_support.py
Python
bsd-3-clause
1,332
0.010511
# Copyright 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # fo
und in the LICENSE
file. import css_checker import html_checker import js_checker import resource_checker def IsResource(f): return f.LocalPath().endswith(('.html', '.css', '.js')) def CheckStyle(input_api, output_api, file_filter=lambda f: True): apis = input_api, output_api wrapped_filter = lambda f: file_filter(f) and IsResource(f) checkers = [ css_checker.CSSChecker(*apis, file_filter=wrapped_filter), html_checker.HtmlChecker(*apis, file_filter=wrapped_filter), js_checker.JSChecker(*apis, file_filter=wrapped_filter), resource_checker.ResourceChecker(*apis, file_filter=wrapped_filter), ] results = [] for checker in checkers: results.extend(checker.RunChecks()) return results def CheckStyleESLint(input_api, output_api): is_js = lambda f: f.LocalPath().endswith('.js') js_files = input_api.AffectedFiles(file_filter=is_js, include_deletes=False) if not js_files: return [] return js_checker.JSChecker(input_api, output_api).RunEsLintChecks(js_files) def DisallowIncludes(input_api, output_api, msg): return resource_checker.ResourceChecker( input_api, output_api, file_filter=IsResource).DisallowIncludes(msg)
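
# Call-site sketch (hypothetical PRESUBMIT.py usage; the hook name follows the
# usual Chromium presubmit convention and is an assumption here):
#
#   def CheckChangeOnUpload(input_api, output_api):
#       return CheckStyle(input_api, output_api)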
MikaelSchultz/dofiloop-sentinel
sentinel/manage.py
Python
mit
257
0
#!/usr/bin/env python import
os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentinel.settings.local") from django.core.mana
gement import execute_from_command_line execute_from_command_line(sys.argv)
takwas/flask_app_template
template_app/forms.py
Python
mit
2,863
0.028292
# Extension imports from flask.ext.wtf import Form, RecaptchaField from flask_wtf.html5 import TelField, IntegerField from wtforms import StringField, PasswordField, BooleanField, SubmitField, TextAreaField, SelectField, DecimalField from wtforms.validators import Email, DataRequired, EqualTo #from flask_wtf.file import FileField from flask_wtf.file import FileField, FileRequired, FileAllowed # for file upload(s) ####TODO: Import app object #from [[app_name]] import app import db_ops ####################################################################################################################### # Basic template for a form class #class [[Form_Class_Name]](Form): # Create form fields ####TODO: ... ## Use fields and validators imported above ## Submitfield does not need a validator ## An example is shown for SelectField which loads the fields options from a database. Sample case uses 'categories' ## An example for FileField is shown below; useful for image uploads #[[form_field_name_variable]] = [[FieldType]]('[[Field label]]', validators=[[[List_of_Field_Validators,...]]]) # SelectField Example # load categories from DB #catgs = [(category.name, category.name) for category in db_ops.ret_all(db_ops.Category)] #category_fld = SelectField('Category', choices=catgs, validators=[DataRequired()]) # FileField example #img_fld = FileField('Upload a Profile Photo', \ # validators=[FileAllowed(app.config['IMG_ALLOWED_EXTENSIONS'], 'Images only!')]) #contact_no_fld = TelField('Telephone: ') # User login form class LoginForm(Form): # openid = StringField('openid', validators=[DataRequired()]) username_fld = StringField('Username or Email: ', validators=[DataRequired()]) password_fld = PasswordField('Password: ', validators=[DataRequired()]) remember_me_chkbx = BooleanField('Remember me', default=False) login_btn = SubmitField('Sign in!') # New user signup form class RegForm(Form): username_fld = StringField('Username: ', validators=[DataRequired()]) # Text-Field: First name email_fld = StringField('Email: ', validators = [DataRequired()]) # Text-Field: Email conf_email_fld = StringField('Confirm Email: ', validators = [DataRequired()]) # Text-Field: Retype/Confirm Email password_fld = PasswordField('Password: ', validators=[DataRequired()]) # Text(Password)-Field: Password conf_password_fld = PasswordField('Confirm Password: ', validators=[DataRequired()]) # Text(Password)-Field: Retype/Confirm Password #recap_fld = RecaptchaField() # Recaptcha code verification #subscrb_chkbx = BooleanField('Subcscribe for our newsletters!', default=False) # Che
ck-box: Subscribe submit_btn = SubmitField('Sign me up!') #
Button: Submit Form
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-3.2/Lib/distutils/tests/test_archive_util.py
Python
mit
7,249
0.001104
"""Tests for distutils.archive_util.""" __revision__ = "$Id: test_archive_util.py 86596 2010-11-20 19:04:17Z ezio.melotti $" import unittest import os import tarfile from os.path import splitdrive import warnings from distutils.archive_util import (check_archive_formats, make_tarball, make_zipfile, make_archive, ARCHIVE_FORMATS) from distutils.spawn import find_executable, spawn from distutils.tests import support from test.support import check_warnings, run_unittest try: import zipfile ZIP_SUPPORT = True except ImportError: ZIP_SUPPORT = find_executable('zip') class ArchiveUtilTestCase(support.TempdirManager, support.LoggingSilencer, unittest.TestCase): def test_make_tarball(self): # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') os.mkdir(os.path.join(tmpdir, 'sub')) self.write_file([tmpdir, 'sub', 'file3'], 'xxx') tmpdir2 = self.mkdtemp() unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], "Source and target should be on same drive") base_name = os.path.join(tmpdir2, 'archive') # working with relative paths to avoid tar warnings old_dir = os.getcwd() os.chdir(tmpdir) try: make_tarball(splitdrive(base_name)[1], '.') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: make_tarball(splitdrive(base_name)[1], '.', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) def _tarinfo(self, path): tar = tarfile.open(path) try: names = tar.getnames() names.sort() return tuple(names) finally: tar.close() def _create_files(self): # creating something to tar tmpdir = self.mkdtemp() dist = os.path.join(tmpdir, 'dist') os.mkdir(dist) self.write_file([dist, 'file1'], 'xxx') self.write_file([dist, 'file2'], 'xxx') os.mkdir(os.path.join(dist, 'sub')) self.write_file([dist, 'sub', 'file3'], 'xxx') os.mkdir(os.path.join(dist, 'sub2')) tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') return tmpdir, tmpdir2, base_name @unittest.skipUnless(find_executable('tar') and find_executable('gzip'), 'Need the tar command to run') def tes
t_tarfi
le_vs_tar(self): tmpdir, tmpdir2, base_name = self._create_files() old_dir = os.getcwd() os.chdir(tmpdir) try: make_tarball(base_name, 'dist') finally: os.chdir(old_dir) # check if the compressed tarball was created tarball = base_name + '.tar.gz' self.assertTrue(os.path.exists(tarball)) # now create another tarball using `tar` tarball2 = os.path.join(tmpdir, 'archive2.tar.gz') tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist'] gzip_cmd = ['gzip', '-f9', 'archive2.tar'] old_dir = os.getcwd() os.chdir(tmpdir) try: spawn(tar_cmd) spawn(gzip_cmd) finally: os.chdir(old_dir) self.assertTrue(os.path.exists(tarball2)) # let's compare both tarballs self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2)) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: make_tarball(base_name, 'dist', compress=None) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) # now for a dry_run base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: make_tarball(base_name, 'dist', compress=None, dry_run=True) finally: os.chdir(old_dir) tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) @unittest.skipUnless(find_executable('compress'), 'The compress program is required') def test_compress_deprecated(self): tmpdir, tmpdir2, base_name = self._create_files() # using compress and testing the PendingDeprecationWarning old_dir = os.getcwd() os.chdir(tmpdir) try: with check_warnings() as w: warnings.simplefilter("always") make_tarball(base_name, 'dist', compress='compress') finally: os.chdir(old_dir) tarball = base_name + '.tar.Z' self.assertTrue(os.path.exists(tarball)) self.assertEqual(len(w.warnings), 1) # same test with dry_run os.remove(tarball) old_dir = os.getcwd() os.chdir(tmpdir) try: with check_warnings() as w: warnings.simplefilter("always") make_tarball(base_name, 'dist', compress='compress', dry_run=True) finally: os.chdir(old_dir) self.assertTrue(not os.path.exists(tarball)) self.assertEqual(len(w.warnings), 1) @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') def test_make_zipfile(self): # creating something to tar tmpdir = self.mkdtemp() self.write_file([tmpdir, 'file1'], 'xxx') self.write_file([tmpdir, 'file2'], 'xxx') tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') make_zipfile(base_name, tmpdir) # check if the compressed tarball was created tarball = base_name + '.zip' def test_check_archive_formats(self): self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), 'xxx') self.assertEqual(check_archive_formats(['gztar', 'zip']), None) def test_make_archive(self): tmpdir = self.mkdtemp() base_name = os.path.join(tmpdir, 'archive') self.assertRaises(ValueError, make_archive, base_name, 'xxx') def test_make_archive_cwd(self): current_dir = os.getcwd() def _breaks(*args, **kw): raise RuntimeError() ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file') try: try: make_archive('xxx', 'xxx', root_dir=self.mkdtemp()) except: pass self.assertEqual(os.getcwd(), current_dir) finally: del ARCHIVE_FORMATS['xxx'] def test_suite(): return unittest.makeSuite(ArchiveUtilTestCase) if __name__ == "__main__": run_unittest(test_suite())
ryankanno/py-utilities
tests/fs/test_path_utilities.py
Python
mit
1,597
0
#!/usr/bin/env python # -*- coding: utf-8 -*- from nose.tools imp
ort ok_ import os from py_utilities.fs.path_utilities import expanded_abspath from py_utilities.fs.path_utilities import filename from py_utilities.fs.path_utilities import get_first_dir_path from py_utilities.fs.path_utilities import get_first_file_path import tempfile import unittest class TestPath(unittest.TestCase): def test_expanded_abspath(self): h
ome = os.environ["HOME"] ok_(expanded_abspath("~") == home) ok_(expanded_abspath("~/foo") == os.path.join(home, 'foo')) ok_(expanded_abspath("/foo") == "/foo") ok_(expanded_abspath("/foo/bar") == "/foo/bar") def test_filename(self): paths = ['/foo/bar/', '/foo/bar', 'foo/bar/', 'foo/bar', '\\foo\\bar\\', '\\foo\\bar', 'foo\\bar\\', 'foo\\bar'] for path in paths: ok_(filename(path) == 'bar') def test_get_first_dir_path(self): dir = tempfile.mkdtemp() home = os.environ["HOME"] fake = '/foo/bar/x/y/z/a' ok_(dir == get_first_dir_path([dir])) ok_(dir == get_first_dir_path([dir, home])) ok_(home == get_first_dir_path([home, dir])) ok_(home == get_first_dir_path([fake, home, dir])) ok_(dir == get_first_dir_path([fake, dir, home])) def test_get_first_file_path(self): f = tempfile.mkstemp()[1] fake = '/foo/bar/x/y/z/a' ok_(f == get_first_file_path([f])) ok_(f == get_first_file_path([f, fake])) ok_(f == get_first_file_path([fake, f])) # vim: filetype=python
dmazzer/nors
remote/GrovePi/Software/Python/GrovePi_Hardware_Test.py
Python
mit
2,308
0.011698
#!/usr/bin/env python # # GrovePi Hardware Test # Connect Buzzer to Port D8 # Connect Button to Analog Port A0 # # The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.grovepi.com # # Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi # ''' ## License The MIT License (MIT) GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi. Copyright (C) 2015 Dexter Industries Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUT
HORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILIT
Y, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' import time import grovepi # Connect the Grove Button to Analog Port 0. button = 14 # This is the A0 pin. buzzer = 8 # This is the D8 pin. grovepi.pinMode(button,"INPUT") grovepi.pinMode(buzzer,"OUTPUT") print "GrovePi Basic Hardware Test." print "Setup: Connect the button sensor to port A0. Connect a Grove Buzzer to port D8." print "Press the button and the buzzer will buzz!" while True: try: butt_val = grovepi.digitalRead(button) # Each time we go through the loop, we read A0. print (butt_val) # Print the value of A0. if butt_val == 1: grovepi.digitalWrite(buzzer,1) print ('start') time.sleep(1) else: grovepi.digitalWrite(buzzer,0) time.sleep(.5) except IOError: print ("Error")
hellhovnd/dentexchange
dentexchange/apps/libs/tests/haystack/test_get_indexes.py
Python
bsd-3-clause
1,159
0.001726
# -*- coding:utf-8 -*- import unittest import mock from ...haystack.utils import get_indexes class GetIndexesTestCase(unittest.TestCase): @mock.patch('libs.haystack.utils.connections') @mock.patch('libs.haystack.utils.connection_router.for_write') def test_get_indexes_should_yield_get_index( self, for_write, connections): # setup model_class = mock.Mock() using = mock.Mock()
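        # route writes through a single mocked connection alias so that
        # get_indexes resolves exactly one backend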
for_write.return_value = [using] connection = mock.Mock() connections.__getitem__ = mock.MagicMock(return_value=connection) # action
returned_value = list(get_indexes(model_class)) # assert self.assertDictEqual(dict(models=[model_class]), for_write.call_args[1]) self.assertTupleEqual((using,), connections.__getitem__.call_args[0]) self.assertEqual(1, connection.get_unified_index.call_count) self.assertTupleEqual((model_class,), connection.get_unified_index.return_value.get_index.call_args[0]) self.assertListEqual( [connection.get_unified_index.return_value.get_index.return_value], returned_value)
j2ali/FlightScraper
Scraper.py
Python
bsd-3-clause
1,950
0.007179
from bs4 import BeautifulSoup import helper from datetime import datetime import click import time import calendar #Example values #START_DATE = datetime(2014, 05, 15) #END_DATE = datetime(2015, 05, 15) #DAY_DELTA = 7 #TIMEOUT_SECONDS = 30 #Example Command #python Scraper.py 2014/05/25 2015/05/15 4 0 YYZ POS @click.command() @click.argument('start_date') @click.argument('end_date
') @click.argument('day_delta') @click.argument('time_out') @click.argument('origin_airport') @click.argument('destination_airport') def find_flights(start_date, end_date, day_delta, time_out, origin_airport, destination_airport): start_date = datetime.strptime(start_date
, "%Y/%m/%d")
    end_date = datetime.strptime(end_date, "%Y/%m/%d")
    day_delta = int(day_delta)
    time_out = int(time_out)
    flight_dates = helper.generate_dates(start_date, end_date, day_delta)
    # There is a new output file for each run.
    # Use something like time.ctime(int("1284101485")) to get the date back
    filename = calendar.timegm(datetime.utcnow().utctimetuple())
    file = open('DataOut/output_'+str(filename)+'.txt', "a")
    for flight_date in flight_dates:
        (depart_date, return_date) = flight_date
        response = helper.hit_the_site(depart_date, return_date, origin_airport, destination_airport)
        soup = BeautifulSoup(response)
        data = helper.parse_data(soup)
        if len(data) == 0:
            file.write('No data received; might have encountered a captcha')
            file.close()
            break
        for a in data:
            print a
            file.write(a.encode('utf-8'))
        # Trying to avoid the captcha here, but it looks like the timeout is
        # over 30 seconds; I can do about 10 hits before it gets turned on
        time.sleep(time_out)
    file.close()


if __name__ == '__main__':
    find_flights()
agronick/WebServiceExample
smarterer/smarterer/wsgi.py
Python
gpl-2.0
395
0
""" WSGI config for smarterer project.
It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO
_SETTINGS_MODULE", "smarterer.settings") application = get_wsgi_application()
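
# Deployment sketch (the server choice is an assumption; any WSGI server can
# load this module):
#
#     gunicorn smarterer.wsgi:application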
steventimberman/masterDebater
debate/migrations/0010_auto_20170808_2242.py
Python
mit
466
0
# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-08-09 03:42 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('debate', '0009_auto_20170807_2329'), ] operat
ions = [ migrations.AlterField( model_name='debatetopic', name='timestamp', field=models.DateTimeField(auto_now_add=True), ),
]
openprocurement/openprocurement.edge
openprocurement/edge/tests/traversal.py
Python
apache-2.0
638
0.001567
# -*- coding: utf-8 -*- import unittest from mock import MagicMock, patch from mu
nch import munchify from openprocurement.edge.traversal import Root class TestTraversal(unittest.TestCase): def test_Root(self): request = munchify({'registry': {'db': 'database'}}) root = Root(request) self.assertEqual(root.request, request) self.assertEqual(root.db, request.registry.db) def test_get_item(self): pass def suite():
suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(TestTraversal)) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
edx/xblock-lti-consumer
lti_consumer/lti_1p3/tests/extensions/rest_framework/test_authentication.py
Python
agpl-3.0
4,164
0.00024
""" Unit tests for LTI 1.3 consumer implementation """ from unittest.mock import MagicMock, patch import ddt from Cryptodome.PublicKey import RSA from django.test.testcases import TestCase from rest_framework import exceptions from lti_consumer.lti_1p3.consumer import LtiConsumer1p3 from lti_consumer.lti_1p3.extensions.rest_framework.authentication import Lti1p3ApiAuthentication from lti_consumer.models import LtiConfiguration # Variables required for testing and verification ISS = "http://test-platform.example/" OIDC_URL = "http://test-platform/oidc" LAUNCH_URL = "http://test-platform/launch" CLIENT_ID = "1" DEPLOYMENT_ID = "1" NONCE = "1234" STATE = "ABCD" # Consider storing a fixed key RSA_KEY_ID = "1" RSA_KEY = RSA.generate(2048).export_key('PEM') @ddt.ddt class TestLtiAuthentication(TestCase): """ Unit tests for Lti1p3ApiAuthentication class """ def setUp(self): super().setUp() # Set up consumer self.lti_consumer = LtiConsumer1p3( iss=ISS, lti_oidc_url=OIDC_URL, lti_launch_url=LAUNCH_URL, client_id=CLIENT_ID, deployment_id=DEPLOYMENT_ID, rsa_key=RSA_KEY, rsa_key_id=RSA_KEY_ID, # Use the same key for testing purposes tool_key=RSA_KEY, ) # Create LTI Configuration self.lti_configuration = LtiConfiguration.objects.create( version=LtiConfiguration.LTI_1P3, ) # Patch call that retrieves config from modulestore # We're not testing the model here self._lti_block_patch = patch( 'lti_consumer.models.LtiConfiguration.get_lti_consumer', return_value=self.lti_consumer, ) self.addCleanup(self._lti_block_patch.stop) self._lti_block_patch.start() def _make_request(self): """ Returns a
Mock Request that can be used to test the LTI auth. """ mock_request = MagicMock() # G
enerate a valid access token token = self.lti_consumer.key_handler.encode_and_sign( { "sub": self.lti_consumer.client_id, "iss": self.lti_consumer.iss, "scopes": "", }, expiration=3600 ) mock_request.headers = { "Authorization": f"Bearer {token}", } # Set the lti config id in the "url" mock_request.parser_context = {"kwargs": { "lti_config_id": self.lti_configuration.id, }} return mock_request @ddt.data( None, "", "Bearer", "Bearer invalid token", # Valid token format, but cannot be decoded "Bearer invalid", ) def test_invalid_auth_token(self, token): """ Test invalid and auth token in auth mechanism. """ mock_request = self._make_request() # Either set invalid token or clear headers if token is not None: mock_request.headers = { "Authorization": token, } else: mock_request.headers = {} with self.assertRaises(exceptions.AuthenticationFailed): auth = Lti1p3ApiAuthentication() auth.authenticate(mock_request) def test_no_lti_config(self): """ Test that the login is invalid if LTI config doesn't exist. """ mock_request = self._make_request() mock_request.parser_context = {"kwargs": { "lti_config_id": 0, # Django id field is never zero }} with self.assertRaises(exceptions.AuthenticationFailed): auth = Lti1p3ApiAuthentication() auth.authenticate(mock_request) def test_lti_login_succeeds(self): """ Test if login successful and that the LTI Consumer and token are attached to request. """ mock_request = self._make_request() # Run auth auth = Lti1p3ApiAuthentication() auth.authenticate(mock_request) # Check request self.assertEqual(mock_request.lti_consumer, self.lti_consumer)
zappala/bene
examples/broadcast.py
Python
gpl-2.0
2,081
0.001922
from __future__ import print_function import sys sys.path.append('..') from src.sim import Sim from src.packet import Packet from networks.network import Network class BroadcastApp(object): def __init__(self, node): self.node = node def receive_packet(self, packet): print(Sim.scheduler.current_time(), self.node.hostname, packet.ident) def main(): # parameters Sim.scheduler.reset() # setup network net = Network('../networks/five-
nodes.txt') # get nodes n1 = net.get_node('n1') n2 = net.get_node('n2') n3 = net.get_node('n3') n4 = net.get_node('n4') n5 = net.get_node('n5') # setup broadcast application b1 = BroadcastApp(n1) n1.add_protocol(protocol="broadcast",
handler=b1) b2 = BroadcastApp(n2) n2.add_protocol(protocol="broadcast", handler=b2) b3 = BroadcastApp(n3) n3.add_protocol(protocol="broadcast", handler=b3) b4 = BroadcastApp(n4) n4.add_protocol(protocol="broadcast", handler=b4) b5 = BroadcastApp(n5) n5.add_protocol(protocol="broadcast", handler=b5) # send a broadcast packet from 1 with TTL 2, so everyone should get it p = Packet( source_address=n1.get_address('n2'), destination_address=0, ident=1, ttl=2, protocol='broadcast', length=100) Sim.scheduler.add(delay=0, event=p, handler=n1.send_packet) # send a broadcast packet from 1 with TTL 1, so just nodes 2 and 3 # should get it p = Packet( source_address=n1.get_address('n2'), destination_address=0, ident=2, ttl=1, protocol='broadcast', length=100) Sim.scheduler.add(delay=1, event=p, handler=n1.send_packet) # send a broadcast packet from 3 with TTL 1, so just nodes 1, 4, and 5 # should get it p = Packet( source_address=n3.get_address('n1'), destination_address=0, ident=3, ttl=1, protocol='broadcast', length=100) Sim.scheduler.add(delay=2, event=p, handler=n3.send_packet) # run the simulation Sim.scheduler.run() if __name__ == '__main__': main()
lichinka/pystella
coriolis_alla_stella.py
Python
bsd-2-clause
3,432
0.015443
import random # -------------------------------------------------------------- # DEFINITION of the Coriolis stencil object coriolis = Stencil ( ) # # add the U stage to the Coriolis stencil # uSlowTensStage = coriolis.addStage ( ) @uSlowTensStage.attachD
o
def uStageDo (utens, v, fc):
    """
    The 'Do' function of the U stage, with the Coriolis force
    directly applied:

    utens   a STELLA data field, representing ???;
    v       a STELLA data field, representing ???;
    fc      a scalar representing the force.-
    """
    res = fc * average (v, v.iplus1)
    res += fc * average (v.jminus1, v.jminus1.iplus1)
    utens += res / 2.0

#
# add the V stage to the Coriolis ste
ncil
#
vSlowTensStage = coriolis.addStage ( )

@vSlowTensStage.attachDo (IJKRealField, IJKRealField, Scalar)
def vStageDo (vtens, u, fc):
    """
    The 'Do' function of the V stage, with the Coriolis force
    defined as a private function:

    vtens   a STELLA data field, representing ???;
    u       a STELLA data field, representing ???;
    fc      a scalar, representing the force.-
    """
    def coriolisForce (frc, vel):
        """
        Calculates the Coriolis force:

        frc   constant Coriolis force factor;
        vel   velocity used to calculate the force.
        """
        return frc * vel

    res = coriolisForce (fc, average (u.jplus1, u))
    res += coriolisForce (fc, average (u.iminus1, u.iminus1.jplus1))
    vtens += res / 2.0

#
# the output of the 'uSlowTensStage' is used as input of the 'vSlowTensStage'
#
#coriolis.addKLoop ((uSlowTensStage,
#                    vSlowTensStage), sweep='kIncrement')

#
# these loops do not share any data within the stencil execution
#
coriolis.addKLoop ((uSlowTensStage,), sweep='kIncrement')
coriolis.addKLoop ((vSlowTensStage,), sweep='kIncrement')

# --------------------------------------------------------------
# USAGE of the Coriolis stencil object defined above
#
# the calculation domain on which the stencil will be applied
#
calculationDomain = IJKSize (8, 8, 2)

#
# no boundaries in the K dimension
#
kBoundary = KBoundary (0, 0)

#
# data-field definitions
#
u = IJKRealField (calculationDomain, kBoundary)
v = IJKRealField (calculationDomain, kBoundary)
utens = IJKRealField (calculationDomain, kBoundary)
vtens = IJKRealField (calculationDomain, kBoundary)

#
# put some values in the data fields
#
map (lambda _: random.random ( ), u)
map (lambda _: random.random ( ), v)
map (lambda _: random.random ( ) * 10.0, utens)
map (lambda _: random.random ( ) * 10.0, vtens)

#
# print out the initial state
#
print ("Initial state")
print (utens)
print (vtens)

#
# apply the stencil in 3 time steps
#
for step in xrange (3):
    #
    # apply the stencil with the field and scalar variables:
    # the names (i.e., dictionary keys) should match those used
    # in the `Do' functions at stage level, otherwise a
    # StencilCompilationException would occur at runtime
    #
    coriolis.apply (fields={'u': u, 'v': v, 'utens': utens, 'vtens': vtens},
                    scalars={'fc': 3.5})
    #
    # print the situation after each step
    #
    print ("State after time step", step)
    print (utens)
    print (vtens)
jwittenbach/thunder
thunder/__init__.py
Python
apache-2.0
347
0.008646
from . import series fr
om . import images def _setup(): import logging logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter('[%(name)s] %(levelname)s %(message)s') ch = logging.StreamHandler() ch.setFormatter(formatter) logger.addHandler(ch) _setup() __version__
= '1.1.1'
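Because _setup() installs the handler on the package-level 'thunder' logger, child loggers inherit it through propagation; a minimal sketch:

import logging

log = logging.getLogger('thunder.series')  # child of the logger configured above
log.info('loaded 100 records')             # prints: [thunder.series] INFO loaded 100 records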
Lily-Ayta/aosc-os-abbs
extra-games/pingus/autobuild/overrides/usr/share/pingus/images/fonts/buildset.py
Python
gpl-2.0
155
0.006452
#!/usr/bin/env pytho
n imp
ort sys

text = sys.stdin.read().decode('utf-8')
characters = set(text)
for c in characters:
    print c.encode('utf-8')

# EOF #
CodyKochmann/battle_tested
battle_tested/beta/fuzz_planner.py
Python
mit
68
0.014706
from battle_te
sted.beta.input_type_combos import input_typ
e_combos
Hybrid-Cloud/cinder
cinder/tests/unit/api/contrib/test_quotas_classes.py
Python
apache-2.0
6,020
0
# Copyright 2013 Huawei Technologies Co., Ltd
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""
Tests for cinder.api.contrib.quota_classes.py
"""

import mock
import webob.exc

from cinder.api.contrib import quota_classes
from cinder import context
from cinder import quota
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.volume import volume_types

QUOTAS = quota.QUOTAS


def make_body(root=True, gigabytes=1000, snapshots=10,
              volumes=10, backups=10, backup_gigabytes=1000,
              per_volume_gigabytes=-1, volume_types_faked=None,
              tenant_id=fake.PROJECT_ID):
    resources = {'gigabytes': gigabytes,
                 'snapshots': snapshots,
                 'volumes': volumes,
                 'backups': backups,
                 'per_volume_gigabytes': per_volume_gigabytes,
                 'backup_gigabytes': backup_gigabytes}
    if not volume_types_faked:
        volume_types_faked = {'fake_type': None}
    for volume_type in volume_types_faked:
        resources['gigabytes_' + volume_type] = -1
        resources['snapshots_' + volume_type] = -1
        resources['volumes_' + volume_type] = -1
    if tenant_id:
        resources['id'] = tenant_id
    if root:
        result = {'quota_class_set': resources}
    else:
        result = resources
    return result


def make_response_body(root=True, ctxt=None, quota_class='foo',
                       request_body=None, tenant_id=fake.PROJECT_ID):
    resources = {}
    if not ctxt:
        ctxt = context.get_admin_context()
    resources.update(QUOTAS.get_class_quotas(ctxt, quota_class))
    if request_body and 'quota_class_set' in request_body:
        resources.update(request_body['quota_class_set'])
    if tenant_id:
        resources['id'] = tenant_id
    if root:
        result = {'quota_class_set': resources}
    else:
        result = resources
    return result


class QuotaClassSetsControllerTest(test.TestCase):

    def setUp(self):
        super(QuotaClassSetsControllerTest, self).setUp()
        self.controller = quota_classes.QuotaClassSetsController()
        self.ctxt = context.get_admin_context()
        self.req = mock.Mock()
        self.req.environ = {'cinder.context': self.ctxt}
        self.req.environ['cinder.context'].is_admin = True

    def test_show(self):
        volume_types.create(self.ctxt, 'fake_type')
        result = self.controller.show(self.req, fake.PROJECT_ID)
        self.assertDictMatch(make_body(), result)

    def test_show_not_authorized(self):
        self.req.environ['cinder.context'].is_admin = False
        self.req.environ['cinder.context'].user_id = fake.USER_ID
        self.req.environ['cinder.context'].project_id = fake.PROJECT_ID
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.show,
                          self.req, fake.PROJECT_ID)

    def test_update(self):
        volume_types.create(self.ctxt, 'fake_type')
        body = make_body(gigabytes=2000, snapshots=15,
                         volumes=5, tenant_id=None)
        result = self.controller.update(self.req, fake.PROJECT_ID, body)
        self.assertDictMatch(body, result)

    @mock.patch('cinder.api.openstack.wsgi.Controller.validate_string_length')
    @mock.patch('cinder.utils.validate_integer')
    def test_update_limit(self, mock_validate_integer, mock_validate):
        mock_validate_integer.return_value = 5

        volume_types.create(self.ctxt, 'fake_type')
        body = make_body(volumes=5)
        result = self.controller.update(self.req, fake.PROJECT_ID, body)
self.assertEqual(5, result['quota_class_set']['volumes']) self.assertTrue(mock_validate.called) self.assertTrue(mock_validate_integer.called) def test_update_wrong_key(self): volume_types.create(self.ctxt, 'fake_type') body = {'quota_class_set': {'bad': 'bad'}} result = self.controller.update(self.req, fake.PROJECT_ID, body) self.assertDictMatch(make_body(tenant_id=None), result) def test_update_invalid_key_value(self): body = {'quota_class_set': {'gigabytes': "should_be_int"}} self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update, self.req, fake.PROJECT_ID, body) def test_update_bad_quota_limit(self): body = {'quota_class_set': {'gigabytes': -1000}} self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update, self.req, fake.PROJECT_ID, body) def test_update_no_admin(self): self.req.environ['cinder.context'].is_admin = False self.assertRaises(webob.exc.HTTPForbidden, self.controller.update, self.req, fake.PROJECT_ID, make_b
ody(tenant_id=None)) def test_update_with_more_volume_types(self): volume_types.create(self.ctxt, 'fake_type_1') volume_types.create(self.ctxt, 'fake_type_2') body = {'quota_class_set': {'gigabytes_fake_type_1': 1111, 'volumes_fake_type_2': 2222}} result = self.con
troller.update(self.req, fake.PROJECT_ID, body) self.assertDictMatch(make_response_body(ctxt=self.ctxt, quota_class=fake.PROJECT_ID, request_body=body, tenant_id=None), result)
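For orientation, make_body() with all defaults evaluates to the structure below (read directly off the helper above):

{'quota_class_set': {
    'gigabytes': 1000, 'snapshots': 10, 'volumes': 10, 'backups': 10,
    'per_volume_gigabytes': -1, 'backup_gigabytes': 1000,
    'gigabytes_fake_type': -1, 'snapshots_fake_type': -1,
    'volumes_fake_type': -1, 'id': fake.PROJECT_ID}}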
invitecomm/asterisk-ivr
pigeonhole/wardial.py
Python
gpl-3.0
3,694
0.004061
#! /usr/bin/env python # -*- coding: utf-8 -*- # vim: set et sw=4 fenc=utf-8: # # Copyright 2016 INVITE Communications Co., Ltd. All Rights Reserved. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """AGI script that renders speech to text using Google Cloud Speech API using the REST API.""" # [START import_libraries] from __future__ import print_function from asterisk.agi import * import re im
port ConfigParser from datetime import date, datetime, timedelta import mysql.connector as mariadb def question(file, valid_digits): regexp = re.compile(r'[' + valid_digits + ']') res = agi.get_data(file, 20000, 1) if regexp.search(res) is not None: return res res = agi.get_data(file, 20000, 1) if regexp.search(res) is
not None: return res if not res: agi.hangup() settings = ConfigParser.RawConfigParser() settings.read('/etc/asterisk/res_config_mysql.conf') config = { 'user': settings.get('general', 'dbuser'), 'password': settings.get('general', 'dbpass'), 'host': settings.get('general', 'dbhost'), 'database': settings.get('general', 'dbname'), 'raise_on_warnings': True, } def data_insert(query): agi.verbose(query) try: mariadb_connection = mariadb.connect(**config) cursor = mariadb_connection.cursor() cursor.execute(query) record = cursor.lastrowid mariadb_connection.commit() cursor.close() mariadb_connection.close() except mariadb.Error as error: agi.verbose("Database Error: {0}".format(error)) return record db_insert = ("INSERT INTO `warlist` (`clid`, `%s`) VALUES ('%s', '%s')") db_update = ("UPDATE `warlist` SET `%s` = '%s' WHERE `id` = '%s'") agi = AGI() agi.answer() clid = agi.env['agi_accountcode'] # Asterisk Dial-plan Application 'DumpChan()' #Variables: #WOMBAT_HOPPER_ID=2145573608 #warlist=38418 #NUM= #SIPCALLID=1583cd9c69daeca70f5a91477e22f3b7@172.17.70.223:5060 wombat = agi.get_variable('WOMBAT_HOPPER_ID') warlist = agi.get_variable('warlist') agi.verbose("Database Record: {0}".format(warlist)) amdstatus = agi.env['agi_arg_2'] amdreason = agi.env['agi_arg_3'] if amdstatus == "MACHINE": agi.appexec('UserEvent', 'CALLSTATUS, UniqueID:%s,V:AMD' % wombat) data_insert(db_update % ('note', '%s:%s' % (amdstatus, amdreason), warlist)) agi.hangup() data_insert(db_update % ('note', '%s:%s' % (amdstatus, amdreason), warlist)) agi.stream_file('wardial/greeting') q1 = question('wardial/question1', '12') data_insert(db_update % ('q1', q1, warlist)) q2 = question('wardial/question2', '123') data_insert(db_update % ('q2', q2, warlist)) q3 = question('wardial/question3', '12345') data_insert(db_update % ('q3', q3, warlist)) q4 = question('wardial/question4', '123') data_insert(db_update % ('q4', q4, warlist)) q5 = question('wardial/question5', '123') data_insert(db_update % ('q5', q5, warlist)) agi.stream_file('wardial/goodby') agi.hangup() # calltime = agi.get_variable('ANSWEREDTIME') # data_insert(db_update % ('reply', calltime, warlist))
Dronecode/MAVProxy
MAVProxy/modules/lib/mp_image.py
Python
gpl-3.0
20,342
0.00295
#!/usr/bin/env python from __future__ import print_function ''' display a image in a subprocess Andrew Tridgell June 2012 ''' import time from MAVProxy.modules.lib.wx_loader import wx import cv2 import numpy as np import warnings from MAVProxy.modules.lib import mp_util from MAVProxy.modules.lib import mp_widgets from MAVProxy.modules.lib import win_layout from MAVProxy.modules.lib import multiproc from MAVProxy.modules.lib.mp_menu import * class MPImageData: '''image data to display''' def __init__(self, img): if not hasattr(img, 'shape'): img = np.asarray(img[:,:]) self.width = img.shape[1] self.height = img.shape[0] self.data = img.tostring() class MPImageTitle: '''window title to use''' def __init__(self, title): self.title = title class MPImageBrightness: '''image brightness to use''' def __init__(self, brightness): self.brightness = brightness class MPImageFitToWindow: '''fit image to window''' def __init__(self): pass class MPImageFullSize: '''show full image resolution''' def __init__(self): pass class MPImageMenu: '''window menu to add''' def __init__(self, menu): self.menu = menu class MPImagePopupMenu: '''popup menu to add''' def __init__(self, menu): self.menu = menu class MPImageNewSize: '''reported to parent when window size changes''' def __init__(self, size): self.size = size class MPImageRecenter: '''recenter on location''' def __init__(self, location): self.location = location class MPImage(): ''' a generic image viewer widget for use in MP tools ''' def __init__(self, title='MPImage', width=512, height=512, can_zoom = False, can_drag = False, mouse_events = False, key_events = False, auto_size = False, report_size_changes = False, daemon = False): self.title = title self.width = width self.height = height self.can_zoom = can_zoom self.
can_drag = can_drag sel
f.mouse_events = mouse_events self.key_events = key_events self.auto_size = auto_size self.report_size_changes = report_size_changes self.menu = None self.popup_menu = None self.in_queue = multiproc.Queue() self.out_queue = multiproc.Queue() self.default_menu = MPMenuSubMenu('View', items=[MPMenuItem('Fit Window', 'Fit Window', 'fitWindow'), MPMenuItem('Full Zoom', 'Full Zoom', 'fullSize')]) self.child = multiproc.Process(target=self.child_task) self.child.daemon = daemon self.child.start() self.set_popup_menu(self.default_menu) def child_task(self): '''child process - this holds all the GUI elements''' mp_util.child_close_fds() from MAVProxy.modules.lib.wx_loader import wx state = self self.app = wx.App(False) self.app.frame = MPImageFrame(state=self) self.app.frame.Show() self.app.MainLoop() def is_alive(self): '''check if child is still going''' return self.child.is_alive() def set_image(self, img, bgr=False): '''set the currently displayed image''' if not self.is_alive(): return if not hasattr(img, 'shape'): img = np.asarray(img[:,:]) if bgr: img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) self.in_queue.put(MPImageData(img)) def set_title(self, title): '''set the frame title''' self.in_queue.put(MPImageTitle(title)) def set_brightness(self, brightness): '''set the image brightness''' self.in_queue.put(MPImageBrightness(brightness)) def fit_to_window(self): '''fit the image to the window''' self.in_queue.put(MPImageFitToWindow()) def full_size(self): '''show the full image resolution''' self.in_queue.put(MPImageFullSize()) def set_menu(self, menu): '''set a MPTopMenu on the frame''' self.menu = menu self.in_queue.put(MPImageMenu(menu)) def set_popup_menu(self, menu): '''set a popup menu on the frame''' self.popup_menu = menu self.in_queue.put(MPImagePopupMenu(menu)) def get_menu(self): '''get the current frame menu''' return self.menu def get_popup_menu(self): '''get the current popup menu''' return self.popup_menu def poll(self): '''check for events, returning one event''' if self.out_queue.empty(): return None evt = self.out_queue.get() while isinstance(evt, win_layout.WinLayout): win_layout.set_layout(evt, self.set_layout) if self.out_queue.empty(): return None evt = self.out_queue.get() return evt def set_layout(self, layout): '''set window layout''' self.in_queue.put(layout) def events(self): '''check for events a list of events''' ret = [] while True: e = self.poll() if e is None: break ret.append(e) return ret def terminate(self): '''terminate child process''' self.child.terminate() self.child.join() def center(self, location): self.in_queue.put(MPImageRecenter(location)) class MPImageFrame(wx.Frame): """ The main frame of the viewer """ def __init__(self, state): wx.Frame.__init__(self, None, wx.ID_ANY, state.title) self.state = state state.frame = self self.last_layout_send = time.time() self.sizer = wx.BoxSizer(wx.VERTICAL) state.panel = MPImagePanel(self, state) self.sizer.Add(state.panel, 1, wx.EXPAND) self.SetSizer(self.sizer) self.Bind(wx.EVT_IDLE, self.on_idle) self.Bind(wx.EVT_SIZE, state.panel.on_size) def on_idle(self, event): '''prevent the main loop spinning too fast''' state = self.state now = time.time() if now - self.last_layout_send > 1: self.last_layout_send = now state.out_queue.put(win_layout.get_wx_window_layout(self)) time.sleep(0.1) class MPImagePanel(wx.Panel): """ The image panel """ def __init__(self, parent, state): wx.Panel.__init__(self, parent) self.frame = parent self.state = state self.img = None self.redraw_timer = wx.Timer(self) 
self.Bind(wx.EVT_TIMER, self.on_redraw_timer, self.redraw_timer) self.Bind(wx.EVT_SET_FOCUS, self.on_focus) self.redraw_timer.Start(100) self.mouse_down = None self.drag_step = 10 self.zoom = 1.0 self.menu = None self.popup_menu = None self.wx_popup_menu = None self.popup_pos = None self.last_size = None self.done_PIL_warning = False state.brightness = 1.0 # dragpos is the top left position in image coordinates self.dragpos = wx.Point(0,0) self.need_redraw = True self.mainSizer = wx.BoxSizer(wx.VERTICAL) self.SetSizer(self.mainSizer) # panel for the main image with warnings.catch_warnings(): warnings.simplefilter('ignore') self.imagePanel = mp_widgets.ImagePanel(self, wx.EmptyImage(state.width,state.height)) self.mainSizer.Add(self.imagePanel, flag=wx.TOP|wx.LEFT|wx.GROW, border=0) if state.mouse_events: self.imagePanel.Bind(wx.EVT_MOUSE_EVENTS, self.on_event) else: self.imagePanel.Bind(wx.EVT_MOUSE_EVENTS, self.on_mouse_event) if state.key_events: self.imagePanel.Bind(wx.EVT_KEY_DOWN, self.on_event) else: self.imagePanel.Bind(wx.EVT_KEY_DOWN, self.on_key_event) self.imagePanel.Bind(
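A minimal usage sketch for the MPImage viewer; the image path is a placeholder and a GUI display is assumed:

import cv2
from MAVProxy.modules.lib.mp_image import MPImage

viewer = MPImage(title='camera', can_zoom=True, can_drag=True, mouse_events=True)
img = cv2.imread('frame.png')    # placeholder input image
viewer.set_image(img, bgr=True)  # OpenCV loads BGR; convert on the way in
while viewer.is_alive():
    for event in viewer.events():  # drain pending mouse/key events
        print(event)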
dafrito/trac-mirror
tracopt/versioncontrol/svn/tests/__init__.py
Python
bsd-3-clause
731
0.002736
# -*- coding: utf-8 -*- # # Copyright (C) 2012-2013 Edgewall Software # All rights reserved. # # This software is licensed as d
escribed in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://trac.edgewall.org/wiki/TracLicense. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, availab
le at http://trac.edgewall.org/log/. import unittest from tracopt.versioncontrol.svn.tests import svn_fs def suite(): suite = unittest.TestSuite() suite.addTest(svn_fs.suite()) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
googleapis/python-recaptcha-enterprise
samples/snippets/migrate_site_key.py
Python
apache-2.0
1,870
0.001604
# Copyright 2021 Google Inc. All Rights Reserved. # # Licensed under the Apache
License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the Li
cense is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # [START recaptcha_enterprise_migrate_site_key] from google.cloud import recaptchaenterprise_v1 from list_site_keys import list_site_keys def migrate_site_key(project_id: str, recaptcha_site_key: str) -> None: """ Migrate a key from reCAPTCHA (non-Enterprise) to reCAPTCHA Enterprise. If you created the key using Admin console: https://www.google.com/recaptcha/admin/site, then use this API to migrate to reCAPTCHA Enterprise. For more info, see: https://cloud.google.com/recaptcha-enterprise/docs/migrate-recaptcha Args: project_id: Google Cloud Project ID. recaptcha_site_key: Specify the site key to migrate. """ client = recaptchaenterprise_v1.RecaptchaEnterpriseServiceClient() # Specify the key name to migrate. name = f"projects/{project_id}/keys/{recaptcha_site_key}" request = recaptchaenterprise_v1.MigrateKeyRequest() request.name = name response = client.migrate_key(request) # To verify if the site key has been migrated, use 'list_site_keys' to check if the # key is present. for key in list_site_keys(project_id): if key.name == response.name: print(f"Key migrated successfully: {recaptcha_site_key}") # [END recaptcha_enterprise_migrate_site_key]
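For illustration, a call with placeholder arguments (both values are hypothetical):

migrate_site_key(project_id="my-project", recaptcha_site_key="my-site-key")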
Canpio/Paddle
python/paddle/fluid/tests/unittests/test_elementwise_pow_op.py
Python
apache-2.0
1,482
0
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import numpy as np from op_test import OpTest class TestElementwisePowOp(OpTest): def setUp(self): self.op_type = "elementwise_pow" self.inputs = { 'X': np.random.uniform(0.1, 1, [13, 17]).astype("float32"), 'Y': np.random.uniform(0.1, 1, [13, 17]).astype("float32") } self.outputs = {'Out': np.power(self.inputs['X'], self.inputs['Y'])} def test_check_output(self): self.check_output() class TestElementwisePowOp_scalar(TestElementwisePowOp): def setUp(self): self.op_
type = "elementwise_pow" self.inputs = { 'X': np.random.rand(2, 3, 4).astype('float32'), 'Y': np.ra
ndom.rand(1).astype('float32') } self.outputs = {'Out': np.power(self.inputs['X'], self.inputs['Y'])} if __name__ == '__main__': unittest.main()
SmartcitySantiagoChile/onlineGPS
drawroute/views.py
Python
mit
422
0.018957
from django.http impor
t JsonResponse
from django.shortcuts import render
from django.views.generic import View

# model
#from drawroute.models import *

# Create your views here.
class MapHandler(View):
    '''This class manages the map where lines are drawn
    '''
    def __init__(self, **kwargs):
        super(MapHandler, self).__init__(**kwargs)
        self.context = {}

    def get(self, request):
        template = "drawroute.html"
        return render(request, template, self.context)
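A hedged sketch of wiring the view into a URLconf; the route and name are hypothetical and Django 2+ path() is assumed:

from django.urls import path
from drawroute.views import MapHandler

urlpatterns = [
    path('map/', MapHandler.as_view(), name='drawroute-map'),
]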
xrloong/Xie
src/xie/graphics/stroke_path.py
Python
apache-2.0
24,937
0.0473
from .shape import Shape from .shape import Pane from .shape import mergeBoundary from .shape import offsetBoundary class StrokePath(Shape): def __init__(self, segments): self.segments = segments boundary = self.computeBoundary() self.pane = Pane(*boundary) def __eq__(self, other): return (isinstance(other, self.__class__) and self.getSegments() == other.getSegments()) def __str__(self): return "-".join(map(lambda s: str(s), self.getSegments())) def __repr__(self): return "StrokePath({0})".format(",".join(map(lambda s: str(s), self.getSegments()))) def getSegments(self): return self.segments def getPane(self): return self.pane def draw(self, drawingSystem): segments=self.getSegments() for segment in segments: segment.draw(drawingSystem) def computeBoundary(self): segments=self.getSegments() currentPoint=(0, 0) totalBoundary=(0, 0, 0, 0) for segment in segments: boundary=segment.computeBoundary() newBoundary=offsetBoundary(boundary, currentPoint) totalBoundary=mergeBoundary(totalBoundary, newBoundary) endPoint=segment.getEndPoint() currentPoint=(currentPoint[0]+endPoint[0], currentPoint[1]+endPoint[1], ) return totalBoundary def computeBoundaryWithStartPoint(self, startPoint): strokePathBoundary = self.computeBoundary() return offsetBoundary(strokePathBoundary, startPoint) class StrokePathGenerator: def __init__(self, segmentFactory): self.segmentFactory = segmentFactory def getSegmentFactory(self): return self.segmentFactory def generate(self, parameters): strokeSegments = self.computeStrokeSegments(parameters) return StrokePath(strokeSegments) def parseExpression(self, parameterExpressionList): return [] def computeStrokeSegments(self, paramList): return [] class StrokePathGenerator_點(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==2 assert int(l[1])>0 return [int(l[0]), int(l[1])] def computeStrokeSegments(self, paramList): w=paramList[0] h=paramList[1] return self.getSegmentFactory().generateSegments_點(w, h) class StrokePathGenerator_圈(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==2 assert int(l[0])>0 assert int(l[1])>0 return [int(l[0]), int(l[1])] def computeStrokeSegments(self, paramList): w=paramList[0] h=paramList[1] return self.getSegmentFactory().generateSegments_圈(w, h) class StrokePathGenerator_橫(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==1 assert int(l[0])>0 return [int(l[0])] def computeStrokeSegments(self, paramList): w1=paramList[0] return self.getSegmentFactory().generateSegments_橫(w1) class StrokePathGenerator_橫鉤(StrokePathGenerator): def parseExpression(self, parameterExpressionLis
t): l=parameterExpressionList assert len(l)==3 assert int(l[0]
)>0 assert int(l[1])>0 assert int(l[2])>0 return [int(l[0]), int(l[1]), int(l[2]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] w2=paramList[1] h2=paramList[2] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2)) return segments class StrokePathGenerator_橫折(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==2 assert int(l[0])>0 assert int(l[1])>0 return [int(l[0]), int(l[1]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] h2=paramList[1] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_豎(h2)) return segments class StrokePathGenerator_橫折折(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==3 assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 return [int(l[0]), int(l[1]), int(l[2]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] h2=paramList[1] w3=paramList[2] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_豎(h2)) segments.extend(self.getSegmentFactory().generateSegments_橫(w3)) return segments class StrokePathGenerator_橫折提(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==4 assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 assert int(l[3])>0 return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] h2=paramList[1] w3=paramList[2] h3=paramList[3] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_豎(h2)) segments.extend(self.getSegmentFactory().generateSegments_提(w3, h3)) return segments class StrokePathGenerator_橫折折撇(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==6 assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 assert int(l[3])>0 assert int(l[4])>0 assert int(l[5])>0 return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), int(l[5]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] w2=paramList[1] h2=paramList[2] w3=paramList[3] w4=paramList[4] h4=paramList[5] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2)) segments.extend(self.getSegmentFactory().generateSegments_橫(w3)) segments.extend(self.getSegmentFactory().generateSegments_撇(w4, h4)) return segments class StrokePathGenerator_橫折鉤(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==5 assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 assert int(l[3])>0 assert int(l[4])>0 return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int(l[4]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] w2=paramList[1] h2=paramList[2] w3=paramList[3] h3=paramList[4] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_撇鉤之撇(w2, h2)) segments.extend(self.getSegmentFactory().generateSegments_鉤(w3, h3)) return segments class StrokePathGenerator_橫折彎(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==4 
assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 assert int(l[3])>0 return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] h2=paramList[1] w2=paramList[2] cr=paramList[3] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_豎(h2 - cr)) segments.extend(self.getSegmentFactory().generateSegments_曲(cr)) segments.extend(self.getSegmentFactory().generateSegments_橫(w2 - cr)) return segments class StrokePathGenerator_橫撇(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==3 assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 return [int(l[0]), int(l[1]), int(l[2]), ] def computeStrokeSegments(self, paramList): w1=paramList[0] w2=paramList[1] h2=paramList[2] segments=[] segments.extend(self.getSegmentFactory().generateSegments_橫(w1)) segments.extend(self.getSegmentFactory().generateSegments_撇(w2, h2)) return segments class StrokePathGenerator_橫斜彎鉤(StrokePathGenerator): def parseExpression(self, parameterExpressionList): l=parameterExpressionList assert len(l)==6 assert int(l[0])>0 assert int(l[1])>0 assert int(l[2])>0 assert int(l[3])>0 assert int(l[4])>0 assert int(l[5])>0 return [int(l[0]), int(l[1]), int(l[2]), int(l[3]), int
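To make the generator flow concrete, a hedged sketch; segment_factory is hypothetical and must supply the generateSegments_* methods these classes call:

generator = StrokePathGenerator_橫折(segment_factory)
params = generator.parseExpression(["20", "30"])  # validates and converts to [20, 30]
path = generator.generate(params)                 # 橫(20) followed by 豎(30)
print(path.getPane())                             # bounding pane of the stroke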
lmazuel/azure-sdk-for-python
azure-servicefabric/azure/servicefabric/models/fabric_error.py
Python
mit
1,580
0.000633
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Chang
es may cause incorrect behavior and will be lost if the code is # regenerated. # ------
--------------------------------------------------------------------

from msrest.serialization import Model
from msrest.exceptions import HttpOperationError


class FabricError(Model):
    """The REST API operations for Service Fabric return standard HTTP status
    codes. This type defines the additional information returned from the
    Service Fabric API operations that are not successful.

    :param error: Error object containing error code and error message.
    :type error: ~azure.servicefabric.models.FabricErrorError
    """

    _validation = {
        'error': {'required': True},
    }

    _attribute_map = {
        'error': {'key': 'Error', 'type': 'FabricErrorError'},
    }

    def __init__(self, error):
        super(FabricError, self).__init__()
        self.error = error


class FabricErrorException(HttpOperationError):
    """Server responded with exception of type: 'FabricError'.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):
        super(FabricErrorException, self).__init__(deserialize, response, 'FabricError', *args)
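A minimal construction sketch; FabricErrorError is the generated sibling model, and the code/message keyword arguments are assumed from its naming, not verified here:

from azure.servicefabric.models import FabricError, FabricErrorError

detail = FabricErrorError(code='FABRIC_E_NODE_NOT_FOUND',  # assumed signature
                          message='The node was not found.')
err = FabricError(error=detail)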
jefftranter/udis
z80.py
Python
apache-2.0
32,759
0.094142
########################################################################## # # Processor specific code # CPU = "Z80" # Description = "Zilog 8-bit microprocessor." # DataWidth = 8 # 8-bit data # AddressWidth = 16 # 16-bit addresses # Maximum length of an instruction (for formatting purposes) maxLength = 4 # Leadin bytes for multibyte instructions leadInBytes = [0xcb, 0xdd, 0xed, 0xfd] # Addressing mode table # List of addressing modes and corresponding format strings for operands. addressModeTable = { "implied" : "", "0" : "0", "0,a" : "0,a", "0,b" : "0,b", "0,c" : "0,c", "0,d" : "0,d", "0,e" : "0,e", "0,h" : "0,h", "0,indhl" : "0,(hl)", "0,l" : "0,l", "00" : "$00", "08" : "$08", "1" : "1", "1,a" : "1,a", "1,b" : "1,b", "1,c" : "1,c", "1,d" : "1,d", "1,e" : "1,e", "1,h" : "1,h", "1,indhl" : "1,(hl)", "1,l" : "1,l", "10" : "$10", "18" : "$18", "2" : "2", "2,a" : "2,a", "2,b" : "2,b", "2,c" : "2,c", "2,d" : "2,d", "2,e" : "2,e", "2,h" : "2,h", "2,indhl" : "2,(hl)", "2,l" : "2,l", "20" : "$20", "28" : "$28", "3,a" : "3,a", "3,b" : "3,b", "3,c" : "3,c", "3,d" : "3,d", "3,e" : "3,e", "3,h" : "3,h", "3,indhl" : "3,(hl)", "3,l" : "3,l", "30" : "$30", "38" : "$38", "4,a" : "4,a", "4,b" : "4,b", "4,c" : "4,c", "4,d" : "4,d", "4,e" : "4,e", "4,h" : "4,h", "4,indhl" : "4,(hl)", "4,l" : "4,l", "5,a" : "5,a", "5,b" : "5,b", "5,c" : "5,c", "5,d" : "5,d", "5,e" : "5,e", "5,h" : "5,h", "5,indhl" : "5,(hl)", "5,l" : "5,l", "6,a" : "6,a", "6,b" : "6,b", "6,c" : "6,c", "6,d" : "6,d", "6,e" : "6,e", "6,h" : "6,h", "6,indhl" : "6,(hl)", "6,l" : "6,l", "7,a" : "7,a", "7,b" : "7,b", "7,c" : "7,c", "7,d" : "7,d", "7,e" : "7,e", "7,h" : "7,h", "7,indhl" : "7,(hl)", "7,l" : "7,l", "a" : "a", "a,a" : "a,a", "a,b" : "a,b", "a,c" : "a,c", "a,d" : "a,d", "a,e" : "a,e", "a,h" : "a,h", "a,i" : "a,i", "a,indbc" : "a,(bc)", "a,indc" : "a,(c)", "a,indde" : "a,(de)", "a,indhl" : "a,(hl)", "a,indix+d" : "a,(ix+${0:02X})", "a,indiy+d" : "a,(iy+${0:02X})", "a,indn" : "a,(${0:02X})", "a,indnn" : "a,(${1:02X}{0:02X})", "a,l" : "a,l", "a,n" : "a,${0:02X}", "a,r" : "a,r", "af" : "af", "af,af'" : "af,af'", "b" : "b", "b,a" : "b,a", "b,b" : "b,b", "b,c" : "b,c", "b,d" : "b,d", "b,e" : "b,e", "b,h" : "b,h", "b,indc" : "b,(c)", "b,indhl" : "b,(hl)", "b,indix+d" : "b,(ix+${0:02X})", "b,indiy+d" : "b,(iy+${0:02X})", "b,l" : "b,l", "b,n" : "b,${0:02X}", "bc" : "bc", "bc,indaa" : "bc,(${1:02X}{0:02X})", "bc,nn" : "bc,${1:02X}{0:02X}", "c" : "c", "c,a" : "c,a", "c,b" : "c,b", "c,c" : "c,c", "c,d" : "c,d", "c,e" : "c,e", "c,h" : "c,h", "c,indc" : "c,(c)", "c,indhl" : "c,(hl)", "c,indix+d" : "c,(ix+${0:02X})", "c,indiy+d" : "c,(iy+${0:02X})", "c,l" : "c,l", "c,n" : "c,${0:02X}", "c,pcr" : "c,${0:04X}", "c,nn" : "c,${1:02X}{0:02X}", "d" : "d", "d,a" : "d,a", "d,b" : "d,b", "d,c" : "d,c", "d,d" : "d,d", "d,e" : "d,e", "d,h" : "d,h", "d,indc" : "d,(c)", "d,indhl" : "d,(hl)", "d,indix+d" : "d,(ix+${0:02X})", "d,indiy+d" : "d,(iy+${0:02X})", "d,l" : "d,l", "d,n" : "d,${0:02X}", "de" : "de", "de,hl" : "de,hl", "de,indaa" : "de,(${1:02X}{0:02X})", "de,nn" : "de,${1:02X}{0:02X}", "e" : "e", "e,a" : "e,a", "e,b" : "e,b", "e,c" : "e,c", "e,d" : "e,d", "e,e" : "e,e", "e,h" : "e,h", "e,indc" : "e,(c)", "e,indhl" : "e,(hl)", "e,indix+d" : "e,(ix+${0:02X})", "e,indiy+d" : "e,(iy+${0:02X})", "e,l" : "e,l", "e,n" : "e,${0:02X}", "h" : "h", "h,a" : "h,a", "h,b" : "h,b", "h,c" : "h,c", "h,d" : "h,d", "h,e" : "h,e", "h,h" : "h,h", "h,indc" : "h,(c)", "h,indhl" : "h,(hl)", "h,indix+d" : "h,(ix+${0:02X})", "h,indiy+d" : "h,(iy+${0:02X})", "h,l" : "h,l", "h,n" : 
"h,${0:02X}", "hl" : "hl", "hl,bc" : "hl,bc", "hl,de" : "hl,de", "hl,hl" : "hl,hl", "hl,indnn"
: "hl,(${1:02X}{0:02X})", "hl,nn" : "hl,${1:02X}{0:02X}", "hl,sp" : "hl,sp", "i,a" : "i,a", "indaa,bc" : "(${1:02X}{0:02X}),bc", "indaa,de" : "(${1:02X}{0:02X}),de", "indaa,ix" : "
(${1:02X}{0:02X}),ix", "indaa,iy" : "(${1:02X}{0:02X}),iy", "indaa,sp" : "(${1:02X}{0:02X}),sp", "indbc,a" : "(bc),a", "indc,a" : "(c),a", "indc,b" : "(c),b", "indc,c" : "(c),c", "indc,d" : "(c),d", "indc,e" : "(c),e", "indc,h" : "(c),h", "indc,l" : "(c),l", "indde,a" : "(de),a", "indhl" : "(hl)", "indhl,a" : "(hl),a", "indhl,b" : "(hl),b", "indhl,c" : "(hl),c", "indhl,d" : "(hl),d", "indhl,e" : "(hl),e", "indhl,h" : "(hl),h", "indhl,l" : "(hl),l", "indhl,n" : "(hl),${0:02X}", "indix+d" : "(ix+${0:02X})", "indix+d,a" : "(ix+${0:02X}),a", "indiy+d,a" : "(iy+${0:02X}),a", "indix+d,b" : "(ix+${0:02X}),b", "indix+d,c" : "(ix+${0:02X}),c", "indix+d,d" : "(ix+${0:02X}),d", "indix+d,e" : "(ix+${0:02X}),e", "indix+d,h" : "(ix+${0:02X}),h", "indix+d,l" : "(ix+${0:02X}),l", "indix+d,n" : "(ix+${0:02X}),${1:02X}", "indiy+d" : "(iy+${0:02X})", "indiy+d,b" : "(iy+${0:02X}),b", "indiy+d,c" : "(iy+${0:02X}),c", "indiy+d,d" : "(iy+${0:02X}),d", "indiy+d,e" : "(iy+${0:02X}),e", "indiy+d,h" : "(iy+${0:02X}),h", "indiy+d,l" : "(iy+${0:02X}),l", "indiy+d,n" : "(iy+${0:02X}),${1:02X}", "indn,a" : "(${0:02X}),a", "indnn,a" : "(${1:02X}{0:02X}),a", "indnn,hl" : "(${1:02X}{0:02X}),hl", "indsp,hl" : "(sp),hl", "ix" : "ix", "ix,aa" : "ix,${1:02X}{0:02X}", "ix,bc" : "ix,bc", "ix,de" : "ix,de", "ix,indaa" : "ix,(${1:02X}{0:02X})", "ix,ix" : "ix,ix", "ix,sp" : "ix,sp", "iy" : "iy", "iy,aa" : "iy,${1:02X}{0:02X}", "iy,bc" : "iy,bc", "iy,bc" : "iy,bc", "iy,de" : "iy,de", "iy,indaa" : "iy,(${1:02X}{0:02X})", "iy,indaa" : "iy,(${1:02X}{0:02X})", "iy,iy" : "iy,iy", "iy,sp" : "iy,sp", "l" : "l", "l,a" : "l,a", "l,b" : "l,b", "l,c" : "l,c", "l,d" : "l,d", "l,e" : "l,e", "l,h" : "l,h", "l,indc" : "l,(c)", "l,indhl" : "l,(hl)", "l,indix+d" : "l,(ix+${0:02X})", "l,indiy+d" : "l,(iy+${0:02X})", "l,l" : "l,l", "l,n" : "l,${0:02X}", "m" : "m", "m,nn" : "m,${1:02X}{0:02X}", "n" : "${0:02X}", "n,pcr" : "${0:04X}", "n,indix+d" : "n,(ix+${0:02X})", "n,indiy+d" : "n,(iy+${0:02X})", "nc" : "nc", "nc,pcr" : "nc,${0:04X}", "nc,nn" : "nc,${1:02X}{0:02X}", "nn" : "${1:02X}{0:02X}", "nz" : "nz", "nz,pcr" : "nz,${0:04X}", "nz,nn" : "nz,${1:02X}{0:02X}", "p" : "p", "p,nn" : "p,${1:02X}{0:02X}", "pcr" : "${0:04X}", "pe" : "pe", "pe,nn" : "pe,${1:02X}{0:02X}", "po" : "po", "po,nn" : "po,${1:02X}{0:02X}", "r,a" : "r,a", "sp" : "sp", "sp,hl" : "sp,hl", "sp,indaa" : "sp,(${1:02X}{0:02X})", "sp,nn" : "sp,${1:02X}{0:02X}", "z" : "z", "z,pcr" : "z,${0:04X}", "z,nn" : "z,${1:02X}{0:02X}", } # Op Code Table # Key is numeric opcode (possibly multiple bytes) # Value is a list: # # bytes # mnemonic # addressing mode # flags (e.g. pcr) opcodeTable = { 0x00 : [ 1, "nop", "implied" ], 0x01 : [ 3, "ld", "bc,nn" ], 0x02 : [ 1, "ld", "indbc,a" ], 0x03 : [ 1, "inc", "bc" ], 0x04 : [ 1, "inc", "b" ], 0x05 : [ 1, "dec",
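The operand strings are ordinary str.format templates fed the operand bytes in memory order (low byte first), which is why 16-bit values print as {1}{0}; a quick sketch with illustrative bytes:

addressModeTable["a,indnn"].format(0x34, 0x12)    # -> 'a,($1234)'
addressModeTable["indix+d,n"].format(0x05, 0x7F)  # -> '(ix+$05),$7F'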
isarlab-department-engineering/ros_dt_lane_follower
deprecated_nodes/old-lane-detection.py
Python
bsd-3-clause
9,077
0.048695
#!/usr/bin/env python from __future__ import print_function import roslib roslib.load_manifest('lane_detection') import rospy import sys from std_msgs.msg import Int32 import cv2 from sensor_msgs.msg import Image from cv_bridge import CvBridge, CvBridgeError from picamera import PiCamera from picamera.array import PiRGBArray import time import numpy as np def detect(img): # start time start_time = cv2.getTickCount() # Gaussian Filter to remove noise img = cv2.medianBlur(img,5) gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) # print img.shape = (200,350,3) rows,cols,channels = img.shape # ROI roi_mask = np.zeros(img.shape,dtype=np.uint8) roi_mask[10
:rows,0:cols] = 255 street = cv2.bitwise_and(img,roi_mask) stop_roi_mask = np.zeros(gray.shape,dtype=np.uint8) stop_roi_mask[100:rows,150:250] = 255 right_roi_mask = np.zeros(gray.shape,dtype=np.uint8) right_r
oi_mask[rows/3:rows,220:360] = 255 right_roi = cv2.bitwise_and(img,img,right_roi_mask) left_roi_mask = np.zeros(gray.shape,dtype=np.uint8) left_roi_mask[rows/3:rows,0:180] = 255 left_roi = cv2.bitwise_and(img,img,left_roi_mask) # define range of color in HSV hsv = cv2.cvtColor(street,cv2.COLOR_BGR2HSV) sensitivity = 60 # range of sensitivity=[90,150] lower_white = np.array([0,0,255-sensitivity]) upper_white = np.array([255,sensitivity,255]) white_mask = cv2.inRange(hsv,lower_white,upper_white) white_mask = cv2.erode(white_mask, None, iterations=2) white_mask = cv2.dilate(white_mask, None, iterations=2) lower_red = np.array([150,70,50]) upper_red = np.array([200,255,255]) red_mask = cv2.inRange(hsv,lower_red,upper_red) red_mask = cv2.erode(red_mask, None, iterations=2) red_mask = cv2.dilate(red_mask, None, iterations=2) lower_yellow = np.array([10,100,100]) #0,100,100 upper_yellow = np.array([30,255,255]) #80,255,255 yellow_mask = cv2.inRange(hsv,lower_yellow,upper_yellow) yellow_mask = cv2.erode(yellow_mask, None, iterations=2) yellow_mask = cv2.dilate(yellow_mask, None, iterations=2) # mask AND original img whitehsvthresh = cv2.bitwise_and(right_roi,right_roi,mask=white_mask) yellowhsvthresh = cv2.bitwise_and(street,street,mask=yellow_mask) redhsvthresh = cv2.bitwise_and(street,street,mask=red_mask) # Canny Edge Detection right_edges = cv2.Canny(whitehsvthresh,100,200) left_edges = cv2.Canny(yellowhsvthresh,100,200) right_edges = cv2.bitwise_and(right_edges,right_roi_mask) left_edges = cv2.bitwise_and(left_edges,left_roi_mask) red_edges_hsv = cv2.Canny(redhsvthresh,100,200) red_edges = cv2.bitwise_and(red_edges_hsv,stop_roi_mask) # Probabilistic Hough Transform # minLength=50 # maxGap=10 # right_lines = cv2.HoughLinesP(right_edges,1,np.pi/180,30,minLength,maxGap) # left_lines = cv2.HoughLinesP(left_edges,1,np.pi/180,30,minLength,maxGap) # red_lines = cv2.HoughLinesP(red_edges,1,np.pi/180,100,minLength,maxGap) # # w = 205 # da controllare # lw = 20 # da controllare # ly = 15 # da controllare # i = 0 # j = 0 # d = [] # phi = [] # if right_lines is not None: # for x in range(0,len(right_lines)): # for x1,y1,x2,y2 in right_lines[x]: # d_i = ((x1+x2)/2)-(w/2) # if x2>x1: # d_i = d_i - lw # d.insert(i,d_i) # a = x2-x1 # if x2<x1: # a = -a # phi.insert(j,(np.pi)/2 - np.arctan(a/(y2-y1))) # i+1 # j+1 # rospy.loginfo("Right lane: ") # rospy.loginfo(d) # # if left_lines is not None: # for x in range(0,len(left_lines)): # for x1,y1,x2,y2 in left_lines[x]: # d_i = ((x1+x2)/2)+(w/2) # if x2>x1: # d_i = d_i + ly # d.insert(i,d_i) # a = x2-x1 # if x2<x1: # a = -a # phi.insert(j,(np.pi)/2) - np.arctan2((x2-x1)/(y2-y1)) # i+1 # j+1 # rospy.loginfo("Left lane: ") # rospy.loginfo(d) ## rospy.loginfo(d) ## rospy.loginfo(phi) # ## bufferx_right = [] ## i=0 ## j=0 ## mdx=[] ## if lines_right is not None: ## for x in range(0,len(lines_right)): ## for x1,y1,x2,y2 in lines_right[x]: ## if x2!=x1: ## m=(y2-y1)/(float(x2-x1)) ## #alpha=np.arctan(m) ## mdx.insert(j,m) ## bufferx_right.insert(i,x1) ## i+1 ## bufferx_right.insert(i,x2) ## i+1 ## j+1 ## bufferx_left = [] ## i=0 ## j=0 ## msx=[] ## if lines_left is not None: ## for x in range(0,len(lines_left)): ## for x1,y1,x2,y2 in lines_left[x]: ## if x2!=x1: ## m=(y2-y1)/(float(x2-x1)) ## #alpha=np.arctan(m) ## msx.insert(j,m) ## bufferx_left.insert(i,x1) ## i+1 ## bufferx_left.insert(i,x2) ## i+1 ## j+1 ## x=0 ## mx_right=0 ## for j in range(0,len(bufferx_right)): ## x+=bufferx_right[j] ## if len(bufferx_right)!=0: ## mx_right=x/len(bufferx_right) ## ## x=0 ## 
mx_left=0 ## for k in range(0,len(bufferx_left)): ## x+=bufferx_left[k] ## if len(bufferx_left)!=0: ## mx_left=x/len(bufferx_left) ## ## mx=(mx_right+mx_left)/2 ## ## x=0 ## m_right = 0 ## for j in range(0,len(mdx)): ## x+=mdx[j] ## if len(mdx)!=0: ## m_right=x/len(mdx) ## ## x=0 ## m_left=0 ### for k in range(0,len(msx)): ### x+=msx[k] # if len(msx)!=0: # m_left=x/(len(msx)) # # m = (m_right+m_left)/2 # # if lines_right is not None and lines_left is not None: # if (mx<=250 and mx>=150): # return "forward" # elif mx>250: # return "left" # elif mx<150: # return "right" # elif lines_left is None and lines_right is not None: # if mdx>0.8: # return "forward" # else: # return "left" # elif lines_right is None and bufferx_left is not None: # if msx>0.8: # return "forward" # else: # return "right" # else: # return "x" # Standard Hough Transform right_lines = cv2.HoughLines(right_edges,0.8,np.pi/180,40) left_lines = cv2.HoughLines(left_edges,0.8,np.pi/180,35) red_lines = cv2.HoughLines(red_edges,1,np.pi/180,30) xm = cols/2 ym = rows # Draw right lane x = [] i = 0 if right_lines is not None: right_lines = np.array(right_lines[0]) for rho, theta in right_lines: a=np.cos(theta) b=np.sin(theta) x0,y0=a*rho,b*rho y3 = 140 x3 = int(x0+((y0-y3)*np.sin(theta)/np.cos(theta))) x.insert(i,x3) i+1 if len(x) != 0: xmin = x[0] for k in range(0,len(x)): if x[k] < xmin and x[k] > 0: xmin = x[k] kr = int(np.sqrt(((xmin-xm)*(xmin-xm))+((y3-ym)*(y3-ym)))) else: kr = 0 xmin = 0 # Draw left lane x = [] i = 0 if left_lines is not None: left_lines = np.array(left_lines[0]) for rho, theta in left_lines: a=np.cos(theta) b=np.sin(theta) x0,y0=a*rho,b*rho y3 = 140 x3 = int(x0+((y0-y3)*np.sin(theta)/np.cos(theta))) x.insert(i,x3) i+1 if len(x) != 0: xmax = x[0] for k in range(0,len(x)): if x[k] > xmax and x[k]<cols: xmax = x[k] kl = int(np.sqrt(((xmax-xm)*(xmax-xm))+((y3-ym)*(y3-ym)))) else: kl = 0 xmax = 0 error = kr - kl #end time end_time = cv2.getTickCount() time_count= (end_time - start_time) / cv2.getTickFrequency() # rospy.loginfo(time_count) if red_lines is not None: rospy.loginfo("STOP") return 154 #stop elif right_lines is not None and left_lines is not None: rospy.loginfo(error) if error > 150: error = 150 elif error < -150: error = -150 return error elif left_lines is not None and right_lines is None: rospy.loginfo("Turn Right") rospy.loginfo(kl) return 152 #turn right elif left_lines is None and right_lines is not None: rospy.loginfo("Turn Left") return 153 #turn let elif left_lines is None and right_lines is None: rospy.loginfo("No line")
agoose77/hivesystem
dragonfly/scene/bound/moverel.py
Python
bsd-2-clause
978
0.002045
from bee import * from bee.segments import * import libcontext from libcontext.socketclasses import * from libcontext.pluginclasses import * def get_worker(name, xyz): class moverel(worker): """Relative movement along %s axis""" % xyz __beename__ = name moverel = antenna("push", "float") movement = variable("float") connect(moverel, movement) @modifier def do_move(self):
axis = self.get_matrix().get_proxy("AxisSystem") axis.origin += getattr(axis, xyz) * self.movement axis.commit() trigger(movement, do_move) def set_get_matrix(self, function): self.g
et_matrix = function def place(self): libcontext.socket(("entity", "bound", "matrix"), socket_single_required(self.set_get_matrix)) return moverel moverelX = get_worker("moverelX", "x") moverelY = get_worker("moverelY", "y") moverelZ = get_worker("moverelZ", "z")
zhimin711/nova
nova/virt/imagecache.py
Python
apache-2.0
4,660
0.000215
# Copyright 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.compute import task_states from nova.compute import vm_states import nova.conf from nova import objects from nova.virt import block_device as driver_block_device CONF = nova.conf.CONF CONF.import_opt('host', 'nova.netconf') class ImageCacheManager(object): """Base class for the image cache manager. This class will provide a generic interface to the image cache manager. """ def __init__(self): self.remove_unused_base_images = CONF.remove_unused_base_images self.resize_states = [task_states.RESIZE_PREP, task_states.RESIZE_MIGRATING, task_states.RESIZE_MIGRATED, task_st
ates.RESIZE_FINISH] def _get_base(self): """Returns the base directory of the cached images."""
raise NotImplementedError() def _list_running_instances(self, context, all_instances): """List running instances (on all compute nodes). This method returns a dictionary with the following keys: - used_images - image_popularity - instance_names """ used_images = {} image_popularity = {} instance_names = set() used_swap_images = set() instance_bdms = objects.BlockDeviceMappingList.bdms_by_instance_uuid( context, [instance.uuid for instance in all_instances]) for instance in all_instances: # NOTE(mikal): "instance name" here means "the name of a directory # which might contain an instance" and therefore needs to include # historical permutations as well as the current one. instance_names.add(instance.name) instance_names.add(instance.uuid) if (instance.task_state in self.resize_states or instance.vm_state == vm_states.RESIZED): instance_names.add(instance.name + '_resize') instance_names.add(instance.uuid + '_resize') for image_key in ['image_ref', 'kernel_id', 'ramdisk_id']: image_ref_str = getattr(instance, image_key) if image_ref_str is None: continue local, remote, insts = used_images.get(image_ref_str, (0, 0, [])) if instance.host == CONF.host: local += 1 else: remote += 1 insts.append(instance.name) used_images[image_ref_str] = (local, remote, insts) image_popularity.setdefault(image_ref_str, 0) image_popularity[image_ref_str] += 1 bdms = instance_bdms.get(instance.uuid) if bdms: swap = driver_block_device.convert_swap(bdms) if swap: swap_image = 'swap_' + str(swap[0]['swap_size']) used_swap_images.add(swap_image) return {'used_images': used_images, 'image_popularity': image_popularity, 'instance_names': instance_names, 'used_swap_images': used_swap_images} def _list_base_images(self, base_dir): """Return a list of the images present in _base. This method returns a dictionary with the following keys: - unexplained_images - originals """ return {'unexplained_images': [], 'originals': []} def _age_and_verify_cached_images(self, context, all_instances, base_dir): """Ages and verifies cached images.""" raise NotImplementedError() def update(self, context, all_instances): """The cache manager. This will invoke the cache manager. This will update the cache according to the defined cache management scheme. The information populated in the cached stats will be used for the cache management. """ raise NotImplementedError()
Idematica/django-oscar
oscar/apps/dashboard/orders/views.py
Python
bsd-3-clause
28,253
0.000035
import datetime from decimal import Decimal as D, InvalidOperation from django.contrib import messages from django.utils.translation import ugettext_lazy as _ from django.core.urlresolvers import reverse from django.core.exceptions import ObjectDoesNotExist from django.db.models.loading import get_model from django.db.models import fields, Q, Sum, Count from django.http import HttpResponse, HttpResponseRedirect, Http404 from django.shortcuts import get_object_or_404 from django.utils.datastructures import SortedDict from django.views.generic import ListView, DetailView, UpdateView, FormView from django.conf import settings from oscar.core.loading import get_class from oscar.core.utils import format_datetime from oscar.apps.dashboard.orders import forms from oscar.views import sort_queryset from oscar.views.generic import BulkEditMixin from oscar.apps.dashboard.reports.csv_utils import CsvUnicodeWriter from oscar.apps.payment.exceptions import PaymentError from oscar.apps.order.exceptions import InvalidShippingEvent, InvalidStatus Order = get_model('order', 'Order') OrderNote = get_model('order', 'OrderNote') ShippingAddress = get_model('order', 'ShippingAddress') Transaction = get_model('payment', 'Transaction') Line = get_model('order', 'Line') ShippingEventType = get_model('order', 'ShippingEventType') PaymentEventType = get_model('order', 'PaymentEventType') EventHandler = get_class('order.processing', 'EventHandler') Partner = get_model('partner', 'Partner') def queryset_orders_for_user(user): """ Returns a queryset of all orders that a user is allowed to access. A staff user may access all orders. To allow access to an order for a non-staff user, at least one line's partner has to have the user in the partner's list. """ queryset = Order._default_manager.all() if user.is_staff: return queryset else: partners = Partner._default_manager.filter(users=user) return queryset.filter(lines__partner__in=partners).distinct() def get_order_for_user_or_404(user, number): try: return queryset_orders_for_user(user).get(number=number) except ObjectDoesNotExist: raise Http404() class OrderStatsView(FormView): """ Dashboard view for order statistics. Supports the permission-based dashboard. """ template_name = 'dashboard/orders/statistics.html' form_class = forms.OrderStatsForm def get(self, request, *args, **kwargs): return self.post(request, *args, **kwargs) def form_valid(self, form): ctx = self.get_context_data(form=form, filters=form.get_filters()) return self.render_to_response(ctx) def get_form_kwargs(self): kwargs = super(OrderStatsView, self).get_form_kwargs() kwargs['data'] = self.request.GET return kwargs def get_context_data(self, **kwargs): ctx = super(OrderStatsView, self).get_context_data(**kwargs) filters = kwargs.get('filters', {}) ctx.update(self.get_stats(filters)) ctx['title'] = kwargs['form'].get_filter_description() return ctx def get_stats(self, filters): orders = queryset_orders_for_user(self.request.user).filter(**filters) stats = { 'total_orders': orders.count(), 'total_lines': Line.objects.filter(order__in=orders).count(), 'total_revenue': orders.aggregate( Sum('total_incl_tax'))['total_incl_tax__sum'] or D('0.00'), 'order_status_breakdown': orders.order_by('status').values( 'status').annotate(freq=Count('id')) } return stats class OrderListView(BulkEditMixin, ListView): """ Dashboard view for a list of orders. Supports the permission-based dashboard. 
""" model = Order context_object_name = 'orders' template_name = 'dashboard/orders/order_list.html' form_class = forms.OrderSearchForm desc_template = _("%(main_filter)s %(name_filter)s %(title_filter)s" "%(upc_filter)s %(sku_filter)s %(date_filter)s" "%(voucher_filter)s %(payment_filter)s" "%(status_filter)s") paginate_by = 25 description = '' actions = ('download_selected_orders',) current_view = 'dashboard:order-list' def dispatch(self, request, *args, **kwargs): # base_queryset is equal to all orders the user is allowed to access self.base_queryset = queryset_orders_for_user( request.user).order_by('-date_placed') return super(OrderListView, self).dispatch(request, *args, **kwargs) def get(self, request, *args, **kwargs): if 'order_number' in request.GET and request.GET.get( 'response_format', 'html') == 'html': # Redirect to Order detail page if valid order number is given try: order = self.base_queryset.get( number=request.GET['order_number']) except Order.DoesNotExist: pass else: url = reverse('dashboard:order-detail', kwargs={'number':
order.number}) return HttpResponseRedirect(url) return super(OrderListView, self).get(request, *args, **kwargs) def get_desc_context(self, data=None): # noqa (too complex (16)) """Update the title that describes the q
ueryset"""
        desc_ctx = {
            'main_filter': _('All orders'),
            'name_filter': '',
            'title_filter': '',
            'upc_filter': '',
            'sku_filter': '',
            'date_filter': '',
            'voucher_filter': '',
            'payment_filter': '',
            'status_filter': '',
        }
        if 'order_status' in self.request.GET:
            status = self.request.GET['order_status']
            if status.lower() == 'none':
                desc_ctx['main_filter'] = _("Orders without an order status")
            else:
                desc_ctx['main_filter'] = _("Orders with status '%s'") % status
        if data is None:
            return desc_ctx
        if data['order_number']:
            desc_ctx['main_filter'] = _('Orders with number starting with'
                                        ' "%(order_number)s"') % data
        if data['name']:
            desc_ctx['name_filter'] = _(" with customer name matching"
                                        " '%(name)s'") % data
        if data['product_title']:
            desc_ctx['title_filter'] \
                = _(" including an item with title matching"
                    " '%(product_title)s'") % data
        if data['upc']:
            desc_ctx['upc_filter'] = _(" including an item with UPC"
                                       " '%(upc)s'") % data
        if data['partner_sku']:
            desc_ctx['sku_filter'] = _(" including an item with ID"
                                       " '%(partner_sku)s'") % data
        if data['date_from'] and data['date_to']:
            desc_ctx['date_filter'] \
                = _(" placed between %(start_date)s and %(end_date)s") \
                % {'start_date': format_datetime(data['date_from']),
                   'end_date': format_datetime(data['date_to'])}
        elif data['date_from']:
            desc_ctx['date_filter'] = _(" placed since %s") \
                % format_datetime(data['date_from'])
        elif data['date_to']:
            date_to = data['date_to'] + datetime.timedelta(days=1)
            desc_ctx['date_filter'] = _(" placed before %s") \
                % format_datetime(date_to)
        if data['voucher']:
            desc_ctx['voucher_filter'] = _(" using voucher '%(voucher)s'") \
                % data
        if data['payment_method']:
            desc_ctx['payment_filter'] = _(" paid for by %(payment_method)s") \
                % data
        if data['status']:
            desc_ctx['status_filter'] = _(" with status %(status)s") % data
        return desc_ctx

    def get_queryset(self):  # noqa (too complex (19))
        """
        Build the queryset for this list.
        ""
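
A hedged usage note (not part of the source): both helpers above funnel access control through queryset_orders_for_user, so a new dashboard view only has to route its lookups through them. A minimal sketch follows; the view name and template path are illustrative assumptions, not from the source:

# Hypothetical companion view built on the helpers defined above.
from django.shortcuts import render

def order_summary(request, number):
    # Raises Http404 unless the user is staff or belongs to a partner
    # attached to one of the order's lines.
    order = get_order_for_user_or_404(request.user, number)
    return render(request, 'dashboard/orders/summary.html', {'order': order})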
ROldford/tvregex
tests/experiments/json_test.py
Python
mit
261
0
import json SHOWNAMES_DICT = { "lipsyncb
attle": "Lip Sync Battle", "archer2009": "Archer (2009)", "thedailyshow": "The Daily Show", "atmidnight": "@midnig
ht" } with open("test_file.json", "w") as f: json.dump(SHOWNAMES_DICT, f, indent=4)
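
A complementary sketch (not in the source) that reads the mapping back and resolves a show name, which is presumably what the dump is for; the lookup keys are taken from the dictionary's own entries:

# Hypothetical round-trip check for the file written above.
import json

with open("test_file.json") as f:
    shownames = json.load(f)

# Keys are normalised lower-case names; values are display titles.
assert shownames["archer2009"] == "Archer (2009)"
print(shownames.get("atmidnight", "unknown show"))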
e1211205/pimouse_ros
scripts/buzzer4.py
Python
gpl-3.0
701
0.011412
#!/usr/bin/env python #encoding: utf8 import rospy, actionlib from std_msgs.msg import UInt16 from pimouse_ros.msg import MusicAction, MusicResult, MusicFeedback def write_freq(hz=0): bfile = "/dev/rtbuzzer0" try:
with open(bfile,"w") as f: f.write(str(hz) + "\n") except IOError: rospy.logerr("can't write to " + bfile) def recv_buzzer(data): write_freq(data.data) def exec_music(goal): pass if __name__ == '__main
__': rospy.init_node('buzzer') rospy.Subscriber("buzzer", UInt16, recv_buzzer) music = actionlib.SimpleActionServer('music', MusicAction, exec_music, False) music.start() rospy.on_shutdown(write_freq) rospy.spin()
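
A hedged companion sketch (not part of the source): the node above subscribes to the `buzzer` topic with std_msgs/UInt16 payloads, so any client can beep the buzzer by publishing a frequency and then 0 Hz to silence it. The node name and timings below are arbitrary assumptions; the topic wiring follows the code above:

#!/usr/bin/env python
# Hypothetical client for the buzzer node defined above.
import rospy
from std_msgs.msg import UInt16

if __name__ == '__main__':
    rospy.init_node('buzzer_client')
    pub = rospy.Publisher('buzzer', UInt16, queue_size=1)
    rospy.sleep(1.0)           # allow the connection to establish
    pub.publish(UInt16(440))   # A4 tone
    rospy.sleep(0.5)
    pub.publish(UInt16(0))     # stop the buzzer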
rbian/avocado-vt
virttest/remote_commander/messenger.py
Python
gpl-2.0
7,889
0.000127
#!/usr/bin/env python
'''
Created on Dec 6, 2013

:author: jzupka
'''
import os
import logging
import select
import cPickle
import time

import remote_interface
import cStringIO
import base64


class IOWrapper(object):

    """
    Class encapsulates IO operations to be more consistent across different
    implementations (stdio, sockets, etc.).
    """

    def __init__(self, obj):
        """
        :param obj: IO object, for example a file descriptor.
        """
        self._obj = obj

    def close(self):
        raise NotImplementedError()

    def read(self, max_len, timeout=None):
        """
        Read function should be reimplemented as a blocking read from the
        data source when timeout is None and a non-blocking read when
        timeout is not None. Implementation example: StdIOWrapperIn.

        :param max_len: Maximum length of data to read.
        :type max_len: int
        :param timeout: Timeout of the reading operation.
        :type timeout: float
        :return: Data read.
        """
        raise NotImplementedError()

    def write(self, data):
        """
        Write function should be implemented for objects used for writing.

        :param data: Data to write.
        :type data: str.
        """
        raise NotImplementedError()

    def fileno(self):
        """
        Function should return the file descriptor number if the object is
        to be used for standard IO operations.

        :return: File number.
        """
        raise NotImplementedError()

    def _wait_for_data(self, max_len, timeout):
        """
        Wait for data for up to timeout seconds.

        :param max_len: Maximum length of data to read.
        :type max_len: int
        :param timeout: Timeout of the reading operation.
        :type timeout: float
        :return: Data read.
        """
        r, _, _ = select.select([self.fileno()], [], [], timeout)
        if r:
            return self.read(max_len, None)
        return None


class DataWrapper(object):

    """
    Basic data encoder/decoder; the default implementation is the identity
    transformation.
    """

    def decode(self, data):
        """
        Decode the data which was read.

        :return: decoded data.
        """
        return data

    def encode(self, data):
        """
        Encode data.

        :return: encoded data.
        """
        return data


class DataWrapperBase64(DataWrapper):

    """
    Base64 implementation of DataWrapper.
    """

    def decode(self, data):
        return base64.b64decode(data)

    def encode(self, data):
        return base64.b64encode(data)


class StdIOWrapper(IOWrapper, DataWrapper):

    """
    Basic implementation of IOWrapper for stdio.
    """

    def close(self):
        os.close(self._obj)

    def fileno(self):
        return self._obj


class StdIOWrapperIn(StdIOWrapper):

    """
    Basic implementation of IOWrapper for stdin.
    """

    def read(self, max_len, timeout=None):
        if timeout is not None:
            return self._wait_for_data(max_len, timeout)
        else:
            return os.read(self._obj, max_len)


class StdIOWrapperOut(StdIOWrapper):

    """
    Basic implementation of IOWrapper for stdout.
    """

    def write(self, data):
        os.write(self._obj, data)


class StdIOWrapperInBase64(StdIOWrapperIn, DataWrapperBase64):

    """
    Base64 implementation of IOWrapper for stdin.
    """


class StdIOWrapperOutBase64(StdIOWrapperOut, DataWrapperBase64):

    """
    Base64 implementation of IOWrapper for stdout.
    """


class MessengerError(Exception):

    def __init__(self, msg):
        super(MessengerError, self).__init__(msg)
        self.msg = msg

    def __str__(self):
        return "Messenger ERROR %s" % (self.msg)


def _map_path(mod_name, kls_name):
    if mod_name.endswith('remote_interface'):
        # catch all old module names
        mod = remote_interface
        return getattr(mod, kls_name)
    else:
        mod = __import__(mod_name)
        return getattr(mod, kls_name)


class Messenger(object):

    """
    Class can be used for communication between two Python processes
    connected by a communication channel wrapped by an IOWrapper class.
    Pickling is used for communication, and thus it is possible to
    communicate any picklable object.
    """

    def __init__(self, stdin, stdout):
        """
        :param stdin: Object for reading data from the communication
            interface.
        :type stdin: IOWrapper
        :param stdout: Object for writing data to the communication
            interface.
        :type stdout: IOWrapper
        """
        self.stdin = stdin
        self.stdout = stdout
        # Unfortunately, only a fixed-width encoding of the data length is
        # supported.
        self.enc_len_length = len(stdout.encode("0" * 10))

    def close(self):
        self.stdin.close()
        self.stdout.close()

    def format_msg(self, data):
        """
        Format a message where the first 10 characters are the length of
        the message and the rest is the pickled message.
        """
        pdata = cPickle.dumps(data, cPickle.HIGHEST_PROTOCOL)
        pdata = self.stdout.encode(pdata)
        len_enc = self.stdout.encode("%10d" % len(pdata))
        return "%s%s" % (len_enc, pdata)

    def flush_stdin(self):
        """
        Flush all input data from the communication interface.
        """
        const = 16384
        r, _, _ = select.select([self.stdin.fileno()], [], [], 1)
        while r:
            if len(self.stdin.read(const)) < const:
                break
            r, _, _ = select.select([self.stdin.fileno()], [], [], 1)

    def write_msg(self, data):
        """
        Write a formatted message to the communication interface.
        """
        self.stdout.write(self.format_msg(data))

    def _read_until_len(self, timeout=None):
        """
        Deal with terminal interfaces: read input until the received string
        contains only spaces and digits and len(string) == 10, i.e. the
        encoded message-length prefix.

        :param timeout: Timeout of reading.
        """
        data = ""
        endtime = None
        if timeout is not None:
            endtime = time.time() + timeout
        while (len(data) < self.enc_len_length and
               (endtime is None or time.time() < endtime)):
            d = self.stdin.read(1, timeout)
            if d is None:
                return None
            if len(d) == 0:
                return d
            data += d
        if len(data) < self.enc_len_length:
            return None
        return self.stdout.decode(data)

    def read_msg(self, timeout=None):
        """
        Read data from the communication interface.

        :param timeout: Timeout for reading data.
        :type timeout: float
        :return: (True, data) when reading is successful.
                 (False, None) when the other side is closed.
                 (None, None) when reading times out.
        """
        data = self._read_until_len(timeout)
        if data is None:
            return (None, None)
        if len(data) == 0:
            return (False, None)
        rdata = None
        try:
            cmd_len = int(data)
            rdata = ""
            rdata_len = 0
            while (rdata_len < cmd_len):
                rdata += self.stdin.read(cmd_len - rdata_len)
                rdata_len = len(rdata)
            rdataIO = cStringIO.StringIO(self.stdin.decode(rdata))
            unp = cPickle.Unpickler(rdataIO)
            unp.find_global = _map_path
            data = unp.load()
        except Exception, e:
            logging.error("ERROR data:%s rdata:%s" % (data, rdata))
            try:
                self.write_msg(remote_interface.MessengerError("Communication "
                                                               "failed.%s" % (e)))
            except OSError:
                pass
            self.flush_stdin()
            raise
        # Debugging commands.
        # if (isinstance(data, remote_interface.BaseCmd)):
        #     print data.func
        return (True, data)
rlrs/deep-rl
dqn/ParallelAgent.py
Python
mit
2,448
0.003268
""" Experimental agent implementation running separate threads for emulation and GPU training. This is slightly (estimate ~20%) faster than the sequential implementation, but results might be different. Copyright 2016 Rasmus Larsen This software may be modified and distributed under the terms of the MIT license. See the LICENSE.txt file for details. """ from Agent import Agent import random import threading import time import numpy as np class ParallelAgent(Agent): def __init__(self, emu, net, config): super(ParallelAgent, self).__init__(emu, net, config) self.gpu_lock = threading.Lock() self.testing = False def train(self): cpu = threading.Thread(target=self.ale_worker) cpu.setDaemon(True) gpu_1 = threading.Thr
ead(target=self.gpu_worker) gpu_2 = threading.Thread(target=self.gpu_worker) for i in xrange(int(self.train_start)): # wait for replay memory to fill self.next(random.randrange(self.emu.num_actions)) cpu.start() gpu_1.start() gpu_2.start() gpu_1.join()
gpu_2.join() return def test(self): self.testing = True time.sleep(0.5) # wait a bit for ALE worker to stop super(ParallelAgent, self).test() self.testing = False def ale_worker(self): """ Performs epsilon greedy action selection, updating the replay memory and emulating with ALE. """ while True: if self.testing: time.sleep(0.2) continue self.eps_greedy() def gpu_worker(self): """ Gathers a minibatch (on the CPU!) and feeds it to the GPU. Several can run at once, locking the GPU. """ while self.steps < self.train_frames: s, a, r, ns, t = self.mem.get_minibatch() # TODO: ReplayMemory is _not_ thread safe a = self.emu.onehot_actions(a) # necessary due to tensorflow not having proper indexing with self.gpu_lock: cost = self.net.train(s, a, r, ns, t) if self.steps % self.target_sync == 0: self.net.sync_target() if self.steps % self.test_freq == 0: self.test() self.steps += 1 if self.steps % 100 == 0: # TODO: remove, just for debugging print 'step ' + str(self.steps)
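
The TODO above flags that ReplayMemory is not thread safe while three threads share it. One hedged way to close that gap without modifying the memory class itself is a locking proxy; LockedReplayMemory is a hypothetical name, not from the source, and any insertion methods the real class exposes would need the same guard:

import threading

class LockedReplayMemory(object):
    """Serialise minibatch sampling on a replay memory shared between the
    ALE worker and the GPU workers."""

    def __init__(self, mem):
        self._mem = mem
        self._lock = threading.Lock()

    def get_minibatch(self):
        # Only one thread may sample (or, via the same lock, mutate) at once.
        with self._lock:
            return self._mem.get_minibatch()

    def __getattr__(self, name):
        # Fall through (unlocked) for the rest of the ReplayMemory API.
        return getattr(self._mem, name)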
Endika/django
tests/update/tests.py
Python
bsd-3-clause
5,112
0.000196
from __future__ import unicode_literals from django.test import TestCase from .models import A, B, D, Bar, DataPoint, Foo, RelatedPoint class SimpleTest(TestCase): def setUp(self): self.a1 = A.objects.create() self.a2 = A.objects.create() for x in range(20): B.objects.create(a=self.a1) D.objects.create(a=self.a1) def test_nonempty_update(self): """ Test that update changes the right number of rows for a nonempty queryset """ num_updated = self.a1.b_set.update(y=100) self.assertEqual(num_updated, 20) cnt = B.objects.filter(y=100).count() self.assertEqual(cnt, 20) def test_empty_update(self): """ Test that update changes the right number of rows for an empty queryset """ num_updated = self.a2.b_set.update
(y=100) self.assertEqual(num_updated, 0) cnt = B.objects.filter(y=100).count() self.assertEqual(cnt, 0) def test_nonempty_update_with_inheritance(self)
:
        """
        Test that update changes the right number of rows for a non-empty
        queryset when the update affects only a base table
        """
        num_updated = self.a1.d_set.update(y=100)
        self.assertEqual(num_updated, 20)
        cnt = D.objects.filter(y=100).count()
        self.assertEqual(cnt, 20)

    def test_empty_update_with_inheritance(self):
        """
        Test that update changes the right number of rows for an empty
        queryset when the update affects only a base table
        """
        num_updated = self.a2.d_set.update(y=100)
        self.assertEqual(num_updated, 0)
        cnt = D.objects.filter(y=100).count()
        self.assertEqual(cnt, 0)

    def test_foreign_key_update_with_id(self):
        """
        Test that update works using <field>_id for foreign keys
        """
        num_updated = self.a1.d_set.update(a_id=self.a2)
        self.assertEqual(num_updated, 20)
        self.assertEqual(self.a2.d_set.count(), 20)


class AdvancedTests(TestCase):

    def setUp(self):
        self.d0 = DataPoint.objects.create(name="d0", value="apple")
        self.d2 = DataPoint.objects.create(name="d2", value="banana")
        self.d3 = DataPoint.objects.create(name="d3", value="banana")
        self.r1 = RelatedPoint.objects.create(name="r1", data=self.d3)

    def test_update(self):
        """
        Objects are updated by first filtering the candidates into a queryset
        and then calling the update() method. It executes immediately and
        returns the number of rows matched.
        """
        resp = DataPoint.objects.filter(value="apple").update(name="d1")
        self.assertEqual(resp, 1)
        resp = DataPoint.objects.filter(value="apple")
        self.assertEqual(list(resp), [self.d0])

    def test_update_multiple_objects(self):
        """
        We can update multiple objects at once.
        """
        resp = DataPoint.objects.filter(value="banana").update(
            value="pineapple")
        self.assertEqual(resp, 2)
        self.assertEqual(DataPoint.objects.get(name="d2").value, 'pineapple')

    def test_update_fk(self):
        """
        Foreign key fields can also be updated, although you can only update
        the object referred to, not anything inside the related object.
        """
        resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0)
        self.assertEqual(resp, 1)
        resp = RelatedPoint.objects.filter(data__name="d0")
        self.assertEqual(list(resp), [self.r1])

    def test_update_multiple_fields(self):
        """
        Multiple fields can be updated at once
        """
        resp = DataPoint.objects.filter(value="apple").update(
            value="fruit", another_value="peach")
        self.assertEqual(resp, 1)
        d = DataPoint.objects.get(name="d0")
        self.assertEqual(d.value, 'fruit')
        self.assertEqual(d.another_value, 'peach')

    def test_update_all(self):
        """
        In the rare case you want to update every instance of a model, update()
        is also a manager method.
        """
        self.assertEqual(DataPoint.objects.update(value='thing'), 3)
        resp = DataPoint.objects.values('value').distinct()
        self.assertEqual(list(resp), [{'value': 'thing'}])

    def test_update_slice_fail(self):
        """
        We do not support update on already sliced query sets.
        """
        method = DataPoint.objects.all()[:2].update
        with self.assertRaises(AssertionError):
            method(another_value='another thing')

    def test_update_respects_to_field(self):
        """
        Update of an FK field which specifies a to_field works.
        """
        a_foo = Foo.objects.create(target='aaa')
        b_foo = Foo.objects.create(target='bbb')
        bar = Bar.objects.create(foo=a_foo)
        self.assertEqual(bar.foo_id, a_foo.target)
        bar_qs = Bar.objects.filter(pk=bar.pk)
        self.assertEqual(bar_qs[0].foo_id, a_foo.target)
        bar_qs.update(foo=b_foo)
        self.assertEqual(bar_qs[0].foo_id, b_foo.target)
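
A hedged extra case in the style of the tests above (not in the source): update() also accepts F() expressions, so column-to-column copies happen in SQL in a single query. A sketch of a method that would sit on AdvancedTests:

from django.db.models import F

def test_update_with_f_expression(self):
    # Copy one column into another without pulling rows into Python.
    DataPoint.objects.filter(name="d0").update(value=F("another_value"))
    d = DataPoint.objects.get(name="d0")
    self.assertEqual(d.value, d.another_value)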
emkailu/PAT3DEM
bin/p3convert.py
Python
mit
998
0.036072
#!/usr/bin/env python import os import sys import argparse import subprocess from shutil import copyfile def main(): progname = os.
path.basename(sys.argv[0])
    usage = progname + """ [options] <*.png>
    convert -average *.png output.png
    In total 210 images: first average 14 groups of 15 images each, then
    average the 14 intermediate results.
    """
    args_def = {}
    parser = argparse.ArgumentParser()
    parser.add_argument("png", nargs='*',
                        help="specify the png files to be averaged")
    args = parser.parse_args()
if len(sys.argv) == 1: print "usage: " + usage print "Please run '" + progname + " -h' for detailed options." sys.exit(1) # get default values for i in args_def: if args.__dict__[i] == None: args.__dict__[i] = args_def[i] # for i in xrange(14): cmd = ['convert', '-average'] + args.png[15*i:15*(i+1)] + ['output_{}.png'.format(i)] subprocess.call(cmd, stderr=subprocess.STDOUT) cmd = ['convert', '-average', 'output_*.png', 'output.png'] subprocess.call(cmd, stderr=subprocess.STDOUT) if __name__ == '__main__': main()
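
A note on the arithmetic above, with a hedged pure-Python equivalent: since the 14 groups each hold exactly 15 frames, the average of the group averages equals the global average of all 210 images. PIL and numpy are assumptions here; the source itself shells out to ImageMagick's convert:

# Hypothetical numpy/PIL re-implementation of the two-stage average.
import numpy as np
from PIL import Image

def average_pngs(paths, out_path):
    # Stack the frames as floats, take the per-pixel mean, round back to 8-bit.
    stack = np.stack([np.asarray(Image.open(p), dtype=np.float64)
                      for p in paths])
    mean = np.rint(stack.mean(axis=0)).astype(np.uint8)
    Image.fromarray(mean).save(out_path)

# for i in xrange(14):
#     average_pngs(pngs[15 * i:15 * (i + 1)], 'output_{}.png'.format(i))
# average_pngs(['output_{}.png'.format(i) for i in xrange(14)], 'output.png')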
campaignmonitor/createsend-python
samples/subscribers.py
Python
mit
411
0.004866
from createsend import * auth = { 'access_token': 'YOUR_ACCE
SS_TOKEN', 'refresh_token': 'YOUR_REFRESH_TOKEN' } listId = 'YOUR_LIST_ID' emailAddress = 'YOUR_SUBSCRIBER_EMAIL_ADDRESS' subscriber = Subscriber(auth, listId, emailAddress) # Get the details for a subscriber subscriberDetail = subscriber.get() for property, value in vars(subscriberDetail).items(): print(property, ":
", value)
eduNEXT/edx-platform
common/lib/xmodule/xmodule/video_module/video_xfields.py
Python
agpl-3.0
9,242
0.004653
""" # lint-amnesty, pylint: disable=cyclic-import XFields for video module. """ import datetime from xblock.fields import Boolean, DateTime, Dict, Float, List, Scope, String from xmodule.fields import RelativeTime # Make '_' a no-op so we can scrape strings. Using lambda instead of # `django.utils.translation.ugettext_noop` because Django cannot be imported in this file _ = lambda text: text class VideoFields: """Fields for `VideoBlock`.""" display_name = String( help=_("The display name for this component."), display_name=_("Component Display Name"), default="Video", scope=Scope.settings ) saved_video_position = RelativeTime( help=_("Current position in the video."), scope=Scope.user_state, default=datetime.timedelta(seconds=0) ) # TODO: This should be moved to Scope.content, but this will # require data migration to support the old video module. youtube_id_1_0 = String( help=_("Optional, for older browsers: the YouTube ID for the normal speed video."), display_name=_("YouTube ID"), scope=Scope.settings, default="3_yD_cEKoCk" ) youtube_id_0_75 = String( help=_("Optional, for older browsers: the YouTube ID for the .75x speed video."), display_name=_("YouTube ID for .75x speed"), scope=Scope.settings, default="" ) youtube_id_1_25 = String( help=_("Optional, for older browsers: the YouTube ID for the 1.25x speed video."), display_name=_("YouTube ID for 1.25x speed"), scope=Scope.settings, default="" ) youtube_id_1_5 = String( help=_("Optional, for older browsers: the YouTube ID for the 1.5x speed video."), display_name=_("YouTube ID for 1.5x speed"), scope=Scope.settings, default="" ) start_time = RelativeTime( # datetime.timedelta object help=_( "Time you want the video to start if you don't want the entire video to play. " "Not supported in the native mobile app: the full video file will play. " "Formatted as HH:MM:SS. The maximum value is 23:59:59." ), display_name=_("Video Start Time"), scope=Scope.settings, default=datetime.timedelta(seconds=0) ) end_time = RelativeTime( # datetime.timedelta object help=_( "Time you want the video to stop if you don't want the entire video to play. " "Not supported in the native mobile app: the full video file will play. " "Formatted as HH:MM:SS. The maximum value is 23:59:59." ), display_name=_("Video Stop Time"), scope=Scope.settings, default=datetime.timedelta(seconds=0) ) #front-end code of video player checks logical validity of (start_time, end_time) pair. download_video = Boolean( help=_("Allow students to download versions of this video in different formats if they cannot use the edX video" " player or do not have access to YouTube. You must add at least one non-YouTube URL " "in the Video File URLs field."), display_name=_("Video Download Allowed"), scope=Scope.settings, default=False ) html5_sources = List( help=_("The URL or URLs where you've posted non-YouTube versions of the video. Each URL must end in .mpeg," " .mp4, .ogg, or .webm and cannot be a YouTube URL. (For browser compatibility, we strongly recommend" " .mp4 and .webm format.) Students will be able to view the first listed video that's compatible with" " the student's computer. To allow students to download these videos, " "set Video Download Allowed to True."), display_name=_("Video File URLs"), scope=Scope.settings, ) track = String( help=_("By default, students can download an .srt or .txt transcript when you set Download Transcript " "Allowed to True. 
If you want to provide a downloadable transcript in a different format, we recommend " "that you upload a handout by using the Upload a Handout field. If this isn't possible, you can post a " "transcript file on the Files & Uploads page or on the Internet, and then add the URL for the " "transcript here. Students see a link to download that transcript below the video."), display_name=_("Downloadable Transcript URL"), scope=Scope.settings, default='' ) download_track = Boolean( help=_("Allow students to download the timed transcript. A link to download the file appears below the video." " By default, the transcript is an .srt or .txt file. If you want to provide the transcript for " "download in a different format, upload a file by using the Upload Handout field."), display_name=_("Download Transcript Allowed"), scope=Scope.settings, default=False ) # `sub` is deprecated field and should not be used in future. Now, transcripts are primarily handled in VAL and # backward compatibility for the video modules already using this field has been ensured. sub = String( help=_("The default transcript for the video, from the Default Timed Transcript field on the Basic tab. " "This transcript should be in English. You don't have to change this setting."), display_name=_("Default Timed Transcript"), scope=Scope.settings, default="" ) show_captions = Boolean( help=_("Specify whether the transcripts appear with the video by default."), display_name=_("Show Transcript"), scope=Scope.settings, default=True ) # Data format: {'de': 'german_translation', 'uk': 'ukrainian_translation'} transcripts = Dict( help=_("Add transcripts in different languages." " Click below to specify a language and upload an .srt transcript file for that language."), displa
y_name=_("Transcript Languages"), scope=Scope.settings, default={} ) transcript_language = String( help=_("Preferred language for transcript."), display_name=_("Preferred language for transcript"),
scope=Scope.preferences, default="en" ) transcript_download_format = String( help=_("Transcript file format to download by user."), scope=Scope.preferences, values=[ # Translators: This is a type of file used for captioning in the video player. {"display_name": _("SubRip (.srt) file"), "value": "srt"}, {"display_name": _("Text (.txt) file"), "value": "txt"} ], default='srt', ) speed = Float( help=_("The last speed that the user specified for the video."), scope=Scope.user_state ) global_speed = Float( help=_("The default speed for the video."), scope=Scope.preferences, default=1.0 ) auto_advance = Boolean( help=_("Specify whether to advance automatically to the next unit when the video ends."), scope=Scope.preferences, # The default is True because this field only has an effect when auto-advance controls are enabled # (globally enabled through feature flag and locally enabled through course setting); in that case # it's good to start auto-advancing and let the student disable it, instead of the other way around # (requiring the user to enable it). When auto-advance controls are hidden, this field won't be used. default=True, ) youtube_is_available = Boolean( help=_("Specify whether YouTube is available for the user."), scope=Scope.user_info, default=True ) handout = String( help=_("Upload a handout to accompany this video. Students can download the handout by " "clicking Download Handout under the video."), display_name=_("Upload Handout"), scope=Scope.settings, ) only_on_web = Boolean( help=_( "Spec