Dataset schema (one record per source file):
  repo_name   string, 5-100 chars
  path        string, 4-231 chars
  language    string, 1 class
  license     string, 15 classes
  size        int64, 6-947k
  score       float64, 0-0.34
  prefix      string, 0-8.16k chars
  middle      string, 3-512 chars
  suffix      string, 0-8.17k chars
ufieeehw/IEEE2015
ros/dynamixel_motor/dynamixel_driver/scripts/set_servo_config.py
Python
gpl-2.0
7,282
0.006866
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Software License Agreement (BSD License) # # Copyright (c) 2010-2011, Antons Rebguns. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of University of Arizona nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. __author__ = 'Antons Rebguns' __copyright__ = 'Copyright (c) 2010-2011 Antons Rebguns' __license__ = 'BSD' __maintainer__ = 'Antons Rebguns' __email__ = 'anton@email.arizona.edu' import sys from optparse import OptionParser import roslib roslib.load_manifest('dynamixel_driver') from dynamixel_driver import dynamixel_io if __name__ == '__main__': usage_msg = 'Usage: %prog [options] MOTOR_IDs' desc_msg = 'Sets various configuration options of specified Dynamixel servo motor.' epi_msg = 'Example: %s --port=/dev/ttyUSB1 --baud=57600 --baud-rate=1 --return-delay=1 5 9 23' % sys.argv[0] parser = OptionParser(usage=usage_msg, description=desc_msg, epilog=epi_msg) parser.add_option('-p', '--port', metavar='PORT', default='/dev/ttyUSB0', help='motors of specified controllers are connected to PORT [default: %default]') parser.add_option('-b', '--baud', metavar='BAUD', type='int', default=1000000, help='connection to serial port will be established at BAUD bps [default: %default]') parser.add_option('-r', '--baud-rate', type='int', metavar='RATE', dest='baud_rate', help='set servo motor communication speed') parser.add_option('-d', '--return-delay', type='int', metavar='DELAY', dest='return_delay', help='set servo motor return packet delay time') parser.add_option('--cw-angle-limit', type='int', metavar='CW_A
NGLE', dest='cw_
angle_limit', help='set servo motor CW angle limit') parser.add_option('--ccw-angle-limit', type='int', metavar='CCW_ANGLE', dest='ccw_angle_limit', help='set servo motor CCW angle limit') parser.add_option('--min-voltage-limit', type='int', metavar='MIN_VOLTAGE', dest='min_voltage_limit', help='set servo motor minimum voltage limit') parser.add_option('--max-voltage-limit', type='int', metavar='MAX_VOLTAGE', dest='max_voltage_limit', help='set servo motor maximum voltage limit') (options, args) = parser.parse_args(sys.argv) print options if len(args) < 2: parser.print_help() exit(1) port = options.port baudrate = options.baud motor_ids = args[1:-2] print 'motor ids', motor_ids try: dxl_io = dynamixel_io.DynamixelIO(port, baudrate) except dynamixel_io.SerialOpenError, soe: print 'ERROR:', soe else: for motor_id in motor_ids: print motor_id, type(motor_id) motor_id = int(motor_id) ping_res = dxl_io.ping(motor_id) if ping_res: # check if baud rate needs to be changed if options.baud_rate: valid_rates = (1,3,4,7,9,16,34,103,207,250,251,252) if options.baud_rate not in valid_rates: print 'Requested baud rate is invalid, please use one of the following: %s' % str(valid_rates) if options.baud_rate <= 207: print 'Setting baud rate to %d bps' % int(2000000.0/(options.baud_rate + 1)) elif options.baud_rate == 250: print 'Setting baud rate to %d bps' % 2250000 elif options.baud_rate == 251: print 'Setting baud rate to %d bps' % 2500000 elif options.baud_rate == 252: print 'Setting baud rate to %d bps' % 3000000 dxl_io.set_baud_rate(motor_id, options.baud_rate) # check if return delay time needs to be changed if options.return_delay is not None: if options.return_delay < 0 or options.return_delay > 254: print 'Requested return delay time is out of valie range (0 - 254)' print 'Setting return delay time to %d us' % (options.return_delay * 2) dxl_io.set_return_delay_time(motor_id, options.return_delay) # check if CW angle limit needs to be changed if options.cw_angle_limit is not None: print 'Setting CW angle limit to %d' % options.cw_angle_limit dxl_io.set_angle_limit_cw(motor_id, options.cw_angle_limit) # check if CCW angle limit needs to be changed if options.ccw_angle_limit is not None: print 'Setting CCW angle limit to %d' % options.ccw_angle_limit dxl_io.set_angle_limit_ccw(motor_id, options.ccw_angle_limit) else: print "NOT SETTING CCW ANGLE LIMIT" # check if minimum voltage limit needs to be changed if options.min_voltage_limit: print 'Setting minimum voltage limit to %d' % options.min_voltage_limit dxl_io.set_voltage_limit_min(motor_id, options.min_voltage_limit) # check if maximum voltage limit needs to be changed if options.max_voltage_limit: print 'Setting maximum voltage limit to %d' % options.max_voltage_limit dxl_io.set_voltage_limit_max(motor_id, options.max_voltage_limit) print 'done' else: print 'Unable to connect to Dynamixel motor with ID %d' % motor_id
ToontownUprising/src
otp/distributed/DistributedTestObject.py
Python
mit
717
0.001395
from direct.distributed import DistributedObject


class DistributedTestObject(DistributedObject.DistributedObject):
    def setRequiredField(self, r):
        self.requiredField = r

    def setB(self, B):
        self.B = B

    def setBA(self, BA):
        self.BA = BA

    def setBO(self, BO):
        self.BO = BO

    def setBR(self, BR):
        self.BR = BR

    def setBRA(self, BRA):
        self.BRA = BRA

    def setBRO(self, BRO):
        self.BRO = BRO

    def setBROA(self, BROA):
        self.BROA = BROA

    def gotNonReqThatWasntSet(self):
        for field in ('B', 'BA', 'BO', 'BR', 'BRA', 'BRO', 'BROA'):
            if hasattr(self, field):
                return True
        return False
consideratecode/csrf_example
manage.py
Python
mit
810
0
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "csrf_example.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
GDGLima/contentbox
third_party/filetransfers/backends/url.py
Python
apache-2.0
405
0.004938
from django.http import HttpResponseRedirect
from django.utils.encoding import smart_str


def serve_file(request, file, **kwargs):
    """Serves files by redirecting to file.url (e.g., useful for Amazon S3)"""
    return HttpResponseRedirect(smart_str(file.url))


def public_download_url(file, **kwargs):
    """Directs downloads to file.url (useful for normal file system storage)"""
    return file.url
GabrielBrascher/cloudstack
plugins/hypervisors/ovm/src/main/scripts/vm/hypervisor/ovm/OvmNetworkModule.py
Python
apache-2.0
15,306
0.008363
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from OvmCommonModule import * import traceback import time import re logger = OvmLogger("OvmNetwork") class Filter: class Network: IFNAME_LO = r'(lo)' IFNAME_BRIDGE = r'(xenbr\d+|vlan\d+)' IFNAME_PIF = r'(eth\d+$|bond\d+$)' IFNAME_VLAN = r'(eth\d+.\d+$|bond\d+.\d+$)' class Parser(object): ''' classdocs ''' def findall(self, pattern, samples): """
@param pattern: search pattern @param result: Parser line execution result @return : list of search find result of Parser which has same pattern findall Parser find all pattern in a string """ result = [] for line in samples: items = re.findall(pattern, line)
for item in items: result.append(item) return result def checkPattern(self, pattern, cmd_result): """ @param pattern: search pattern @param cmd_result: Parser line execution result @return : True (if pattern is occurred) """ for line in cmd_result: items = re.findall(pattern, line) if len(items) > 0: return True return False def search(self, cmd_result, pattern): return None class OvmVlanDecoder(json.JSONDecoder): def decode(self, jStr): deDict = asciiLoads(jStr) vlan = OvmVlan() setAttrFromDict(vlan, 'vid', deDict, int) setAttrFromDict(vlan, 'pif', deDict) return vlan class OvmVlanEncoder(json.JSONEncoder): def default(self, obj): if not isinstance(obj, OvmVlan): raise Exception("%s is not instance of OvmVlan"%type(obj)) dct = {} safeDictSet(obj, dct, 'name') safeDictSet(obj, dct, 'vid') safeDictSet(obj, dct, 'pif') return dct def toOvmVlan(jStr): return json.loads(jStr, cls=OvmVlanDecoder) def fromOvmVlan(vlan): return normalizeToGson(json.dumps(vlan, cls=OvmVlanEncoder)) class OvmBridgeDecoder(json.JSONDecoder): def decode(self, jStr): deDic = asciiLoads(jStr) bridge = OvmBridge() setAttrFromDict(bridge, 'name', deDic) setAttrFromDict(bridge, 'attach', deDic) return bridge class OvmBridgeEncoder(json.JSONEncoder): def default(self, obj): if not isinstance(obj, OvmBridge): raise Exception("%s is not instance of OvmBridge"%type(obj)) dct = {} safeDictSet(obj, dct, 'name') safeDictSet(obj, dct, 'attach') safeDictSet(obj, dct, 'interfaces') return dct def toOvmBridge(jStr): return json.loads(jStr, cls=OvmBridgeDecoder) def fromOvmBridge(bridge): return normalizeToGson(json.dumps(bridge, cls=OvmBridgeEncoder)) class OvmInterface(OvmObject): name = '' class OvmVlan(OvmInterface): vid = 0 pif = '' class OvmBridge(OvmInterface): attach = '' interfaces = [] class OvmNetwork(OvmObject): ''' Network ''' @property def pifs(self): return self._getInterfaces("pif") @property def vlans(self): return self._getInterfaces("vlan") @property def bridges(self): return self._getInterfaces("bridge") def __init__(self): self.Parser = Parser() def _createVlan(self, vlan): """ @param jsonString : parameter from client side @return : succ xxxxx ex. jsonString => {vid:100, pif:eth0} ex. return => """ #Pre-condition #check Physical Interface Name if vlan.pif not in self.pifs.keys(): msg = "Physical Interface(%s) does not exist" % vlan.pif logger.debug(self._createVlan, msg) raise Exception(msg) #Pre-condition #check Vlan Interface Name ifName = "%s.%s" % (vlan.pif, vlan.vid) if ifName in self.vlans.keys(): msg = "Vlan Interface(%s) already exist, return it" % ifName logger.debug(self._createVlan, msg) return self.vlans[ifName] doCmd(['vconfig', 'add', vlan.pif, vlan.vid]) self.bringUP(ifName) logger.debug(self._createVlan, "Create vlan %s successfully"%ifName) return self.vlans[ifName] def _deleteVlan(self, name): if name not in self.vlans.keys(): raise Exception("No vlan device %s found"%name) vlan = self.vlans[name] self.bringDown(vlan.name) doCmd(['vconfig', 'rem', vlan.name]) logger.debug(self._deleteVlan, "Delete vlan %s successfully"%vlan.name) def _createBridge(self, bridge): """ @return : success ex. {bridge:xapi100, attach:eth0.100} create bridge interface, and attached it cmd 1: ip link add bridge cmd 2: ip link set dev """ if "xenbr" not in bridge.name and "vlan" not in bridge.name: raise Exception("Invalid bridge name %s. Bridge name must be in partten xenbr/vlan, e.g. 
xenbr0"%bridge.name) #pre-condition #check Bridge Interface Name if bridge.name in self.bridges.keys(): msg = "Bridge(%s) already exist, return it" % bridge.name logger.debug(self._createBridge, msg) return self.bridges[bridge.name] #pre-condition #check attach must exist #possible to attach in PIF or VLAN if bridge.attach not in self.vlans.keys() and bridge.attach not in self.pifs.keys(): msg = "%s is not either pif or vlan" % bridge.attach logger.error(self._createBridge, msg) raise Exception(msg) doCmd(['ip', 'link', 'add', 'name', bridge.name, 'type', 'bridge']) doCmd(['ip', 'link', 'set', 'dev', bridge.attach, 'master', bridge.name]) self.bringUP(bridge.name) logger.debug(self._createBridge, "Create bridge %s on %s successfully"%(bridge.name, bridge.attach)) return self.bridges[bridge.name] def _getBridges(self): return self.bridges.keys() def _getVlans(self): return self.vlans.keys() def _deleteBridge(self, name): if name not in self.bridges.keys(): raise Exception("Can not find bridge %s"%name) bridge = self.bridges[name] if bridge.attach in bridge.interfaces: bridge.interfaces.remove(bridge.attach) if len(bridge.interfaces) != 0: logger.debug(self._deleteBridge, "There are still some interfaces(%s) on bridge %s"%(bridge.interfaces, bridge.name)) return False self.bringDown(bridge.name) doCmd(['ip', 'link', 'del', bridge.name]) logger.debug(self._deleteBridge, "Delete bridge %s successfully"%bridge.name) return True def _getInterfaces(self, type): """ @param type : ["pif", "bridge", "tap"] @return : dictionary of Interface Objects get All Interfaces based on type """ devices = os.listdir('/sys/class/net') ifs = {} if type == "pif": devs = self.Parser.findall(Filter.Network.IFNAME_PIF, devices) for dev in set(devs): ifInst = OvmInterface() ifInst.name = dev ifs[dev] = ifIn
MithileshCParab/HackerRank-10DaysOfStatistics
Problem Solving/Algorithms/Implementation/migratory_birds.py
Python
apache-2.0
925
0.007568
#!/bin/python3

import math
import os
import random
import re
import sys


# Complete the migratoryBirds function below.
# {1:2, 2:4, 3:3, 4:4}
def migratoryBirds(arr):
    frequentBird, frequency = 1, 0
    birdsDict = {}
    for i in arr:
        if i not in birdsDict.keys():
            birdsDict[i] = 1
        else:
            birdsDict[i] = birdsDict[i] + 1
    for bird in birdsDict.keys():
        if birdsDict[bird] > frequency:
            frequency = birdsDict[bird]
            frequentBird = bird
        if birdsDict[bird] == frequency:
            if bird < frequentBird:
                frequentBird = bird
    return frequentBird


if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')

    arr_count = int(input().strip())
    arr = list(map(int, input().rstrip().split()))

    result = migratoryBirds(arr)

    fptr.write(str(result) + '\n')
    fptr.close()
bohlian/frappe
frappe/commands/__init__.py
Python
mit
1,535
0.024756
# Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, absolute_import, print_function

import sys
import click
import cProfile
import pstats
import frappe
import frappe.utils
from functools import wraps
from six import StringIO

click.disable_unicode_literals_warning = True


def pass_context(f):
    @wraps(f)
    def _func(ctx, *args, **kwargs):
        profile = ctx.obj['profile']
        if profile:
            pr = cProfile.Profile()
            pr.enable()

        ret = f(frappe._dict(ctx.obj), *args, **kwargs)

        if profile:
            pr.disable()
            s = StringIO()
            ps = pstats.Stats(pr, stream=s)\
                .sort_stats('cumtime', 'tottime', 'ncalls')
            ps.print_stats()

            # print the top-100
            for line in s.getvalue().splitlines()[:100]:
                print(line)

        return ret

    return click.pass_context(_func)


def get_site(context):
    try:
        site = context.sites[0]
        return site
    except (IndexError, TypeError):
        print('Please specify --site sitename')
        sys.exit(1)


def call_command(cmd, context):
    return click.Context(cmd, obj=context).forward(cmd)


def get_commands():
    # prevent circular imports
    from .docs import commands as doc_commands
    from .scheduler import commands as scheduler_commands
    from .site import commands as site_commands
    from .translate import commands as translate_commands
    from .utils import commands as utils_commands

    return list(set(doc_commands + scheduler_commands + site_commands +
                    translate_commands + utils_commands))


commands = get_commands()
tchitchikov/goulash
python/pullData/src/pull.py
Python
apache-2.0
2,524
0.004358
import csv
import numpy
import pandas
import pymongo
import requests
from datetime import datetime
from io import StringIO

mongo_client = pymongo.MongoClient("localhost", 27017)
financial_db = mongo_client.financial_data
financial_collection = financial_db.data


class Pull:
    def __call__(self, source, tickers, start_date, end_date):
        if source == 'Google':
            results = self.google_call(tickers, start_date, end_date)
            return results
        elif source == 'Database':
            results = self.database_call(tickers, start_date, end_date)
            return results

    def google_call(self, tickers, start_date, end_date):
        """
        google_call makes a call to the google finance api for historical data
        Args:
            None (uses the class variables)
        Returns:
            None (sets self.results)
        """
        results = {}
        for ticker in tickers:
            data_string = "https://www.google.com/finance/historical?q={ticker_symbol}&startdate={start_date}&enddate={end_date}&output=csv".format(
                ticker_symbol=ticker,
                start_date=start_date,
                end_date=end_date
            )
            df = pandas.read_csv(StringIO(requests.get(data_string).text))
            df['Return'] = df.Close - df.Close.shift(-1)
            df['DailyPeriodicReturn'] = (df['Return'] / df.Close.shift(-1))
            df['ContinuouslyCompoundingDailyPeriodicReturn'] = numpy.log(df.Close / df.Close.shift(-1))
            df = df.fillna(0.0)
            results[ticker] = {
                "symbol": ticker,
                "date_added": datetime.utcnow(),
                "data": df.to_dict(orient="records"),
                "close_prices": list(df.Close.values),
                "returns": list(df.Return.values),
                "daily_periodic_return": list(df.DailyPeriodicReturn.values),
                "continuous_daily_periodic_return": list(df.ContinuouslyCompoundingDailyPeriodicReturn.values),
                "start_date": start_date,
                "end_date": end_date,
                "url": data_string
            }
        return results

    def database_call(self, tickers, start_date, end_date):
        """
        database_call makes a call to mongodb for the latest data
        Args:
            None
        """
        results = {}
        for ticker in tickers:
            results[ticker] = financial_collection.find({"ticker": ticker})[:][0]
        return results
mldbai/mldb
testing/MLDB-2126-export-structured.py
Python
apache-2.0
1,480
0.001351
#
# MLDB-2126-export-structured.py
# Mathieu Marquis Bolduc, 2017-01-25
# This file is part of MLDB. Copyright 2017 mldb.ai inc. All rights reserved.
#

import tempfile
import codecs
import os

from mldb import mldb, MldbUnitTest, ResponseException

tmp_dir = os.getenv('TMP')


class MLDB2126exportstructuredTest(MldbUnitTest):  # noqa

    def assert_file_content(self, filename, lines_expect):
        f = codecs.open(filename, 'rb', 'utf8')
        for index, expect in enumerate(lines_expect):
            line = f.readline()[:-1]
            self.assertEqual(line, expect)

    def test_row(self):
        # create the dataset
        mldb.put('/v1/datasets/patate', {'type': 'tabular'})
        mldb.post('/v1/datasets/patate/rows', {
            'rowName': 0,
            'columns': [
                ['x.a', 1, 0],
                ['x.b', 2, 0]
            ]
        })
        mldb.post('/v1/datasets/patate/commit')

        tmp_file = tempfile.NamedTemporaryFile(dir=tmp_dir)

        res = mldb.post('/v1/procedures', {
            'type': 'export.csv',
            'params': {
                'exportData': 'select x as x from patate',
                'dataFileUrl': 'file://' + tmp_file.name,
            }
        })
        mldb.log(res)

        lines_expect = ['x.a,x.b', '1,2']
        self.assert_file_content(tmp_file.name, lines_expect)


if __name__ == '__main__':
    mldb.run_tests()
astrobin/astrobin
astrobin_apps_platesolving/migrations/0012_platesolvingadvancedtask.py
Python
agpl-3.0
824
0.002427
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-03-17 20:26
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('astrobin_apps_platesolving', '0011_update_platesolvingadvanced_settings_sample_raw_frame_file_verbose_name'),
    ]

    operations = [
        migrations.CreateModel(
            name='PlateSolvingAdvancedTask',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('serial_number', models.CharField(max_length=32)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('active', models.BooleanField(default=True)),
                ('task_params', models.TextField()),
            ],
        ),
    ]
polyaxon/polyaxon-api
polyaxon_lib/__init__.py
Python
mit
572
0.001748
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

from .modes import Modes
from . import models
from . import bridges
from . import layers
from . import processing
from .libs import *  # noqa
from . import activations
from . import initializations
from . import losses
from . import metrics
from . import optimizers
from . import regularizations
from .rl import explorations, environments as envs, memories, stats, utils as rl_utils
from . import variables
from . import datasets
from . import estimators
from . import experiments
danlrobertson/servo
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/manifestupdate.py
Python
mpl-2.0
25,973
0.001617
import itertools import os import urlparse from collections import namedtuple, defaultdict from wptmanifest.node import (DataNode, ConditionalNode, BinaryExpressionNode, BinaryOperatorNode, VariableNode, StringNode, NumberNode, UnaryExpressionNode, UnaryOperatorNode, KeyValueNode) from wptmanifest.backends import conditional from wptmanifest.backends.conditional import ManifestItem import expected """Manifest structure used to update the expected results of a test Each manifest file is represented by an ExpectedManifest that has one or more TestNode children, one per test in the manifest. Each TestNode has zero or more SubtestNode children, one for each known subtest of the test. In these representations, conditionals expressions in the manifest are not evaluated upfront but stored as python functions to be evaluated at runtime. When a result for a test is to be updated set_result on the [Sub]TestNode is called to store the new result, alongside the existing conditional that result's run info matched, if any. Once all new results are known, coalesce_expected is called to compute the new set of results and conditionals. The AST of the underlying parsed manifest is updated with the changes, and the result is serialised to a file. """ class ConditionError(Exception): def __init__(self, cond=None): self.cond = cond class UpdateError(Exception): pass Value = namedtuple("Value", ["run_info", "value"]) def data_cls_getter(output_node, visited_node): # visited_node is intentionally unused if output_node is None: return ExpectedManifest elif isinstance(output_node, ExpectedManifest): return TestNode elif isinstance(output_node, TestNode): return SubtestNode else: raise ValueError class ExpectedManifest(ManifestItem): def __init__(self, node, test_path=None, url_base=None, property_order=None, boolean_properties=None): """Object representing all the tests in a particular manifest :param node: AST Node associated with this object. If this
is None, a new AST is created to associate with this manifest. :param test_path: Path of the test file associated with this manifest.
:param url_base: Base url for serving the tests in this manifest. :param property_order: List of properties to use in expectation metadata from most to least significant. :param boolean_properties: Set of properties in property_order that should be treated as boolean. """ if node is None: node = DataNode(None) ManifestItem.__init__(self, node) self.child_map = {} self.test_path = test_path self.url_base = url_base assert self.url_base is not None self.modified = False self.boolean_properties = boolean_properties self.property_order = property_order self.update_properties = { "lsan": LsanUpdate(self), } def append(self, child): ManifestItem.append(self, child) if child.id in self.child_map: print "Warning: Duplicate heading %s" % child.id self.child_map[child.id] = child def _remove_child(self, child): del self.child_map[child.id] ManifestItem._remove_child(self, child) def get_test(self, test_id): """Return a TestNode by test id, or None if no test matches :param test_id: The id of the test to look up""" return self.child_map.get(test_id) def has_test(self, test_id): """Boolean indicating whether the current test has a known child test with id test id :param test_id: The id of the test to look up""" return test_id in self.child_map @property def url(self): return urlparse.urljoin(self.url_base, "/".join(self.test_path.split(os.path.sep))) def set_lsan(self, run_info, result): """Set the result of the test in a particular run :param run_info: Dictionary of run_info parameters corresponding to this run :param result: Lsan violations detected""" self.update_properties["lsan"].set(run_info, result) def coalesce_properties(self, stability): for prop_update in self.update_properties.itervalues(): prop_update.coalesce(stability) class TestNode(ManifestItem): def __init__(self, node): """Tree node associated with a particular test in a manifest :param node: AST node associated with the test""" ManifestItem.__init__(self, node) self.subtests = {} self._from_file = True self.new_disabled = False self.update_properties = { "expected": ExpectedUpdate(self), "max-asserts": MaxAssertsUpdate(self), "min-asserts": MinAssertsUpdate(self) } @classmethod def create(cls, test_id): """Create a TestNode corresponding to a given test :param test_type: The type of the test :param test_id: The id of the test""" url = test_id name = url.rsplit("/", 1)[1] node = DataNode(name) self = cls(node) self._from_file = False return self @property def is_empty(self): ignore_keys = set(["type"]) if set(self._data.keys()) - ignore_keys: return False return all(child.is_empty for child in self.children) @property def test_type(self): """The type of the test represented by this TestNode""" return self.get("type", None) @property def id(self): """The id of the test represented by this TestNode""" return urlparse.urljoin(self.parent.url, self.name) def disabled(self, run_info): """Boolean indicating whether this test is disabled when run in an environment with the given run_info :param run_info: Dictionary of run_info parameters""" return self.get("disabled", run_info) is not None def set_result(self, run_info, result): """Set the result of the test in a particular run :param run_info: Dictionary of run_info parameters corresponding to this run :param result: Status of the test in this run""" self.update_properties["expected"].set(run_info, result) def set_asserts(self, run_info, count): """Set the assert count of a test """ self.update_properties["min-asserts"].set(run_info, count) 
self.update_properties["max-asserts"].set(run_info, count) def _add_key_value(self, node, values): ManifestItem._add_key_value(self, node, values) if node.data in self.update_properties: new_updated = [] self.update_properties[node.data].updated = new_updated for value in values: new_updated.append((value, [])) def clear(self, key): """Clear all the expected data for this test and all of its subtests""" self.updated = [] if key in self._data: for child in self.node.children: if (isinstance(child, KeyValueNode) and child.data == key): child.remove() del self._data[key] break for subtest in self.subtests.itervalues(): subtest.clear(key) def append(self, node): child = ManifestItem.append(self, node) self.subtests[child.name] = child def get_subtest(self, name): """Return a SubtestNode corresponding to a particular subtest of the current test, creating a new one if no subtest with that name already exists. :param name: Name of the subtest""" if name in self.subtests: return self.subtests[name] else: subtest = SubtestNode.create(name) self.append(subtest) return subtest def coalesce_p
jgmanzanas/CMNT_004_15
project-addons/vt_flask_middleware/config.py
Python
agpl-3.0
705
0.004255
import os


class Config(object):
    DEBUG = False
    TESTING = False
    SECRET_KEY = 'A0Zr18h/3yX R~XHH!jmN]LWX/,?RT'
    DATABASE = {
        'engine': 'playhouse.pool.PooledPostgresqlExtDatabase',
        'name': 'middleware',
        'user': 'comunitea',
        'port': '5434',
        'host': 'localhost',
        'max_connections': None,
        'autocommit': True,
        'autorollback': True,
        'stale_timeout': 600}
    NOTIFY_URL = "https://www.visiotechsecurity.com/?option=com_sync&task=sync.syncOdoo"
    NOTIFY_USER = os.environ.get('NOTIFY_USER')
    NOTIFY_PASSWORD = os.environ.get('NOTIFY_PASSWORD')
ismail-s/warehouse
tests/unit/packaging/test_services.py
Python
apache-2.0
9,105
0
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may o
btain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agr
eed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import io import os.path import boto3.session import botocore.exceptions import freezegun import pretend import pytest import redis from zope.interface.verify import verifyClass from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage from warehouse.packaging.services import ( RedisDownloadStatService, LocalFileStorage, S3FileStorage, ) @freezegun.freeze_time("2012-01-14") class TestRedisDownloadStatService: def test_verify_service(self): assert verifyClass(IDownloadStatService, RedisDownloadStatService) def test_creates_redis(self, monkeypatch): redis_obj = pretend.stub() redis_cls = pretend.stub( from_url=pretend.call_recorder(lambda u: redis_obj), ) monkeypatch.setattr(redis, "StrictRedis", redis_cls) url = pretend.stub() svc = RedisDownloadStatService(url) assert svc.redis is redis_obj assert redis_cls.from_url.calls == [pretend.call(url)] @pytest.mark.parametrize( ("keys", "result"), [ ([], 0), ([5, 7, 8], 20), ] ) def test_get_daily_stats(self, keys, result): svc = RedisDownloadStatService("") svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys)) call_keys = ( ["downloads:hour:12-01-14-00:foo"] + [ "downloads:hour:12-01-13-{:02d}:foo".format(i) for i in reversed(range(24)) ] + ["downloads:hour:12-01-12-23:foo"] ) assert svc.get_daily_stats("foo") == result assert svc.redis.mget.calls == [pretend.call(*call_keys)] @pytest.mark.parametrize( ("keys", "result"), [ ([], 0), ([5, 7, 8], 20), ] ) def test_get_weekly_stats(self, keys, result): svc = RedisDownloadStatService("") svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys)) call_keys = [ "downloads:daily:12-01-{:02d}:foo".format(i + 7) for i in reversed(range(8)) ] assert svc.get_weekly_stats("foo") == result assert svc.redis.mget.calls == [pretend.call(*call_keys)] @pytest.mark.parametrize( ("keys", "result"), [ ([], 0), ([5, 7, 8], 20), ] ) def test_get_monthly_stats(self, keys, result): svc = RedisDownloadStatService("") svc.redis = pretend.stub(mget=pretend.call_recorder(lambda *a: keys)) call_keys = [ "downloads:daily:12-01-{:02d}:foo".format(i) for i in reversed(range(1, 15)) ] + [ "downloads:daily:11-12-{:02d}:foo".format(i + 15) for i in reversed(range(17)) ] assert svc.get_monthly_stats("foo") == result assert svc.redis.mget.calls == [pretend.call(*call_keys)] class TestLocalFileStorage: def test_verify_service(self): assert verifyClass(IFileStorage, LocalFileStorage) def test_basic_init(self): storage = LocalFileStorage("/foo/bar/") assert storage.base == "/foo/bar/" def test_create_service(self): request = pretend.stub( registry=pretend.stub( settings={"files.path": "/the/one/two/"}, ), ) storage = LocalFileStorage.create_service(None, request) assert storage.base == "/the/one/two/" def test_gets_file(self, tmpdir): with open(str(tmpdir.join("file.txt")), "wb") as fp: fp.write(b"my test file contents") storage = LocalFileStorage(str(tmpdir)) file_object = storage.get("file.txt") assert file_object.read() == b"my test file contents" def test_raises_when_file_non_existant(self, tmpdir): storage = LocalFileStorage(str(tmpdir)) with pytest.raises(FileNotFoundError): storage.get("file.txt") def test_stores_file(self, tmpdir): filename = str(tmpdir.join("testfile.txt")) with open(filename, "wb") as fp: 
fp.write(b"Test File!") storage_dir = str(tmpdir.join("storage")) storage = LocalFileStorage(storage_dir) storage.store("foo/bar.txt", filename) with open(os.path.join(storage_dir, "foo/bar.txt"), "rb") as fp: assert fp.read() == b"Test File!" def test_stores_two_files(self, tmpdir): filename1 = str(tmpdir.join("testfile1.txt")) with open(filename1, "wb") as fp: fp.write(b"First Test File!") filename2 = str(tmpdir.join("testfile2.txt")) with open(filename2, "wb") as fp: fp.write(b"Second Test File!") storage_dir = str(tmpdir.join("storage")) storage = LocalFileStorage(storage_dir) storage.store("foo/first.txt", filename1) storage.store("foo/second.txt", filename2) with open(os.path.join(storage_dir, "foo/first.txt"), "rb") as fp: assert fp.read() == b"First Test File!" with open(os.path.join(storage_dir, "foo/second.txt"), "rb") as fp: assert fp.read() == b"Second Test File!" class TestS3FileStorage: def test_verify_service(self): assert verifyClass(IFileStorage, S3FileStorage) def test_basic_init(self): bucket = pretend.stub() storage = S3FileStorage(bucket) assert storage.bucket is bucket def test_create_service(self): session = boto3.session.Session() request = pretend.stub( find_service=pretend.call_recorder(lambda name: session), registry=pretend.stub(settings={"files.bucket": "froblob"}), ) storage = S3FileStorage.create_service(None, request) assert request.find_service.calls == [pretend.call(name="aws.session")] assert storage.bucket.name == "froblob" def test_gets_file(self): s3key = pretend.stub(get=lambda: {"Body": io.BytesIO(b"my contents")}) bucket = pretend.stub(Object=pretend.call_recorder(lambda path: s3key)) storage = S3FileStorage(bucket) file_object = storage.get("file.txt") assert file_object.read() == b"my contents" assert bucket.Object.calls == [pretend.call("file.txt")] def test_raises_when_key_non_existant(self): def raiser(): raise botocore.exceptions.ClientError( {"Error": {"Code": "NoSuchKey", "Message": "No Key!"}}, "some operation", ) s3key = pretend.stub(get=raiser) bucket = pretend.stub(Object=pretend.call_recorder(lambda path: s3key)) storage = S3FileStorage(bucket) with pytest.raises(FileNotFoundError): storage.get("file.txt") assert bucket.Object.calls == [pretend.call("file.txt")] def test_passes_up_error_when_not_no_such_key(self): def raiser(): raise botocore.exceptions.ClientError( {"Error": {"Code": "SomeOtherError", "Message": "Who Knows!"}}, "some operation", ) s3key = pretend.stub(get=raiser) bucket = pretend.stub(Object=lambda path: s3key) storage = S3FileStorage(bucket) with pytest.raises(botocore.exceptions.ClientError): storage.get("file.txt") def test_stores_file(self, tmpdir): filename = str(tmpdir.join("testfile.txt")) with open(filename, "wb") as fp: fp.write(b"Test File!") bucket = pretend.stub( upload_file=pretend.call_recorder(lambda filename, key: None), ) storage
jobiols/management-system
mgmtsystem_hazard_risk/__openerp__.py
Python
agpl-3.0
1,727
0
# -*- encoding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2010 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    "name": "Hazard Risk",
    "version": "8.0.1.1.0",
    "author": "Savoir-faire Linux, Odoo Community Association (OCA)",
    "website": "http://www.savoirfairelinux.com",
    "license": "AGPL-3",
    "category": "Management System",
    "depends": [
        'mgmtsystem_hazard',
        'hr'
    ],
    "data": [
        'security/ir.model.access.csv',
        'data/mgmtsystem_hazard_risk_computation.xml',
        'data/mgmtsystem_hazard_risk_type.xml',
        'views/res_company.xml',
        'views/mgmtsystem_hazard.xml',
        'views/mgmtsystem_hazard_risk_type.xml',
        'views/mgmtsystem_hazard_risk_computation.xml',
        'views/mgmtsystem_hazard_residual_risk.xml',
    ],
    "installable": True,
}
mzdaniel/oh-mainline
vendor/packages/celery/celery/events/dumper.py
Python
agpl-3.0
2,533
0.001974
# -*- coding: utf-8 -*-
"""
celery.events.dumper
~~~~~~~~~~~~~~~~~~~~

This is a simple program that dumps events to the console
as they happen. Think of it like a `tcpdump` for Celery events.

:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.

"""
from __future__ import absolute_import

import sys

from datetime import datetime

from ..app import app_or_default
from ..datastructures import LRUCache

TASK_NAMES = LRUCache(limit=0xFFF)

HUMAN_TYPES = {"worker-offline": "shutdown",
               "worker-online": "started",
               "worker-heartbeat": "heartbeat"}


def humanize_type(type):
    try:
        return HUMAN_TYPES[type.lower()]
    except KeyError:
        return type.lower().replace("-", " ")


class Dumper(object):

    def on_event(self, event):
        timestamp = datetime.fromtimestamp(event.pop("timestamp"))
        type = event.pop("type").lower()
        hostname = event.pop("hostname")
        if type.startswith("task-"):
            uuid = event.pop("uuid")
            if type in ("task-received", "task-sent"):
                task = TASK_NAMES[uuid] = "%s(%s) args=%s kwargs=%s" % (
                        event.pop("name"), uuid,
                        event.pop("args"),
                        event.pop("kwargs"))
            else:
                task = TASK_NAMES.get(uuid, "")
            return self.format_task_event(hostname, timestamp,
                                          type, task, event)
        fields = ", ".join("%s=%s" % (key, event[key])
                           for key in sorted(event.keys()))
        sep = fields and ":" or ""
        print("%s [%s] %s%s %s" % (hostname, timestamp,
                                   humanize_type(type), sep, fields))

    def format_task_event(self, hostname, timestamp, type, task, event):
        fields = ", ".join("%s=%s" % (key, event[key])
                           for key in sorted(event.keys()))
        sep = fields and ":" or ""
        print("%s [%s] %s%s %s %s" % (hostname, timestamp,
                                      humanize_type(type), sep, task, fields))


def evdump(app=None):
    sys.stderr.write("-> evdump: starting capture...\n")
    app = app_or_default(app)
    dumper = Dumper()
    conn = app.broker_connection()
    recv = app.events.Receiver(conn, handlers={"*": dumper.on_event})
    try:
        recv.capture()
    except (KeyboardInterrupt, SystemExit):
        conn and conn.close()


if __name__ == "__main__":
    evdump()
partofthething/home-assistant
homeassistant/components/ovo_energy/config_flow.py
Python
apache-2.0
3,440
0.000291
"""Config flow to configure the OVO Energy integration.""" import aiohttp from ovoenergy.ovoenergy import OVOEnergy import voluptuous as vol from homeassistant import config_entries from homeassistant.config_entries import ConfigFlow from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DOMAIN # pylint: disable=unused-import REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) USER_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a OVO Energy config flow.""" VERSION = 1 CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL def __init__(self): """Initialize the flow.""" self.username = None async def async_step_user(self, user_input=None): """Handle a flow initiated by the user.""" errors = {} if user_input is not None: client = OVOEnergy() try: authenticated = await client.authenticate( user_input[CONF_USERNAME], user_input[CONF_PASSWORD] ) except aiohttp.ClientError: errors["base"] = "cannot_connect" else: if authenticated: await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() return self.async_create_entry( title=client.username, data={ CONF_USERNAME: user_input[CONF_USERNAME], CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) errors["base"] = "invalid_auth" return self.async_show_form( step_id="user", data_schema=USER_SCHEMA, errors=errors ) async def async_step_reauth(self, user_input): """Handle configuration by re-auth.""" errors = {} if user_input and user_input.get(CONF_USERNAME): self.username = user_input[CONF_USERNAME] self.context["title_placeholders"] = {CONF_USERNAME: self.username} if user_input is not None and user_input.get(CONF_PASSWORD) is not None: client = OVOEnergy() try: authenticated = await client.authenticate(
self.username, user_input[CONF_PASSWORD] ) except aiohttp.ClientError: errors["base"] = "connection_error" else: if authenticated: await self.async_set_unique_id(self.username) for entry in self._async_current_entries(): if entry.unique_id == self.unique_id: self.hass.config_entries.asyn
c_update_entry( entry, data={ CONF_USERNAME: self.username, CONF_PASSWORD: user_input[CONF_PASSWORD], }, ) return self.async_abort(reason="reauth_successful") errors["base"] = "authorization_error" return self.async_show_form( step_id="reauth", data_schema=REAUTH_SCHEMA, errors=errors )
V11/volcano
server/sqlmap/plugins/dbms/sybase/connector.py
Python
mit
2,499
0.002401
#!/usr/bin/env python """ Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ try: import _mssql import pymssql except ImportError: pass import logging from lib.core.convert import utf8encode from lib.core.data import conf from lib.core.data import logger from lib.core.exception import SqlmapConnectionException from plugins.generic.connector import Connector as GenericConnector class Connector(GenericConnector): """ Homepage: http://pymssql.sourceforge.net/ User guide: http://pymssql.sourceforge.net/examples_pymssql.php API: http://pymssql.sourceforge.net/ref_pymssql.php Debian package: python-pymssql License: LGPL Possible connectors: http://wiki.python.org/moin/SQL%20Server Important note: pymssql library on your system MUST be version 1.0.2 to work, get it from http://sourceforge.net/projects/pymssql/files/pymssql/1.0.2/ """ def __init__(self): GenericConnector.__init__(self) def connect(self): self.initConnection() try: self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout) except pymssql.OperationalError, msg: raise SqlmapConnectionException(msg) self.initCursor() self.printConnected() def fetchall(self): try: return self.cursor.fetchall() except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " ")) return None def execute(self, query): retVal = False try: self.cursor.execute(utf8encode(query)) retVal = True except (pymssql.OperationalError, pymssql.ProgrammingError), msg: logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " ")) except pymssql.InternalError, ms
g: raise SqlmapConnectionException(msg) return retVal def select(self, query): retVal = None if self.execute(query): retVal = self.fetchall() try: self.connector.commit() except pymssql.Opera
tionalError: pass return retVal
boundlessgeo/geogig-py
src/geogigpy/diff.py
Python
bsd-3-clause
2,681
0.001119
# -*- coding: utf-8 -*- """ *************************************************************************** diff.py --------------------- Date : November 2013 Copyright : (C) 2013-2016 Boundless, http://boundlessgeo.com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'November 2013' __copyright__ = '(C) 2013-2016 Boundless, http://boundlessgeo.com' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from feature import Feature from geogig import NULL_ID TYPE_MODIFIED = "Modified" TYPE_ADDED = "Added" TYPE_REMOVED = "Removed" ATTRIBUTE_DIFF_MODIFIED, ATTRIBUTE_DIFF_ADDED, ATTRIBUTE_DIFF_REMOVED, ATTRIBUTE_DIFF_UNCHANGED = ["M", "A", "R", "U"] class Diffentry(object): '''A difference between two references for a given path''' def __init__(self, repo, oldcommitref, newcommitref, oldref, newref, path): self.repo = repo self.path = path self.oldref = oldref self.newref = newref self.oldcommitref = oldcommitref self.newcommitref = newcommitref def oldobject(self): if self.oldref == NULL_ID: return None else: return Feature(self.repo, self.oldcommitref, self.path) def newobject(self): if self.newref == NULL_ID: return None else: return Feature(self.repo, self.newcommitref, self.path) def featurediff(self): return self.repo.featurediff(self.oldcommitref, self.newcommitref, self.path) def type(self): if self.oldref == NULL_ID: return TYPE_ADDED
elif self.newref == NULL_ID: return TYPE_REMOVED else: return TYPE_MODIFIED def __str__(self): if self.o
ldref == NULL_ID: return "%s %s (%s)" % (TYPE_ADDED, self.path, self.newref) elif self.newref == NULL_ID: return TYPE_REMOVED + " " + self.path else: return "%s %s (%s --> %s)" % (TYPE_MODIFIED, self.path, self.oldref, self.newref)
ties/py-sonic
libsonic/connection.py
Python
gpl-3.0
100,484
0.001513
""" This file is part of py-sonic. py-sonic is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. py-sonic is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with py-sonic. If not, see <http://www.gnu.org/licenses/> """ from base64 import b64encode from urllib import urlencode from .errors import * from pprint import pprint from cStringIO import StringIO from netrc import netrc import json, urllib2, httplib, logging, socket, ssl, sys API_VERSION = '1.13.0' logger = logging.getLogger(__name__) class HTTPSConnectionChain(httplib.HTTPSConnection): _preferred_ssl_protos = sorted([ p for p in dir(ssl) if p.startswith('PROTOCOL_') ], reverse=True) _ssl_working_proto = None def _create_sock(self): sock = socket.create_connection((self.host, self.port), self.timeout) if self._tunnel_host: self.sock = sock self._tunnel() return sock def connect(self): if self._ssl_working_proto is not None: # If we have a working proto, let's use that straight away logger.debug("Using known working proto: '%s'", self._ssl_working_proto) sock = self._create_sock() self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=self._ssl_working_proto) return # Try connecting via the different SSL protos in preference order for proto_name in self._preferred_ssl_protos: sock = self._create_sock() proto = getattr(ssl, proto_name, None) try: self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=proto) except: sock.close() else: # Cache the working ssl version HTTPSConnectionChain._ssl_working_proto = proto break class HTTPSHandlerChain(urllib2.HTTPSHandler): def https_open(self, req): return self.do_open(HTTPSConnectionChain, req) # install opener urllib2.install_opener(urllib2.build_opener(HTTPSHandlerChain())) class PysHTTPRedirectHandler(urllib2.HTTPRedirectHandler): """ This class is used to override the default behavior of the HTTPRedirectHandler, which does *not* redirect POST data """ def redirect_request(self, req, fp, code, msg, headers, newurl): m = req.get_method() if (code in (301, 302, 303, 307) and m in ("GET", "HEAD") or code in (301, 302, 303) and m == "POST"): newurl = newurl.replace(' ', '%20') newheaders = dict((k, v) for k, v in req.headers.items() if k.lower() not in ("content-length", "content-type") ) data = None if req.has_data(): data = req.get_data() return urllib2.Request(newurl, data=data, headers=newheaders, origin_req_host=req.get_origin_req_host(), unverifiable=True) else: raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp) class Connection(object): def __init__(self, baseUrl, username=None, password=None, port=4040, serverPath='/rest', appName='py-sonic', apiVersion=API_VERSION, insecure=False, useNetrc=None): """ This will create a connection to your subsonic server baseUrl:str The base url for your server. Be sure to use "https" for SSL connections. If you are using a port other than the default 4040, be sure to specify that with the port argument. Do *not* append it here. ex: http://subsonic.example.com If you are running subsonic under a different path, specify that with the "serverPath" arg, *not* here. 
For example, if your subsonic lives at: https://mydomain.com:8080/path/to/subsonic/rest You would set the following: baseUrl = "https://mydomain.com" port = 8080 serverPath = "/path/to/subsonic/rest" username:str The username to use for the connection. This can be None if `useNetrc' is True (and you have a valid entry in your netrc file) password:str The password to use for the connection. This can be None if `useNetrc' is True (and you have a valid entry in your netrc file) port:int The port number to connect on. The default for unencrypted subsonic connections is 4040 serverPath:str The base resource path for the subsonic views. This is useful if you have your subsonic server behind a proxy and the path that you are proxying is different from the default of '/rest'. Ex: serverPath='/path/to/subs' The full url that would be built then would be (assuming defaults and using "example.com" and you are using the "ping" view): http://example.com:4040/path/to/subs/ping.view appName:str The name of your application. apiVersion:str The API version you wish to use for your application. Subsonic will throw an error if you try to use/send an api version higher than what the server supports. See the Subsonic API docs to find the Subsonic version -> API version t
able. This is useful if you are connecting to an older version of Subsonic. insecure:bool This will allow you to use self signed certificates when connecting if set to True. useNetrc:str|bool You can either specify a specific netrc formatted file or True to use your default
netrc file ($HOME/.netrc). """ self._baseUrl = baseUrl self._hostname = baseUrl.split('://')[1].strip() self._username = username self._rawPass = password self._netrc = None if useNetrc is not None: self._process_netrc(useNetrc) elif username is None or password is None: raise CredentialError('You must specify either a username/password ' 'combination or "useNetrc" must be either True or a string ' 'representing a path to a netrc file') self._port = int(port) self._apiVersion = apiVersion self._appName = appName self._serverPath = serverPath.strip('/') self._insecure = insecure self._opener = self._getOpener(self._username, self._rawPass) # Properties def setBaseUrl(self, url): self._baseUrl = url self._opener = self._getOpener(self._username, self._rawPass) baseUrl = property(lambda s: s._baseUrl, setBaseUrl) def setPort(self, port): self._port = int(port) port = property(lambda s: s._port, setPort) def setUsername(self, username): self._username = use
MCRSoftwares/AcadSocial
universidades/migrations/0002_auto_20150118_1319.py
Python
gpl-2.0
640
0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('universidades', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='universidademodel',
            name='nome',
            field=models.CharField(max_length=256),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='universidademodel',
            name='sigla',
            field=models.CharField(max_length=32),
            preserve_default=True,
        ),
    ]
spacedogXYZ/sms_checkin
sms_checkin/settings/development.py
Python
agpl-3.0
883
0.001133
from .common import *

INTERNAL_IPS = ['127.0.0.1', ]

CORS_ORIGIN_WHITELIST = (
    'localhost:8000',
)

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}

Q_CLUSTER = {
    'name': 'DjangORM',
    'workers': 2,
    'timeout': 90,
    'retry': 120,
    'queue_limit': 50,
    'bulk': 10,
    'orm': 'default',
    'catch_up': False  # do not replay missed schedules past
}

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'reminders': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
        'messages': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
    },
}
lrq3000/pyFileFixity
pyFileFixity/lib/gooey/gui/windows/views.py
Python
mit
111
0.036036
CONFIG_SCREEN = 'config'
RUNNING_SCREEN = 'running'
SUCCESS_SCREEN = 'success'
ERROR_SCREEN = 'error'
tosmun/AdventOfCode
solutions/day21/p2/main.py
Python
apache-2.0
1,368
0.067982
from itertools import combinations

START_P_HP = 100
START_P_DMG = 0
START_P_A = 0

START_B_HP = 100
START_B_DMG = 8
START_B_A = 2

WEAPONS = [
    [8, 4, 0],
    [10, 5, 0],
    [25, 6, 0],
    [40, 7, 0],
    [74, 8, 0]
]
ARMOR = [
    [13, 0, 1],
    [31, 0, 2],
    [53, 0, 3],
    [75, 0, 4],
    [102, 0, 5]
]
# Include 'no armor' option
ARMOR.append([0, 0, 0])
RINGS = [
    [25, 1, 0],
    [50, 2, 0],
    [100, 3, 0],
    [20, 0, 1],
    [40, 0, 2],
    [80, 0, 3]
]
# Include 'no ring' options
RINGS.append([0, 0, 0])
RINGS.append([0, 0, 0])


def main():
    cost = None
    # 1 Weapon
    for w in combinations(WEAPONS, 1):
        # 0-1 Armor
        for a in combinations(ARMOR, 1):
            # 0-2 Rings
            for r in combinations(RINGS, 2):
                bonuses = calc_bonuses(w, a, r)
                p_hp = START_P_HP
                p_cost = bonuses[0]
                p_dmg = bonuses[1] + START_P_DMG
                p_a = bonuses[2] + START_P_A
                win = is_win(START_B_HP, START_B_DMG, START_B_A, p_hp, p_dmg, p_a)
                # We are seeking to lose the fight, so not win
                # We are also looking for highest cost
                if not win and (cost is None or p_cost > cost):
                    cost = p_cost
    print cost


def is_win(b_hp, b_dmg, b_a, p_hp, p_dmg, p_a):
    b_dmg = max(b_dmg - p_a, 1)
    p_dmg = max(p_dmg - b_a, 1)
    # <= because we start first
    return (b_hp / p_dmg) <= (p_hp / b_dmg)


def calc_bonuses(w, a, r):
    ret = [0, 0, 0]
    for i in [w, a, r]:
        for j in i:
            ret[0] += j[0]
            ret[1] += j[1]
            ret[2] += j[2]
    return ret


if __name__ == "__main__":
    main()
rlowrance/re-local-linear
Logger.py
Python
mit
886
0.003386
import datetime
import sys
import pdb

from directory import directory

if False:
    pdb.set_trace()  # avoid warning message from pyflakes


class Logger(object):
    # from stack overflow: how do I duplicate sys.stdout to a log file in python
    def __init__(self, logfile_path=None, logfile_mode='w', base_name=None):
        def path(s):
            return directory('log') + s + datetime.datetime.now().isoformat('T') + '.log'

        self.terminal = sys.stdout
        clean_path = logfile_path.replace(':', '-') if base_name is None else path(base_name)
        self.log = open(clean_path, logfile_mode)

    def write(self, message):
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        # no-op, but needed so Logger can stand in for sys.stdout
        pass


if False:
    # usage example
    sys.stdout = Logger('path/to/log/file')
    # now print statements write on both stdout and the log file
spiceqa/virt-test
virttest/lvsb.py
Python
gpl-2.0
4,505
0
""" Higher order classes and functions for Libvirt Sandbox (lxc) container testing :copyright: 2013 Red Hat Inc. """ import datetime import time import logging import lvsb_base # This utility function lets test-modules quickly create a list of all # sandbox aggregate types, themselves containing a list of individual # sandboxes. def make_sandboxes(params, env, extra_ns=None): """ Return list of instantiated lvsb_testsandboxes classes from params :param params: an undiluted Params instance :param env: the current env instance :param extra_ns: An extra, optional namespace to search for classes """ namespace = globals() # stuff in this module # For specialized sandbox types, allow their class to be defined # inside test module or elsewhere. if extra_ns is not None: namespace.update(extra_ns) # copy in additional symbols names = namespace.keys() # Test may require more than one sandbox agregator class pobs = params.objects('lvsb_testsandboxes') # manditory parameter # filter out non-TestSandboxes subclasses for name in names: try: if not issubclass(namespace[name], lvsb_base.TestSandboxes): # Working on name list, okay to modify dict del namespace[name]
except TypeError: # Symbol wasn't a class, just ignore it pass # Return a list of instantiated sandbox_testsandboxes's classes return [namespace[type_name](params, env) for type_name in pobs] # TestSa
ndboxes subclasses defined below, or inside other namespaces like # a test module. They simply help the test-module iterate over many # aggregate manager classes and the sandboxes they contain. class TestSimpleSandboxes(lvsb_base.TestSandboxes): """ Simplistic sandbox aggregate manager that just executes a command """ def __init__(self, params, env): """ Initialize to run, all SandboxCommandBase's """ super(TestSimpleSandboxes, self).__init__(params, env) self.init_sandboxes() # create instances of SandboxCommandBase # Point all of them at the same local uri self.for_each(lambda sb: sb.add_optarg('-c', self.uri)) # Use each instances name() method to produce name argument self.for_each(lambda sb: sb.add_optarg('-n', sb.name)) # Command should follow after a -- self.for_each(lambda sb: sb.add_mm()) # Each one gets the same command (that's why it's simple) self.for_each(lambda sb: sb.add_pos(self.command)) def results(self, each_timeout=5): """ Run sandboxe(s), allowing each_timeout to complete, return output list """ # Sandboxes run asynchronously, prevent them from running forever start = datetime.datetime.now() total_timeout_seconds = each_timeout * self.count timeout_at = start + datetime.timedelta(seconds=total_timeout_seconds) # No need to write a method just to call the run method self.for_each(lambda sb: sb.run()) while datetime.datetime.now() < timeout_at: # Wait until number of running sandboxes is zero if bool(self.are_running()): time.sleep(0.1) # Don't busy-wait continue else: # none are running break # Needed for accurate time in logging message below end = datetime.datetime.now() # Needed for logging message if none exited before timeout still_running = self.are_running() # Cause all exited sessions to clean up when sb.stop() called self.for_each(lambda sb: sb.auto_clean(True)) # If raise, auto_clean will make sure cleanup happens if bool(still_running): raise lvsb_base.SandboxException("%d of %d sandboxes are still " "running after " "the timeout of %d seconds." % (still_running, self.count, total_timeout_seconds)) # Kill off all sandboxes, just to be safe self.for_each(lambda sb: sb.stop()) logging.info("%d sandboxe(s) finished in %s", self.count, end - start) # Return a list of stdout contents from each return self.for_each(lambda sb: sb.recv())
botify-labs/moto
tests/test_swf/responses/test_domains.py
Python
apache-2.0
3,834
0
import boto
from boto.swf.exceptions import SWFResponseError
import sure  # noqa

from moto import mock_swf_deprecated


# RegisterDomain endpoint
@mock_swf_deprecated
def test_register_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    all_domains = conn.list_domains("REGISTERED")
    domain = all_domains["domainInfos"][0]

    domain["name"].should.equal("test-domain")
    domain["status"].should.equal("REGISTERED")
    domain["description"].should.equal("A test domain")


@mock_swf_deprecated
def test_register_already_existing_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    conn.register_domain.when.called_with(
        "test-domain", "60", description="A test domain"
    ).should.throw(SWFResponseError)


@mock_swf_deprecated
def test_register_with_wrong_parameter_type():
    conn = boto.connect_swf("the_key", "the_secret")

    conn.register_domain.when.called_with(
        "test-domain", 60, description="A test domain"
    ).should.throw(SWFResponseError)


# ListDomains endpoint
@mock_swf_deprecated
def test_list_domains_order():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("b-test-domain", "60")
    conn.register_domain("a-test-domain", "60")
    conn.register_domain("c-test-domain", "60")

    all_domains = conn.list_domains("REGISTERED")
    names = [domain["name"] for domain in all_domains["domainInfos"]]
    names.should.equal(["a-test-domain", "b-test-domain", "c-test-domain"])


@mock_swf_deprecated
def test_list_domains_reverse_order():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("b-test-domain", "60")
    conn.register_domain("a-test-domain", "60")
    conn.register_domain("c-test-domain", "60")

    all_domains = conn.list_domains("REGISTERED", reverse_order=True)
    names = [domain["name"] for domain in all_domains["domainInfos"]]
    names.should.equal(["c-test-domain", "b-test-domain", "a-test-domain"])


# DeprecateDomain endpoint
@mock_swf_deprecated
def test_deprecate_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")
    conn.deprecate_domain("test-domain")

    all_domains = conn.list_domains("DEPRECATED")
    domain = all_domains["domainInfos"][0]

    domain["name"].should.equal("test-domain")


@mock_swf_deprecated
def test_deprecate_already_deprecated_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")
    conn.deprecate_domain("test-domain")

    conn.deprecate_domain.when.called_with(
        "test-domain"
    ).should.throw(SWFResponseError)


@mock_swf_deprecated
def test_deprecate_non_existent_domain():
    conn = boto.connect_swf("the_key", "the_secret")

    conn.deprecate_domain.when.called_with(
        "non-existent"
    ).should.throw(SWFResponseError)


# DescribeDomain endpoint
@mock_swf_deprecated
def test_describe_domain():
    conn = boto.connect_swf("the_key", "the_secret")
    conn.register_domain("test-domain", "60", description="A test domain")

    domain = conn.describe_domain("test-domain")
    domain["configuration"][
        "workflowExecutionRetentionPeriodInDays"].should.equal("60")
    domain["domainInfo"]["description"].should.equal("A test domain")
    domain["domainInfo"]["name"].should.equal("test-domain")
    domain["domainInfo"]["status"].should.equal("REGISTERED")


@mock_swf_deprecated
def test_describe_non_existent_domain():
    conn = boto.connect_swf("the_key", "the_secret")

    conn.describe_domain.when.called_with(
        "non-existent"
    ).should.throw(SWFResponseError)
techiaith/seilwaith
srdk/htk/SRDK_Train.py
Python
apache-2.0
1,957
0.036791
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os, sys
import traceback
from argparse import ArgumentParser


class SRDKRunError(Exception):
    def __init__(self, message):
        self.msg = message


def run_commands(cmds):
    for cmd in cmds:
        cmd = u" ".join(cmd)
        print("Rhedeg %s" % cmd)
        returncode = os.system(cmd)
        try:
            if returncode != 0:
                exception_str = ["Problem yn rhedeg y gorchymyn:",
                                 " %s" % cmd]
                raise SRDKRunError(u"\n".join(exception_str))
        except SRDKRunError, arg:
            print 'Exception:', arg.msg


def train_singleuser(userid, **args):
    """Hyfforddi model acwstig HTK / Train HTK acoustic model"""
    srdk_cmds = []
    print "SRDK_Train : %s" % userid
    if userid:
        srdk_cmds.append(["rm -rf results/" + userid])
        srdk_cmds.append(["mkdir -p results/" + userid])
    srdk_cmds.append(["SRDK_2_PronunciationDictionary"])
    srdk_cmds.append(["SRDK_4_Transcriptions"])
    if userid:
        srdk_cmds.append(["SRDK_5_CodingAudioData " + userid])
    else:
        srdk_cmds.append(["SRDK_5_CodingAudioData"])
    srdk_cmds.append(["SRDK_6_FlatStart"])
    srdk_cmds.append(["SRDK_7_SilenceModels"])
    srdk_cmds.append(["SRDK_8_Realign"])
    srdk_cmds.append(["SRDK_9_Triphones"])
    srdk_cmds.append(["SRDK_10_TiedStateTriphones"])
    srdk_cmds.append(["SRDK_11_TestModels"])
    if userid:
        srdk_cmds.append(["cp recout.mlf results/" + userid])
    #srdk_cmds.append(["SRDK_12_Release"])
    run_commands(srdk_cmds)


if __name__ == "__main__":
    parser = ArgumentParser(description="Sgript creu model acwstig gyda un gorchymun")
    parser.add_argument('-u', '--userid', dest="userid", required=False,
                        help="userid cyfrannwr benodol")
    parser.set_defaults(func=train_singleuser)
    args = parser.parse_args()
    try:
        args.func(**vars(args))
    except SRDKRunError as e:
        print ("\n**SRDK ERROR**\n")
        print (e)
n4hy/gnuradio
gr-uhd/apps/uhd_rx_cfile.py
Python
gpl-3.0
5,974
0.005524
#!/usr/bin/env python # # Copyright 2011 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # """ Read samples from a UHD device and write to file formatted as binary outputs single precision complex float values or complex short values (interleaved 16 bit signed short integers). """ from gnuradio import gr, eng_notation from gnuradio import uhd from gnuradio.eng_option import eng_option from optparse import OptionParser import sys n2s = eng_notation.num_to_str class rx_cfile_block(gr.top_block): def __init__(self, options, filename): gr.top_block.__init__(self) # Create a UHD device source if options.output_shorts: self._u = uhd.usrp_source(device_addr=options.args, stream_args=uhd.stream_args('sc16')) self._sink = gr.fil
e_sink(gr.sizeof_short*2, filename) else: self._u = uhd.usrp_source(device_addr=options.args, stream_args=uhd.stream_args('fc32')) self._sink = gr.file_sink(gr.sizeof_gr_complex, filename) # Set receiver sample rate self._u.set_samp_rate(options.samp_rate) # Set receive daughterboard gain
if options.gain is None: g = self._u.get_gain_range() options.gain = float(g.start()+g.stop())/2 print "Using mid-point gain of", options.gain, "(", g.start(), "-", g.stop(), ")" self._u.set_gain(options.gain) # Set the subdevice spec if(options.spec): self._u.set_subdev_spec(options.spec, 0) # Set the antenna if(options.antenna): self._u.set_antenna(options.antenna, 0) # Set frequency (tune request takes lo_offset) if(options.lo_offset is not None): treq = uhd.tune_request(options.freq, options.lo_offset) else: treq = uhd.tune_request(options.freq) tr = self._u.set_center_freq(treq) if tr == None: sys.stderr.write('Failed to set center frequency\n') raise SystemExit, 1 # Create head block if needed and wire it up if options.nsamples is None: self.connect(self._u, self._sink) else: if options.output_shorts: self._head = gr.head(gr.sizeof_short*2, int(options.nsamples)) else: self._head = gr.head(gr.sizeof_gr_complex, int(options.nsamples)) self.connect(self._u, self._head, self._sink) input_rate = self._u.get_samp_rate() if options.verbose: print "Args: ", options.args print "Rx gain:", options.gain print "Rx baseband frequency:", n2s(tr.actual_rf_freq) print "Rx DDC frequency:", n2s(tr.actual_dsp_freq) print "Rx Sample Rate:", n2s(input_rate) if options.nsamples is None: print "Receiving samples until Ctrl-C" else: print "Receving", n2s(options.nsamples), "samples" if options.output_shorts: print "Writing 16-bit complex shorts" else: print "Writing 32-bit complex floats" print "Output filename:", filename def get_options(): usage="%prog: [options] output_filename" parser = OptionParser(option_class=eng_option, usage=usage) parser.add_option("-a", "--args", type="string", default="", help="UHD device address args , [default=%default]") parser.add_option("", "--spec", type="string", default=None, help="Subdevice of UHD device where appropriate") parser.add_option("-A", "--antenna", type="string", default=None, help="select Rx Antenna where appropriate") parser.add_option("", "--samp-rate", type="eng_float", default=1e6, help="set sample rate (bandwidth) [default=%default]") parser.add_option("-f", "--freq", type="eng_float", default=None, help="set frequency to FREQ", metavar="FREQ") parser.add_option("-g", "--gain", type="eng_float", default=None, help="set gain in dB (default is midpoint)") parser.add_option( "-s","--output-shorts", action="store_true", default=False, help="output interleaved shorts instead of complex floats") parser.add_option("-N", "--nsamples", type="eng_float", default=None, help="number of samples to collect [default=+inf]") parser.add_option("-v", "--verbose", action="store_true", default=False, help="verbose output") parser.add_option("", "--lo-offset", type="eng_float", default=None, help="set daughterboard LO offset to OFFSET [default=hw default]") (options, args) = parser.parse_args () if len(args) != 1: parser.print_help() raise SystemExit, 1 if options.freq is None: parser.print_help() sys.stderr.write('You must specify the frequency with -f FREQ\n'); raise SystemExit, 1 return (options, args[0]) if __name__ == '__main__': (options, filename) = get_options() tb = rx_cfile_block(options, filename) try: tb.run() except KeyboardInterrupt: pass
dylanseago/LeagueOfLadders
leagueofladders/urls.py
Python
apache-2.0
420
0.004762
from django.conf.urls import patterns, include, url
from django.contrib import admin
from leagueofladders import settings

urlpatterns = patterns('',
    url(r'^l/', include('leagueofladders.apps.myleague.urls', namespace='myleague')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^%s$' % settings.LOGIN_URL[1:], 'django.contrib.auth.views.login'))
Azure/azure-sdk-for-python
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/_storage_management_client.py
Python
mit
63,288
0.006652
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING from azure.mgmt.core import ARMPipelineClie
nt from azure.profiles import KnownProfiles, ProfileDefinition from azure.profiles.multiapiclient import MultiApiClientMixin from msrest import Deserializer, Serializer from ._configuration import StorageManagementClientConfiguration if T
YPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Optional from azure.core.credentials import TokenCredential class _SDKClient(object): def __init__(self, *args, **kwargs): """This is a fake class to support current implemetation of MultiApiClientMixin." Will be removed in final version of multiapi azure-core based client """ pass class StorageManagementClient(MultiApiClientMixin, _SDKClient): """The Azure Storage Management API. This ready contains multiple API versions, to help you deal with all of the Azure clouds (Azure Stack, Azure Government, Azure China, etc.). By default, it uses the latest API version available on public Azure. For production, you should stick to a particular api-version and/or profile. The profile sets a mapping between an operation group and its API version. The api-version parameter sets the default API version if the operation group is not described in the profile. :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. :type subscription_id: str :param api_version: API version to use if no profile is provided, or if missing in profile. :type api_version: str :param base_url: Service URL :type base_url: str :param profile: A profile definition, from KnownProfiles to dict. :type profile: azure.profiles.KnownProfiles :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ DEFAULT_API_VERSION = '2021-08-01' _PROFILE_TAG = "azure.mgmt.storage.StorageManagementClient" LATEST_PROFILE = ProfileDefinition({ _PROFILE_TAG: { None: DEFAULT_API_VERSION, 'usage': '2018-02-01', }}, _PROFILE_TAG + " latest" ) def __init__( self, credential, # type: "TokenCredential" subscription_id, # type: str api_version=None, # type: Optional[str] base_url="https://management.azure.com", # type: str profile=KnownProfiles.default, # type: KnownProfiles **kwargs # type: Any ): self._config = StorageManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) super(StorageManagementClient, self).__init__( api_version=api_version, profile=profile ) @classmethod def _models_dict(cls, api_version): return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} @classmethod def models(cls, api_version=DEFAULT_API_VERSION): """Module depends on the API version: * 2015-06-15: :mod:`v2015_06_15.models<azure.mgmt.storage.v2015_06_15.models>` * 2016-01-01: :mod:`v2016_01_01.models<azure.mgmt.storage.v2016_01_01.models>` * 2016-12-01: :mod:`v2016_12_01.models<azure.mgmt.storage.v2016_12_01.models>` * 2017-06-01: :mod:`v2017_06_01.models<azure.mgmt.storage.v2017_06_01.models>` * 2017-10-01: :mod:`v2017_10_01.models<azure.mgmt.storage.v2017_10_01.models>` * 2018-02-01: :mod:`v2018_02_01.models<azure.mgmt.storage.v2018_02_01.models>` * 2018-03-01-preview: :mod:`v2018_03_01_preview.models<azure.mgmt.storage.v2018_03_01_preview.models>` * 2018-07-01: :mod:`v2018_07_01.models<azure.mgmt.storage.v2018_07_01.models>` * 2018-11-01: :mod:`v2018_11_01.models<azure.mgmt.storage.v2018_11_01.models>` * 2019-04-01: :mod:`v2019_04_01.models<azure.mgmt.storage.v2019_04_01.models>` * 2019-06-01: :mod:`v2019_06_01.models<azure.mgmt.storage.v2019_06_01.models>` * 2020-08-01-preview: :mod:`v2020_08_01_preview.models<azure.mgmt.storage.v2020_08_01_preview.models>` * 
2021-01-01: :mod:`v2021_01_01.models<azure.mgmt.storage.v2021_01_01.models>` * 2021-02-01: :mod:`v2021_02_01.models<azure.mgmt.storage.v2021_02_01.models>` * 2021-04-01: :mod:`v2021_04_01.models<azure.mgmt.storage.v2021_04_01.models>` * 2021-06-01: :mod:`v2021_06_01.models<azure.mgmt.storage.v2021_06_01.models>` * 2021-08-01: :mod:`v2021_08_01.models<azure.mgmt.storage.v2021_08_01.models>` """ if api_version == '2015-06-15': from .v2015_06_15 import models return models elif api_version == '2016-01-01': from .v2016_01_01 import models return models elif api_version == '2016-12-01': from .v2016_12_01 import models return models elif api_version == '2017-06-01': from .v2017_06_01 import models return models elif api_version == '2017-10-01': from .v2017_10_01 import models return models elif api_version == '2018-02-01': from .v2018_02_01 import models return models elif api_version == '2018-03-01-preview': from .v2018_03_01_preview import models return models elif api_version == '2018-07-01': from .v2018_07_01 import models return models elif api_version == '2018-11-01': from .v2018_11_01 import models return models elif api_version == '2019-04-01': from .v2019_04_01 import models return models elif api_version == '2019-06-01': from .v2019_06_01 import models return models elif api_version == '2020-08-01-preview': from .v2020_08_01_preview import models return models elif api_version == '2021-01-01': from .v2021_01_01 import models return models elif api_version == '2021-02-01': from .v2021_02_01 import models return models elif api_version == '2021-04-01': from .v2021_04_01 import models return models elif api_version == '2021-06-01': from .v2021_06_01 import models return models elif api_version == '2021-08-01': from .v2021_08_01 import models return models raise ValueError("API version {} is not available".format(api_version)) @property def blob_containers(self): """Instance depends on the API version: * 2018-02-01: :class:`BlobContainersOperations<azure.mgmt.storage.v2018_02_01.operations.BlobContainersOperations>` * 2018-03-01-preview: :class:`BlobContainersOperations<azure.mgmt.storage.v2018_03_01_preview.operations.BlobContainersOperations>` * 2018-07-01: :class:`BlobContainersOperations<azure.mgmt.storage.v2018_07_01.operations.BlobContainersOperations>` * 2018-11-01: :class:`BlobContainersOperations<azure.mgmt.storage.v2018_11_01.operations.BlobContainersOperations>` * 2019-04-01: :class:`BlobContainersOperations<azure.mgmt.storage.v2019_04_01.operations.BlobContainersOperations>` * 2019-06-01: :class:`BlobContainersOperations<azure.mgmt.storage.v2019_06_01.operations.BlobContainersOperations
jdowner/uuid64
uuid64/__init__.py
Python
mit
23
0
from . uuid64 import *
dleecefft/pcapstats
pbin/parseL4Info.py
Python
apache-2.0
5,615
0.018878
#!/usr/bin/env python # take a large pcap and dump the data into a CSV so it can be analysed by something like R. # # This version we want to know what the source IP is, what the protocol is and based on those # peices of info run a function to grab that data and write a line to a CSV file # # Ignore all traffic sourced from the self IP, pass self ip as on arg # # Parse HTTP data decoded by tshark into additional content. # # Prereqs: pyshark, http://kiminewt.github.io/pyshark/ import pyshark, sys, getopt from datetime import datetime # input and output files ifile='' ofile='' selfip='' # read command line args and bail if not complete if len(sys.argv) != 9: print("Usage: %s -i input.pcap -o output.csv -s 192.168.100.6 -l l4proto " % sys.argv[0]) exit() # Use getopt to avoid param order errors opts, args = getopt.getopt(sys.argv[1:],"i:o:s:l:") for o, a in opts: if o == '-i': ifile=a elif o == '-o': ofile=a elif o == '-s': selfip=a elif o == '-l': l4proto=a elif o == '-h': print("Usage: %s -i input.pcap -o output.csv -s 192.168.100.6 -l l4proto" % sys.argv[0]) else: print("Usage: %s -i input.pcap -o output.csv -s 192.168.100.6 -l l4proto" % sys.argv[0]) # Functions def evall4plist(plist): protolist=[] #plist = plist.strip() if plist.find(',')!=-1: protolist = l4proto.split(",") elif plist.find(' ')!=-1: protolist = l4proto.split(" ") else: protolist.append(plist) #print "Unexpected error, likely bad characters in list of ports :", sys.exc_info()[0] protolist= map(lambda x:x.lower(),protolist) return protolist def readpcap(pfile): return pyshark.FileCapture(pfile) def epochconv(tsstr): # convert the frame time into iso via epoch, clumsy but works better for excel # return list so we can have both in the CSV, epoch and friendly retlist=[] dtobj=datetime.fromtimestamp(float(tsstr)) retlist.append(str(dtobj).strip()) retlist.append(tsstr.strip()) return retlist def appendcsv(rlist): # convert ints and outputline = ",".join(map(str, rlist)) with open(ofile,"a") as outputfile: outputfile.write(outputline + "\n") return def tcpdecode(lyrlst,l4plist): if lyrlst._layer_name.lower() in l4plist : tmplist=[] tmpdict=lyrlst._all_fields for key in tmpdict: tmplist.append(tmpdict[key]) return "#".join(map(str,tmplist)) else: return def udpdecode(lyrlst, l4plist): if lyrlst._layer_name.lower() in l4plist: tmplist=[] tmpdict=lyrlst._all_fields for key in tmpdict: tmplist.append(tmpdict[key]) return "#".join(map(str,tmplist)) else: return def parseTCP(tpkt): #print "running parseTCP" if len(tpkt.layers) > 3: # pass to http module decoded = tcpdecode(tpkt.layers[3],thisproto) rowlist[8]= str(decoded) #rowlist[8]= str(tpkt.layers[3]).replace('\n','') # Complete this section regardless rowlist[3]= 6 rowlist[4]= str(tpkt.ip.src).strip() rowlist[5]= int(tpkt.tcp.dstport) rowlist[6]= int(tpkt.tcp.srcport) rowlist[7]= str(tpkt.tcp.flags).strip() tsstr=str(tpkt.frame_info.time_epoch) dtobj=datetime.fromtimestamp(float(tsstr)) rowlist[0]= dtobj.strftime("%Y%m%d") rowlist[1]= dtobj.strftime("%H:%M:%S.%f") rowlist[2]= tsstr return def parseICMP(ipkt): #print "running parseICMP" rowlist[3]= 1 rowlist[4]= str(ipkt.ip.src).strip() rowlist[5]= int(ipkt.icmp.type) rowlist[6]= int(ipkt.icmp.code) tsstr=str(ipkt.frame_info.time_epoch) dtobj=datetime.fromtimestamp(float(tsstr)) rowlist[0]= dtobj.strftime("%Y-%m-%d") rowlist[1]= dtobj.strftime("%H:%M:%S.%f") rowlist[2]= tsstr return def parseUDP(upkt): #print "running parseUDP" if len(upkt.layers) > 3: # pass to http module decoded = udpdecode(upkt.layers[3],thisproto) 
rowlist[8]= str(decoded) rowlist[3]= 17 rowlist[4]= str(upkt.ip.src).strip() rowlist[5]= int(upkt.udp.dstport) rowlist[6]= int(upkt.udp.srcport) tsstr=str(upkt.frame_info.time_epoch) dtobj=datetime.fromtim
estamp(float(tsstr)) rowlist[0]=
dtobj.strftime("%Y-%m-%d") rowlist[1]= dtobj.strftime("%H:%M:%S.%f") rowlist[2]= tsstr return def parseIPother(ipopkt): print "running parseIP Other " rowlist[3]= int(ipopkt.ip.proto) rowlist[4]= str(ipopkt.ip.src).strip() tsstr=str(ipopkt.frame_info.time_epoch) dtobj=datetime.fromtimestamp(float(tsstr)) rowlist[0]= dtobj.strftime("%Y-%m-%d") rowlist[1]= dtobj.strftime("%H:%M:%S.%f") rowlist[2]= tsstr return def protorouter(evalpkt): # direct if int(evalpkt.ip.proto) == 6: parseTCP(evalpkt) elif int(evalpkt.ip.proto) == 1: parseICMP(evalpkt) elif int(evalpkt.ip.proto) == 17: parseUDP(evalpkt) else: parseIPother(evalpkt) return def initrow(): # iso-tstamp Date, iso-tstamp Time, epoch-tstamp, proto, src-ip, dest port/type, flag/code, src port, payload decode rwlist = [str('iso-date'),str('iso-time'),str('epoch-tstamp'),int(6),str('1.2.3.4'),None,None,None,None] return rwlist # Main flow thiscap = readpcap(ifile) wrstat = True # cheat making a global rowlist=[] thisproto=evall4plist(l4proto) for pkt in thiscap: pktsrc = str(pkt.ip.src) if pktsrc != selfip: #reinit array rowlist = initrow() protorouter(pkt) appendcsv(rowlist)
kiyukuta/chainer
chainer/functions/activation/leaky_relu.py
Python
mit
2,382
0
from chainer import cuda
from chainer import function
from chainer.utils import type_check


def _kern():
    return cuda.elementwise(
        'T cond, T x, T slope', 'T y',
        'y = cond >= 0 ? x : (T)(slope * x)',
        'lrelu')


class LeakyReLU(function.Function):

    """Leaky rectifier unit."""

    def __init__(self, slope=0.2):
        self.slope = slope

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)
        x_type, = in_types
        type_check.expect(x_type.dtype.kind == 'f')

    def forward_cpu(self, x):
        y = x[0].copy()
        y[x[0] < 0] *= self.slope
        if self.slope >= 0:
            self.retain_inputs(())
            self.retain_outputs((0,))
        return y,

    def forward_gpu(self, x):
        y = _kern()(x[0], x[0], self.slope)
        if self.slope >= 0:
            self.retain_inputs(())
            self.retain_outputs((0,))
        return y,

    def backward_cpu(self, x, gy):
        gx = gy[0].copy()
        if self.slope >= 0:
            y = self.output_data
            gx[y[0] < 0] *= self.slope
        else:
            gx[x[0] < 0] *= self.slope
        return gx,

    def backward_gpu(self, x, gy):
        if self.slope >= 0:
            y = self.output_data
            gx = _kern()(y[0], gy[0], self.slope)
        else:
            gx = _kern()(x[0], gy[0], self.slope)
        return gx,


def leaky_relu(x, slope=0.2):
    """Leaky Rectified Linear Unit function.

    This function is expressed as

    .. math:: f(x)=\\max(x, ax),

    where :math:`a` is a configurable slope value.

    Args:
        x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
        :class:`cupy.ndarray`):
            Input variable. A :math:`(s_1, s_2, ..., s_N)`-shaped float array.
        slope (float): Slope value :math:`a`.

    Returns:
        ~chainer.Variable: Output variable. A
        :math:`(s_1, s_2, ..., s_N)`-shaped float array.

    .. admonition:: Example

        >>> x = np.array([[-1, 0], [2, -3], [-2, 1]], 'f')
        >>> x
        array([[-1.,  0.],
               [ 2., -3.],
               [-2.,  1.]], dtype=float32)
        >>> F.leaky_relu(x, slope=0.2).data
        array([[-0.2       ,  0.        ],
               [ 2.        , -0.60000002],
               [-0.40000001,  1.        ]], dtype=float32)

    """
    return LeakyReLU(slope)(x)
endlessm/chromium-browser
third_party/angle/third_party/vulkan-validation-layers/src/scripts/layer_chassis_dispatch_generator.py
Python
bsd-3-clause
99,103
0.005711
#!/usr/bin/python3 -i # # Copyright (c) 2015-2020 The Khronos Group Inc. # Copyright (c) 2015-2020 Valve Corporation # Copyright (c) 2015-2020 LunarG, Inc. # Copyright (c) 2015-2020 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Author: Tobin Ehlis <tobine@google.com> # Author: Mark Lobodzinski <mark@lunarg.com> import os,re,sys import xml.etree.ElementTree as etree from generator import * from collections import namedtuple from common_codegen import * # LayerChassisDispatchGeneratorOptions - subclass of GeneratorOptions. # # Adds options used by LayerChassisDispatchOutputGenerator objects during # layer chassis dispatch file generation. # # Additional members # prefixText - list of strings to prefix generated header with # (usually a copyright statement + calling convention macros). # protectFile - True if multiple inclusion protection should be # generated (based on the filename) around the entire header. # protectFeature - True if #ifndef..#endif protection should be # generated around a feature interface in the header file. # genFuncPointers - True if function pointer typedefs should be # generated # protectProto - If conditional protection should be generated # around prototype declarations, set to either '#ifdef' # to require opt-in (#ifdef protectProtoStr) or '#ifndef' # to require opt-out (#ifndef protectProtoStr). Otherwise # set to None. # protectProtoStr - #ifdef/#ifndef symbol to use around prototype # declarations, if protectProto is set # apicall - string to use for the function declaration prefix, # such as APICALL on Windows. # apientry - string to use for the calling convention macro, # in typedefs, such as APIENTRY. # apientryp - string to use for the calling convention macro # in function pointer typedefs, such as APIENTRYP. 
# indentFuncProto - True if prototype declarations should put each # parameter on a separate line # indentFuncPointer - True if typedefed function pointers should put each # parameter on a separate line # alignFuncParam - if nonzero and parameters are being put on a # separate line, align parameter names at the specified column class LayerChassisDispatchGeneratorOptions(GeneratorOptions): def __init__(self, conventions = None, filename = None, directory = '.', apiname = None, profile = None, versions = '.*', emitversions = '.*', defaultExtensions = None, addExtensions = None, removeExtensions = None, emitExtensions = None, sortProcedure = regSortFeatures, prefixText = "", genFuncPointers = True, protectFile = True, protectFeature = True, apicall = '', apientry = '', apientryp = '', indentFuncProto = True, indentFuncPointer = False, alignFuncParam = 0, expandEnumerants = True): GeneratorOptions.__init__(self, conventions, filename, directory, apiname, profile, versions, emitversions, defaultExtensions, addExtensions, removeExtensions, emitExtensions, sortProcedure) self.prefixText = prefixText self.genFuncPointers = genFuncPointers self.protectFile = protectFile self.protectFeature = protectFeature self.apicall = apicall self.apientry = apientry self.apientryp = apientryp self.indentFuncProto = indentFuncProto self.indentFuncPointer = indentFuncPointer self.alignFuncParam = alignFuncParam self.expandEnumerants = expandEnumerants # LayerChassisDispatchOutputGenerator - subclass of OutputGenerator. # Generates layer chassis non-dispatchable handle-wrapping code. # # ---- methods ---- # LayerChassisDispatchOutputGenerator(errFile, warnFile, diagFile) - args as for OutputGenerator. Defines additional internal state. # ---- methods overriding base class ---- # beginFile(genOpts) # endFile() # beginFeature(interface, emit) # endFeature() # genCmd(cmdinfo) # genStruct() # genType() class LayerChassisDispatchOutputGenerator(OutputGenerator): """Generate layer chassis handle wrapping code based on XML element attributes""" inline_copyright_message = """ // This file is ***GENERATED***. Do Not Edit. // See layer_chassis_dispatch_generator.py for modifications. /* Copyright (c) 2015-2020 The Khronos Group Inc. * Copyright (c) 2015-2020 Valve Corporation * Copyright (c) 2015-2020 LunarG, Inc. * Copyright (c) 2015-2020 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Author: Mark Lobodzinski <mark@lunarg.com> */""" inline_custom_source_preamble = """ #define DISPATCH_MAX_STACK_ALLOCATIONS 32 // The VK_EXT_pipeline_creation_feedback extension returns data from the driver -- we've created a copy of the pnext chain, so // copy the returned data to the caller before freeing the copy's data. void CopyCreatePipelineFeedbackData(const void *src_chain, const void *dst_chain) { auto src_feedback_struct = lvl_find_
in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(src_chain); if (!src_feedback_struct) return; auto dst_feedback_struct = const_cast<VkPipelineCreationFeedbackCreateInfoEXT *>( lvl_find_in_chain<VkPipelineCreationFeedbackCreateInfoEXT>(
dst_chain)); *dst_feedback_struct->pPipelineCreationFeedback = *src_feedback_struct->pPipelineCreationFeedback; for (uint32_t i = 0; i < src_feedback_struct->pipelineStageCreationFeedbackCount; i++) { dst_feedback_struct->pPipelineStageCreationFeedbacks[i] = src_feedback_struct->pPipelineStageCreationFeedbacks[i]; } } VkResult DispatchCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) { auto layer_data = GetLayerDataPtr(get_dispatch_key(device), layer_data_map); if (!wrap_handles) return layer_data->device_dispatch_table.CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); safe_VkGraphicsPipelineCreateInfo *local_pCreateInfos = nullptr; if (pCreateInfos) { local_pCreateInfos = new safe_VkGraphicsPipelineCreateInfo[createInfoCount]; read_lock_guard_t lock(dispatch_lock); for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) { bool uses_color_attachment = false; bool uses_depthstencil_attachment = false; { const auto subpasses_uses_it = layer_data->renderpasses_states.find(layer_data->Unwrap(pCreateInfos[idx0].renderPass)); if (subpas
joshuahoman/vivisect
envi/tests/msp430/iswpb.py
Python
apache-2.0
341
0.017595
from envi.archs.msp430.regs import *
checks = [
    # SWPB
    (
        'DEC r15',
        { 'regs': [(REG_R15, 0xaabb)], 'flags': [(SR_N, 0), (SR_Z, 0), (SR_C, 0), (SR_V, 0)], 'code': "8f10", 'data': "" },
        { 'regs': [(REG_R15, 0xbbaa)], 'flags': [(SR_N, 0), (SR_Z, 0), (SR_C, 0), (SR_V, 0)], 'code': "8f10", 'data': "" }
    ),
]
jeffw16/elephant
nlp/nlpserver.py
Python
mit
849
0.042403
import socket
import nlp


class NLPServer(object):
    def __init__(self, ip, port):
        self.sock = socket.socket()
        self.sock.bind((ip, port))
        self.processor = nlp.NLPProcessor()
        print "Established Server"

    def listen(self):
        import thread
        self.sock.listen(5)
        print "Started listening at port."
        while True:
            c = self.sock.accept()
            cli_sock, cli_addr = c
            try:
                print 'Got connection from', cli_addr
                thread.start_new_thread(self.manageRequest, (cli_sock,))
            except Exception, Argument:
                print Argument
                self.sock.close()
                quit()

    def manageRequest(self, cli_sock):
        data = cli_sock.recv(8192)
        result = self.processor.processQuestion(data)
        cli_sock.send(str(result))
        cli_sock.close()

# server = NLPServer('127.0.0.1', 3369)
import sys
server = NLPServer(str(sys.argv[1]), int(sys.argv[2]))
server.listen()
NSLS-II-XPD/ipython_ophyd
archived/profile_collection-dev/startup/42-energy-calib.py
Python
bsd-2-clause
4,842
0.001239
from __future__ import division, print_function import numpy as np from lmfit.models import VoigtModel from scipy.signal import argrelmax import matplotlib.pyplot as plt def lamda_from_bragg(th, d, n): return 2 * d * np.sin(th / 2.) / n def find_peaks(chi, sides=6, intensity_threshold=0): # Find all potential peaks preliminary_peaks = argrelmax(chi, order=20)[0] # peaks must have at least sides pixels of data to work with preliminary_peaks2 = preliminary_peaks[ np.where(preliminary_peaks < len(chi) - sides)] # make certain that a peak has a drop off which causes the peak height to # be more than twice the height at sides pixels away criteria = chi[preliminary_peaks2] >= 2 * chi[preliminary_peaks2 + sides] criteria *= chi[preliminary_peaks2] >= 2 * chi[preliminary_peaks2 - sides] criteria *= chi[preliminary_peaks2] >= intensity_threshold peaks = preliminary_peaks[np.where(criteria)] left_idxs = peaks - sides right_idxs = peaks + sides peak_centers = peaks left_idxs[left_idxs < 0] = 0 right_idxs[right_idxs > len(chi)] = len(chi) return left_idxs, right_idxs, peak_centers def get_wavelength_from_std_tth(x, y, d_spacings, ns, plot=False): """ Return the wavelength from a two theta scan of a standard Parameters ---------- x: ndarray the two theta coordinates y: ndarray the detector intensity d_spacings: ndarray the dspacings of the standard ns: ndarray the multiplicity of the reflection plot: bool If true plot some of the intermediate data Returns ------- float: The average wavelength float: The standard deviation of the wavelength """ l, r, c = find_peaks(y) lmfit_centers = [] for lidx, ridx, peak_center in zip(l, r, c): mod = VoigtModel() pars = mod.guess(y[lidx: ridx], x=x[lidx: ridx]) out = mod.fit(y[lidx: ridx], pars, x=x[lidx: ridx]) lmfit_centers.append(out.values['center']) lmfit_centers = np.asarray(lmfit_centers) if plot: plt.plot(x, y) plt.plot(x[c], y[c], 'ro') plt.show() wavelengths = [] l_peaks = lmfit_centers[lm
fit_centers < 0.] r_peaks = lmfit_centers[lmfit_centers > 0.] for peak_set in [r_peaks, l_peak
s[::-1]]: for peak_center, d, n in zip(peak_set, d_spacings, ns): tth = np.deg2rad(np.abs(peak_center)) wavelengths.append(lamda_from_bragg(tth, d, n)) return np.average(wavelengths), np.std(wavelengths) from bluesky.callbacks import CollectThenCompute class ComputeWavelength(CollectThenCompute): """ Example ------- >>> cw = ComputeWavelgnth('tth_cal', 'some_detector', d_spacings, ns) >>> RE(scan(...), cw) """ CONVERSION_FACTOR = 12.3984 # keV-Angstroms def __init__(self, x_name, y_name, d_spacings, ns=None): self._descriptors = [] self._events = [] self.x_name = x_name self.y_name = y_name self.d_spacings = d_spacings self.wavelength = None self.wavelength_std = None if ns is None: self.ns = np.ones(self.d_spacings.shape) else: self.ns = ns @property def energy(self): if self.wavelength is None: return None else: return self.CONVERSION_FACTOR / self.wavelength def compute(self): x = [] y = [] for event in self._events: x.append(event['data'][self.x_name]) y.append(event['data'][self.y_name]) x = np.array(x) y = np.array(y) self.wavelength, self.wavelength_std = get_wavelength_from_std_tth(x, y, self.d_spacings, self.ns) print('wavelength', self.wavelength, '+-', self.wavelength_std) print('energy', self.energy) """ if __name__ == '__main__': import os calibration_file = os.path.join('../../data/LaB6_d.txt') # step 0 load data d_spacings = np.loadtxt(calibration_file) for data_file in ['../../data/Lab6_67p8.chi', '../../data/Lab6_67p6.chi']: a = np.loadtxt(data_file) wavechange = [] b = np.linspace(.1, 3, 100) for dx in b: x = a[:, 0] x = np.hstack((np.zeros(1), x)) x = np.hstack((-x[::-2], x)) y = a[:, 1] y = np.hstack((np.zeros(1), y)) y = np.hstack((y[::-1], y)) x = x[:] + dx y = y[:] wavechange.append(get_wavelength_from_std_tth(x, y, d_spacings, np.ones(d_spacings.shape), )[0]) plt.plot(b, wavechange) plt.show() """
feilongfl/micropython
tests/wipy/pin.py
Python
mit
4,685
0.007044
""" This test need a set of pins which can be set as inputs and have no external pull up or pull down connected. """ from machine import Pin import os mch = os.uname().machine if 'LaunchPad' in mch: pin_map = ['GP24', 'GP12', 'GP14', 'GP15', 'GP16', 'GP17', 'GP28', 'GP8', 'GP6', 'GP30', 'GP31', 'GP3', 'GP0', 'GP4', 'GP5'] max_af_idx = 15 elif 'WiPy' in mch: pin_map = ['GP23', 'GP24', 'GP12', 'GP13', 'GP14', 'GP9', 'GP17', 'GP28', 'GP22', 'GP8', 'GP30', 'GP31', 'GP0', 'GP4', 'GP5'] max_af_idx = 15 else: raise Exception('Board not supported!') def test_noinit(): for p in pin_map: pin = Pin(p) pin.value() def test_pin_read(pull): # enable the pull resistor on all pins, then read the value for p in pin_map: pin = Pin(p, mode=Pin.IN, pull=pull) for p in pin_map: print(pin()) def test_pin_af(): for p in pin_map: for af in Pin(p).alt_list(): if af[1] <= max_af_idx: Pin(p, mode=Pin.ALT, alt=af[1]) Pin(p, mode=Pin.ALT_OPEN_DRAIN, alt=af[1]) # test un-initialized pins test_noinit() # test with pull-up and pull-down test_pin_read(Pin.PULL_UP) test_pin_read(Pin.PULL_DOWN) # test all constructor combinations pin = Pin(pin_map[0]) pin = Pin(pin_map[0], mode=Pin.IN) pin = Pin(pin_map[0], mode=Pin.OUT) pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_DOWN) pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_UP) pin = Pin(pin_map[0], mode=Pin.OPEN_DRAIN, pull=Pin.PULL_UP) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_DOWN) pin = Pin(pin_map[0], mode=Pin.OUT, pull=None) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.MED_POWER) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER) pin = Pin(pin_map[0], mode=Pin.OUT, drive=pin.LOW_POWER) pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_DOWN) pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP) pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP) test_pin_af() # try the entire af range on all pins # test pin init and printing pin = Pin(pin_map[0]) pin.init(mode=Pin.IN) print(pin) pin.init(Pin.IN, Pin.PULL_DOWN) print(pin) pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER) print(pin) pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER) print(pin) # test value in OUT mode pin = Pin(pin_map[0], mode=Pin.OUT) pin.value(0) pin.toggle() # test toggle print(pin()) pin.toggle() # test toggle again print(pin()) # test different value settings pin(1) print(pin.value()) pin(0) print(pin.value()) pin.value(1) print(pin()) pin.value(0) print(pin()) # test all getters and setters pin = Pin(pin_map[0], m
ode=Pin.OUT) # mode print(pin.mode() == Pin.OUT) pin.mode(Pin.IN) print(pin.mode() == Pin.IN) # pull pin.pull(None) print(pin.pull() == None) pin.pull(Pin.PULL_DOWN) print(pin.pull() == Pin.PULL_DOWN) # drive pin.drive(Pin.MED_POWER) print(pin.drive() == Pin.MED_POWER) pin.drive(Pin.HIGH_POWER) print(pin.drive() == Pin
.HIGH_POWER) # id print(pin.id() == pin_map[0]) # all the next ones MUST raise try: pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.IN) # incorrect drive value except Exception: print('Exception') try: pin = Pin(pin_map[0], mode=Pin.LOW_POWER, pull=Pin.PULL_UP) # incorrect mode value except Exception: print('Exception') try: pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.HIGH_POWER) # incorrect pull value except Exception: print('Exception') try: pin = Pin('A0', Pin.OUT, Pin.PULL_DOWN) # incorrect pin id except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.IN, Pin.PULL_UP, alt=0) # af specified in GPIO mode except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_UP, alt=7) # af specified in GPIO mode except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP, alt=0) # incorrect af except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=-1) # incorrect af except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=16) # incorrect af except Exception: print('Exception') try: pin.mode(Pin.PULL_UP) # incorrect pin mode except Exception: print('Exception') try: pin.pull(Pin.OUT) # incorrect pull except Exception: print('Exception') try: pin.drive(Pin.IN) # incorrect drive strength except Exception: print('Exception') try: pin.id('ABC') # id cannot be set except Exception: print('Exception')
bronycub/sugarcub
sugarcub/celery.py
Python
gpl-3.0
576
0
from __future__ import absolute_import

import os

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sugarcub.settings')

from django.conf import settings  # noqa

app = Celery('sugarcub')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
kustodian/ansible
lib/ansible/modules/cloud/amazon/kinesis_stream.py
Python
gpl-3.0
46,551
0.001547
#!/usr/bin/python # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: kinesis_stream short_description: Manage a Kinesis Stream. description: - Create or Delete a Kinesis Stream. - Update the retention period of a Kinesis Stream. - Update Tags on a Kinesis Stream. - Enable/disable server side encryption on a Kinesis Stream. version_added: "2.2" requirements: [ boto3 ] author: Allen Sanabria (@linuxdynasty) options: name: description: - The name of the Kinesis Stream you are managing. required: true type: str sh
ards: description: - The number of shards you want to have with this stream. - This is required when I(state=present) type: int retention_period: description: - The length of time (in hours) data records are accessible after they are added to the stream. - The default retention period is 24 hours and can not be less than 24 hours. - The maximum retention period is 168 hours. - The retention period can be modified during any point in
time. type: int state: description: - Create or Delete the Kinesis Stream. default: present choices: [ 'present', 'absent' ] type: str wait: description: - Wait for operation to complete before returning. default: true type: bool wait_timeout: description: - How many seconds to wait for an operation to complete before timing out. default: 300 type: int tags: description: - "A dictionary of resource tags of the form: C({ tag1: value1, tag2: value2 })." aliases: [ "resource_tags" ] type: dict encryption_state: description: - Enable or Disable encryption on the Kinesis Stream. choices: [ 'enabled', 'disabled' ] version_added: "2.5" type: str encryption_type: description: - The type of encryption. - Defaults to C(KMS) choices: ['KMS', 'NONE'] version_added: "2.5" type: str key_id: description: - The GUID or alias for the KMS key. version_added: "2.5" type: str extends_documentation_fragment: - aws - ec2 ''' EXAMPLES = ''' # Note: These examples do not set authentication details, see the AWS Guide for details. # Basic creation example: - name: Set up Kinesis Stream with 10 shards and wait for the stream to become ACTIVE kinesis_stream: name: test-stream shards: 10 wait: yes wait_timeout: 600 register: test_stream # Basic creation example with tags: - name: Set up Kinesis Stream with 10 shards, tag the environment, and wait for the stream to become ACTIVE kinesis_stream: name: test-stream shards: 10 tags: Env: development wait: yes wait_timeout: 600 register: test_stream # Basic creation example with tags and increase the retention period from the default 24 hours to 48 hours: - name: Set up Kinesis Stream with 10 shards, tag the environment, increase the retention period and wait for the stream to become ACTIVE kinesis_stream: name: test-stream retention_period: 48 shards: 10 tags: Env: development wait: yes wait_timeout: 600 register: test_stream # Basic delete example: - name: Delete Kinesis Stream test-stream and wait for it to finish deleting. kinesis_stream: name: test-stream state: absent wait: yes wait_timeout: 600 register: test_stream # Basic enable encryption example: - name: Encrypt Kinesis Stream test-stream. kinesis_stream: name: test-stream state: present encryption_state: enabled encryption_type: KMS key_id: alias/aws/kinesis wait: yes wait_timeout: 600 register: test_stream # Basic disable encryption example: - name: Encrypt Kinesis Stream test-stream. kinesis_stream: name: test-stream state: present encryption_state: disabled encryption_type: KMS key_id: alias/aws/kinesis wait: yes wait_timeout: 600 register: test_stream ''' RETURN = ''' stream_name: description: The name of the Kinesis Stream. returned: when state == present. type: str sample: "test-stream" stream_arn: description: The amazon resource identifier returned: when state == present. type: str sample: "arn:aws:kinesis:east-side:123456789:stream/test-stream" stream_status: description: The current state of the Kinesis Stream. returned: when state == present. type: str sample: "ACTIVE" retention_period_hours: description: Number of hours messages will be kept for a Kinesis Stream. returned: when state == present. type: int sample: 24 tags: description: Dictionary containing all the tags associated with the Kinesis stream. returned: when state == present. 
type: dict sample: { "Name": "Splunk", "Env": "development" } ''' import re import datetime import time from functools import reduce try: import botocore.exceptions except ImportError: pass # Taken care of by ec2.HAS_BOTO3 from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ec2 import HAS_BOTO3, boto3_conn, ec2_argument_spec, get_aws_connection_info from ansible.module_utils._text import to_native def convert_to_lower(data): """Convert all uppercase keys in dict with lowercase_ Args: data (dict): Dictionary with keys that have upper cases in them Example.. FooBar == foo_bar if a val is of type datetime.datetime, it will be converted to the ISO 8601 Basic Usage: >>> test = {'FooBar': []} >>> test = convert_to_lower(test) { 'foo_bar': [] } Returns: Dictionary """ results = dict() if isinstance(data, dict): for key, val in data.items(): key = re.sub(r'(([A-Z]{1,3}){1})', r'_\1', key).lower() if key[0] == '_': key = key[1:] if isinstance(val, datetime.datetime): results[key] = val.isoformat() elif isinstance(val, dict): results[key] = convert_to_lower(val) elif isinstance(val, list): converted = list() for item in val: converted.append(convert_to_lower(item)) results[key] = converted else: results[key] = val return results def make_tags_in_proper_format(tags): """Take a dictionary of tags and convert them into the AWS Tags format. Args: tags (list): The tags you want applied. Basic Usage: >>> tags = [{'Key': 'env', 'Value': 'development'}] >>> make_tags_in_proper_format(tags) { "env": "development", } Returns: Dict """ formatted_tags = dict() for tag in tags: formatted_tags[tag.get('Key')] = tag.get('Value') return formatted_tags def make_tags_in_aws_format(tags): """Take a dictionary of tags and convert them into the AWS Tags format. Args: tags (dict): The tags you want applied. Basic Usage: >>> tags = {'env': 'development', 'service': 'web'} >>> make_tags_in_proper_format(tags) [ { "Value": "web", "Key": "service" }, { "Value": "development", "key": "env" } ] Returns: List """ formatted_tags = list() for key, val in tags.items(): formatted_tags.append({ 'Key': key, 'Value': val }) return formatted_tags def get_tags(client, stream_name, check_mode=False): """Retrieve the tags for a Kinesis Stream. Args: client (botocore.client.EC2): Boto3 client. stream_name (str): Name of the Kinesis stream. Kwargs: check_mode
IlyaDjurin/django-shop
shop/migrations/0018_auto_20170327_1937.py
Python
mit
3,554
0.0039
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-27 16:37
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('shop', '0017_auto_20170327_1934'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tovar',
            name='tovar_image',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image1',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара1'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image10',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара10'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image11',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара11'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image2',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара2'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image3',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара3'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image4',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара4'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image5',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара5'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image6',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара6'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image7',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара7'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image8',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара8'),
        ),
        migrations.AlterField(
            model_name='tovar_img',
            name='tovar_image9',
            field=models.ImageField(blank=True, upload_to='media/products/%Y/%m/%d/', verbose_name='Изображение товара9'),
        ),
    ]
DavidJohnGee/clicrud
clicrud/crud/__init__.py
Python
apache-2.0
3,616
0.000553
""" Co
pyright 2015 Brocade Communications Systems, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIN
D, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging import time import json import sys from clicrud.device.generic import generic def read(queue, finq, ranonceq, **kwargs): _cli_input = "['command', 'commands', 'listofcommands']" _command_list = [] _kwargs = {} _kwargs = kwargs _output_dict = {} # _ranonce = False for key in _kwargs: if key in _cli_input: if key == 'command': _command_list.append(_kwargs.get(key)) if key == 'commands': for key1 in _kwargs.get('commands'): _command_list.append(key1) if key == 'listofcommands': try: _command_file = open(_kwargs.get('listofcommands'), 'r') _output = _command_file.readlines() _command_file.close() for line in _output: line = line.translate(None, '\r\n') _command_list.append(line) except: logging.error("Could not open 'listofcommands' file") # Build transport _transport = generic(**_kwargs) if _transport.err: finq.put('error') _transport.close() return # Now we want to call each command and put the string output in a list for index, command in enumerate(_command_list): _output_dict[command] = _transport.read(command, return_type='string') if _kwargs['setup']._splash is True: sys.stdout.write("\r[%4s/%4s] Complete - " % (len(_command_list), index+1) + time.strftime("%d-%m-%Y") + time.strftime("-%H:%M:%S")) sys.stdout.flush() # PEP8 Fix # if _kwargs.has_key('delay'): if "delay" in _kwargs: time.sleep(_kwargs['delay']) # Sets the ranonce bool if triggered once # if not _ranonce: # _ranonce = True # ranonceq.put(True) ranonceq.put(True) queue.put(_output_dict) # If we need to output to a file, let's do that. # PEP8 Fix # if _kwargs.has_key('fileoutput'): if "fileoutput" in _kwargs: # Create a filename on hostname+date # Output the _output_dict to it in the right format _filename = _transport.hostname _filename += time.strftime("%d-%m-%Y-") + time.strftime("-%H-%M-%S") try: f = open(_filename, 'w') if _kwargs.get('fileformat') == 'json': f.write(json.dumps(_output_dict)) if _kwargs.get('fileformat') == 'string': for command in _command_list: f.write("COMMAND: " + command + "--------------------\r\n") f.write(_output_dict.get(command) + "\r\n\r\n") f.close() except: logging.error("Could not open/create file for output of commands") finq.put('completed_run') _transport.close() # print _command_list
iDigBio/idb-backend
idb/data_api/v1.py
Python
gpl-3.0
4,235
0.012043
from __future__ import division, absolute_import, print_function from flask import current_app, Blueprint, jsonify, url_for, request from idb.helpers.cors import crossdomain from .common import json_error, idbmodel, logger this_version = Blueprint(__name__,__name__) def format_list_item(t,uuid,etag,modified,version,parent): links = {} if t in current_app.config["PARENT_MAP"] and parent is not None: links["".join(current_app.config["PARENT_MAP"][t][:-1])] = url_for(".item",t=current_app.config["PARENT_MAP"][t],u=parent,_external=True) links["".join(t[:-1])] = url_for(".item",t=t,u=uuid,_external=True) return { "idigbio:uuid": uuid, "idigbio:etag": etag, "idigbio:dateModified": modified.isoformat(), "idigbio:version": version, "idigbio:links": links, } def format_item(t,uuid,etag,modified,version,parent,data,siblings,ids): r = format_list_item(t,uuid,etag,modified,version,parent) del r["idigbio:links"]["".join(t[:-1])] for l in r["idigbio:links"]: r["idigbio:links"][l] = [r["idigbio:links"][l]] l = {} if siblings is not None: for k in siblings: l[k] = [] for i in siblings[k]: l[k].append(url_for(".item",t=k,u=i,_external=True)) r["idigbio:data"] = data r["idigbio:links"].update(l) r["idigbio:recordIds"] = ids return r @this_version.route('/<string:t>/<uuid:u>/<string:st>', methods=['GET','OPTIONS']) @crossdomain(origin="*") def subitem(t,u,st): if not (t in current_app.config["SUPPORTED_TYPES"] and st in current_app.config["SUPPORTED_TYPES"]): return json_error(404) limit = request.args.get("limit") if limit is not None: limit = int(limit) else: limit = 100 offset = request.args.get("offset") if offset is not None: offset = int(offset) else: offset = 0 r = {} l = [ format_list_item( st, v["uuid"], v["etag"], v["modified"], v["version"], v["parent"], ) for v in idbmodel.get_children_list(str(u), "".join(st[:-1]),limit=limit,offset=offset) ] r["idigbio:items"] = l r["idigbio:itemCount"] = idbmodel.get_children_count(str(u), "".join(st[:-1])) return jsonify(r) @this_version.route('/<string:t>/<uuid:u>', methods=['GET','OPTIONS']) @crossdomain(origin="*") def item(t,u): if t not in current_app.config["SUPPORTED_TYPES"]: return json_error(404) version = request.args.get("version") v = idbmodel.get_item(str(u), version=version) if v is not None: if v["data"] is None: return json_error(500) if v["type"] + "s" == t: r = format_item( t, v["uuid"], v["etag"], v["modified"], v["version"], v["parent"], v["data"], v["siblings"], v["recordids"] )
return jsonify(r) else: return json_error(404) else: return json_error(404) @this_version.route('/<string:t>', methods=['GET','OPTIONS']) @crossdomain(origin="*") def list(t): if t not in current_app.config["SUPPORTED_TYPES"]: return json_error(404) limit = request.args.get("limit") if limit is not None: limit = int(limit) else: limit = 100 offset = request.args.get("offset") if offse
t is not None: offset = int(offset) else: offset = 0 r = {} l = [ format_list_item( t, v["uuid"], v["etag"], v["modified"], v["version"], v["parent"], ) for v in idbmodel.get_type_list("".join(t[:-1]),limit=limit,offset=offset) ] r["idigbio:items"] = l r["idigbio:itemCount"] = idbmodel.get_type_count("".join(t[:-1])) return jsonify(r) @this_version.route('/', methods=['GET','OPTIONS']) @crossdomain(origin="*") def index(): r = {} for t in current_app.config["SUPPORTED_TYPES"]: r[t] = url_for(".list",t=t,_external=True) return jsonify(r)
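A minimal wiring sketch (not taken from the repository) of how a blueprint such as this_version above is typically mounted; the two config entries are placeholder values for whatever idb actually configures, and the idb package's own imports are assumed to resolve:

from flask import Flask

from idb.data_api.v1 import this_version

app = Flask(__name__)
app.config["SUPPORTED_TYPES"] = ["records", "mediarecords"]   # assumed values
app.config["PARENT_MAP"] = {"records": "recordsets"}          # assumed values
app.register_blueprint(this_version, url_prefix="/v1")

if __name__ == "__main__":
    app.run()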
tensorflow/tensorflow
tensorflow/python/client/timeline.py
Python
apache-2.0
28,612
0.005033
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Timeline visualization for TensorFlow using Chrome Trace Format.""" import collections import copy import json import re # The timeline target is usually imported as part of BUILD target # "platform_test", which includes also includes the "platform" # dependency. This is why the logging import here is okay. from tensorflow.python.platform import build_info from tensorflow.python.platform import tf_logging as logging class AllocationMaximum(collections.namedtuple( 'AllocationMaximum', ('timestamp', 'num_bytes', 'tensors'))): """Stores the maximum allocation for a given allocator within the timelne. Parameters: timestamp: `tensorflow::Env::NowMicros()` when this maximum was reached. num_bytes: the total memory used at this time. tensors: the set of tensors allocated at this time. """ pass class StepStatsAnalysis(collections.namedtuple( 'StepStatsAnalysis', ('chrome_trace', 'allocator_maximums'))): """Stores the step stats analysis output. Parameters: chrome_trace: A dict containing the chrome trace analysis. allocator_maximums: A dict mapping allocator names to AllocationMaximum. """ pass class _ChromeTraceFormatter(object): """A helper class for generating traces in Chrome Trace Format.""" def __init__(self, show_memory=False): """Constructs a new Chrome Trace formatter.""" self._show_memory = show_memory self._events = [] self._metadata = [] def _create_event(self, ph, category, name, pid, tid, timestamp): """Creates a new Chrome Trace event. For details of the file format, see: https://github.com/catapult-project/catapult/blob/master/tracing/README.md Args: ph: The type of event - usually a single character. category: The event category as a string. name: The event name as a string. pid: Identifier of the process generating this event as an integer. tid: Identifier of the thread generating this event as an integer. timestamp: The timestamp of this event as a long integer. Returns: A JSON compatible event object. """ event = {} event['ph'] = ph event['cat'] = category event['name'] = name event['pid'] = pid event['tid'] = tid event['ts'] = timestamp return event def emit_pid(self, name, pid): """Adds a process metadata event to the trace. Args: name: The process name as a string. pid: Identifier of the process as an integer. """ event = {} event['name'] = 'process_name' event['ph'] = 'M' event['pid'] = pid event['args'] = {'name': name} self._metadata.append(event) def emit_tid(self, name, pid, tid): """Adds a thread metadata event to the trace. Args: name: The thread name as a string. pid: Identifier of the process as an integer. tid: Identifier of the thread as an integer. 
""" event = {} event['name'] = 'thread_name' event['ph'] = 'M' event['pid'] = pid event['tid'] = tid event['args'] = {'name': name} self._metadata.append(event) def emit_region(self, timestamp, duration, pid, tid, category, name, args): """Adds a region event to the trace. Args: timestamp: The start timestamp of this region as a long integer. duration: The duration of this region as a long integer. pid: Identifier of the process generating this event as an integer. tid: Identifier of the thread generating this event as an integer. category: The event category as a string. name: The event name as a string. args: A JSON-compatible dictionary of event arguments. """ event = self._create_event('X', category, name, pid, tid, timestamp) event['dur'] = duration event['args'] = args self._events.append(event) def emit_obj_create(self, category, name, timestamp, pid, tid, object_id): """Adds an object creation event to the trace. Args: category: The event category as a string. name: The event name as a string. timestamp: The timestamp of this event as a long integer. pid: Identifier of the process generating this event as an integer. tid: Identifier of the thread generating this event as an integer. object_id: Identifier of the object as an integer. """ event = self._create_event('N', category, name, pid, tid, timestamp) event['id'] = object_id self._events.append(event) def emit_obj_delete(self, category, name, timestamp, pid, tid, object_id): """Adds an object deletion event to the trace. Args: category: The event category as a string. name: The event name as a string. timestamp: The timestamp of this event as a long integer. pid: Identifier of the process generating this event as an integer. tid: Identifier of the thread generating this event as an integer. object_id: Identifier of the object as an integer. """ event = self._create_event('D', category, name, pid, tid, timestamp) event['id'] = object_id self._events.append(event) def emit_obj_snapshot(self, category, name, timestamp, pid, tid, object_id, snapshot): """Adds an object snapshot event to the trace. Args: category: The event category as a string. name: The event name as a string. timestamp: The timestamp of this event as a long integer. pid: Identifier of the process generating this event as an integer. tid: Identifier of the thread generating this event as an integer. object_id: Identifier of the object as an integer. snapshot: A JSON-compatible representation of the object. """ event = self._create_event('O', category, name, pid, tid, timestamp) event['id'] = object_id event['args'] = {'snapshot': snapshot} self._events.append(event) def emit_flow_start(self, name, timestamp, pid, tid, flow_id): """Adds a flow start event to the trace. When matched with a flow end event (with the same 'flow_id') this will cause the trace viewer to draw an arrow between the start and end events. Args: name: The event name as a string. timestamp: The timestamp of this event as a long integer. pid: Identifier of the process gen
erating this event as an integer. tid: Identifier of the thread generating this event as an integer. flow_id: Identifier of the flow a
s an integer. """ event = self._create_event('s', 'DataFlow', name, pid, tid, timestamp) event['id'] = flow_id self._events.append(event) def emit_flow_end(self, name, timestamp, pid, tid, flow_id): """Adds a flow end event to the trace. When matched with a flow start event (with the same 'flow_id') this will cause the trace viewer to draw an arrow between the start and end events. Args: name: The event name as a string. timestamp: The timestamp of this event as a long integer. pid: Identifier of the process generating this event as an integer. tid: Identifier of the thread generating this event as an integer. flow_id: Identifier of the flow as an integer. """ event = self._create_event('t', 'DataFlow', name, pid, tid, timestamp) event['id'] = flow_id self._events.append(event) def emit_counter(self, category, name, pid, timestamp, counter, value): """Emits a record for a single counter. Args: category: The event cat
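A short usage sketch (TensorFlow 1.x graph/session style, written from the commonly documented pattern rather than from this file) of the public Timeline wrapper that the formatter above feeds:

import tensorflow as tf
from tensorflow.python.client import timeline

a = tf.random_normal([500, 500])
b = tf.matmul(a, a)

with tf.Session() as sess:
    # Ask the runtime to collect step stats for this run.
    options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
    run_metadata = tf.RunMetadata()
    sess.run(b, options=options, run_metadata=run_metadata)

    # Convert the step stats into Chrome Trace Format (viewable in chrome://tracing).
    tl = timeline.Timeline(run_metadata.step_stats)
    with open('timeline.json', 'w') as f:
        f.write(tl.generate_chrome_trace_format())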
piotrlewalski/birdstorm
game/apps/core/models/buildings.py
Python
mit
2,036
0.002947
import blinker from concurrency.fields import IntegerVersionField from django.contrib.auth.models import User from django.db import models from django.db.models.signals import post_save from django.dispatch.dispatcher import receiver from game.apps.core.models.planet.models import Planet from game.utils.models import ResourceContainer from game.utils.polymorph import PolymorphicBase from jsonfield import JSONField import game.apps.core.signals class Building(PolymorphicBase): level = models.IntegerField(default=1) data = JSONField(default={}) planet = models.ForeignKey(Planet, related_name="buildings") version = IntegerVersionField() user = models.ForeignKey(User, related_name="buildings") def save(self, *args, **kwargs): signal = blinker.signal(game.apps.core.signals.building % self.id) signal.send(self, building=self) super().save(*args, **kwargs) class Meta: app_label = 'core' ordering = ('id', ) class Citadel(Building): class Meta: proxy = True def process_turn(self): warehouse = self.owner.buildings.filter(type='Warehouse') warehouse.add_resource("Aluminium", 10) warehouse.add_resource("Steel", 10) warehouse.save() class Warehouse(Building, ResourceContainer): class Meta: proxy = True class Terminal(Building): class Meta: proxy = True class Mine(Building): class Meta: proxy = True #TODO use Django ready() @receiver(post_save, sender=User, dispatch_uid="create_default_buildings") def create_default_buildings(sender, **kwargs): if kwargs['crea
ted']: Citadel.objects.create(user=kwargs['instance'], planet_id=1) # TODO don't hard-code planet id Warehouse.objects.create(user=kwargs['instance'], planet_id=1) # TODO don't hard-code planet id def get_base(self): #TODO cache return self.buildings.get(type="Base") User.base = property(ge
t_base)
MSHallOpenSoft/plotter
Thesidetab/sliderder.py
Python
gpl-2.0
2,815
0.001421
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'slidersd.ui' # # Created: Tue Mar 17 23:31:52 2015 # by: PyQt4 UI code generator 4.10.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Form(object): def setupUi(self, Form): Form.setObjectName(_fromUtf8("Form")) Form.resize(392, 74) self.verticalLayout = QtGui.QVBoxLayout(Form) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.horizontalLayout_2 = QtGui.QHBoxLayout() self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2")) self.label = QtGui.QLabel(Form) self.label.setObjectName(_fromUtf8("label")) self.horizontalLayout_2.addWidget(self.label) self.value = QtGui.QLabel(Form) self.value.setText(_fromUtf8("")) self.value.setObjectName(_fromUtf8("value")) self.horizontalLayout_2.addWidget(self.value) self.verticalLayout.addLayout(self.horizontalLayout_2) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setSizeConstraint(QtGui.QLayout.SetFixedSize) self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) spacerItem = QtGui.QSpacerItem(10, 20, QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem) self.label_3 = QtGui.QLabel(Form) self.label_3.setObjectName(_fromUtf8("label_3")) self.horizontalLayout.addWidget(self.label_3) self.horizontalSlider = QtGui.QSlider(Form) self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal) self.horizontalSlider.setObjectName(_fromUtf8("horizontalSlider")) self.horizontalLayout.addWidget(self.horizontalSlider) self.label_4 = QtGui.QLabel(Form) self.label_4.setObjectName(_fromUtf8("label_4")) self.horizontalLayout.addWidget(self.label_4) self.verticalLayout.addLayout(self.horizontalLayout) self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retra
nslateUi(self, Form): Form.setWindowTitle(_translate("Form", "Form", None)) self.label.setText(_translate("Form", "a = ", None)) self.label_3.setText(_translate("Form", "-10", None)) self.label_4.setText(_translate("Form", "10", None))
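A minimal sketch of attaching the generated Ui_Form above to a widget, following the standard pyuic4 pattern; the import path mirrors the file location and is assumed:

import sys

from PyQt4 import QtGui

from Thesidetab.sliderder import Ui_Form   # assumed import path

app = QtGui.QApplication(sys.argv)
form = QtGui.QWidget()
ui = Ui_Form()
ui.setupUi(form)
ui.horizontalSlider.setRange(-10, 10)   # matches the -10 / 10 labels set in retranslateUi
form.show()
sys.exit(app.exec_())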
PetePriority/home-assistant
homeassistant/components/modbus/climate.py
Python
apache-2.0
5,304
0
""" Platform for a Generic Modbus Thermostat. This uses a setpoint and process value within the controller, so both the current temperature register and the target temperature register need to be configured. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/climate.modbus/ """ import logging import struct import voluptuous as vol from homeassistant.const import ( CONF_NAME, CONF_SLAVE, ATTR_TEMPERATURE) from homeassistant.components.climate import ( ClimateDevice, PLATFORM_SCHEMA, SUPPORT_TARGET_TEMPERATURE) from homeassistant.components import modbus import homeassistant.helpers.config_validation as cv DEPENDENCIES = ['modbus'] # Parameters not defined by homeassistant.const CONF_TARGET_TEMP = 'target_temp_register' CONF_CURRENT_TEMP = 'current_temp_register' CONF_DATA_TYPE = 'data_type' CONF_COUNT = 'data_count' CONF_PRECISION = 'precision' DATA_TYPE_INT = 'int' DATA_TYPE_UINT = 'uint' DATA_TYPE_FLOAT = 'float' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_NAME): cv.string, vol.Required(CONF_SLAVE): cv.positive_int, vol.Required(CONF_TARGET_TEMP): cv.positive_int, vol.Required(CONF_CURRENT_TEMP): cv.positive_int, vol.Optional(CONF_DATA_TYPE, default=DATA_TYPE_FLOAT): vol.In([DATA_TYPE_INT, DATA_TYPE_UINT, DATA_TYPE_FLOAT]), vol.Optional(CONF_COUNT, default=2): cv.positive_int, vol.Optional(CONF_PRECISION, default=1): cv.positive_int }) _LOGGER = logging.getLogger(__name__) SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Modbus Thermostat Platform.""" name = config.get(CONF_NAME) modbus_slave = config.get(CONF_SLAVE) target_temp_register = config.get(CONF_TARGET_TEMP) current_temp_register = config.get(CONF_CURRENT_TEMP) data_type = config.get(CONF_DATA_TYPE) count = config.get(CONF_COUNT) precision = config.get(CONF_PRECISION) add_entities([ModbusThermostat(name, modbus_slave, target_temp_register, current_temp_register, data_type, count, precision)], True) class ModbusThermostat(ClimateDevice): """Representation of a Modbus Thermostat.""" def __init__(self, name, modbus_slave, target_temp_register, current_temp_register, data_type, count, precision): """Initialize the unit.""" self._name = name self._slave = modbus_slave self._target_temperature_register = target_temp_register self._current_temperature_register = current_temp_register self._target_temperature = None self._current_temperature = None self._data_type = data_type self._count = int(count) self._precision = precision self._structure = '>f' data_types = {DATA_TYPE_INT: {1: 'h', 2: 'i', 4: 'q'}, DATA_TYPE_UINT: {1: 'H', 2: 'I', 4: 'Q'}, DATA_TYPE_FLOAT: {1: 'e', 2: 'f', 4: 'd'}} self._structure = '>{}'.format(data_types[self._data_type]
[self._count]) @property def supported_features(self): """Return the list of supported fea
tures.""" return SUPPORT_FLAGS def update(self): """Update Target & Current Temperature.""" self._target_temperature = self.read_register( self._target_temperature_register) self._current_temperature = self.read_register( self._current_temperature_register) @property def name(self): """Return the name of the climate device.""" return self._name @property def current_temperature(self): """Return the current temperature.""" return self._current_temperature @property def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature def set_temperature(self, **kwargs): """Set new target temperature.""" target_temperature = kwargs.get(ATTR_TEMPERATURE) if target_temperature is None: return byte_string = struct.pack(self._structure, target_temperature) register_value = struct.unpack('>h', byte_string[0:2])[0] try: self.write_register(self._target_temperature_register, register_value) except AttributeError as ex: _LOGGER.error(ex) def read_register(self, register): """Read holding register using the modbus hub slave.""" try: result = modbus.HUB.read_holding_registers(self._slave, register, self._count) except AttributeError as ex: _LOGGER.error(ex) byte_string = b''.join( [x.to_bytes(2, byteorder='big') for x in result.registers]) val = struct.unpack(self._structure, byte_string)[0] register_value = format(val, '.{}f'.format(self._precision)) return register_value def write_register(self, register, value): """Write register using the modbus hub slave.""" modbus.HUB.write_registers(self._slave, register, [value, 0])
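A standalone sketch (no Home Assistant required) of the register encoding used above: data_type and data_count select a struct format string, and the packed bytes map onto 16-bit Modbus registers:

import struct

data_types = {'int':   {1: 'h', 2: 'i', 4: 'q'},
              'uint':  {1: 'H', 2: 'I', 4: 'Q'},
              'float': {1: 'e', 2: 'f', 4: 'd'}}

fmt = '>{}'.format(data_types['float'][2])    # defaults: float with 2 registers -> '>f'
raw = struct.pack(fmt, 21.5)                  # 4 big-endian bytes
registers = [int.from_bytes(raw[i:i + 2], 'big') for i in (0, 2)]
print(fmt, registers, struct.unpack(fmt, raw)[0])   # >f [16812, 0] 21.5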
kpech21/Greek-Stemmer
tests/closets/test_word_exceptions.py
Python
lgpl-3.0
157
0
#
-*- coding: utf-8 -*- from greek_stemmer.closets.word_exceptions import exceptions def test_word_exceptions(): assert isinstance(except
ions, dict)
CitrineInformatics/pif-dft
dfttopif/parsers/vasp.py
Python
apache-2.0
16,836
0.00689
from pypif.obj import Property, Scalar from .base import DFTParser, Value_if_true, InvalidIngesterException import os import re from ase.io.vasp import read_vasp_out from pypif.obj import Value, FileReference from dftparse.vasp.outcar_parser import OutcarParser from dftparse.vasp.eigenval_parser import EigenvalParser class VaspParser(DFTParser): ''' Parser for VASP calculations ''' def __init__(self, files): super(VaspParser, self).__init__(files) self.settings = {} parser = OutcarParser() # Find the outcar file def _find_file(name): """Find a filename that contains a certain string""" name = name.upper() my_file = None for f in self._files: if os.path.basename(f).upper().startswith(name): if my_file is not None: raise InvalidIngesterException('Found more than one {} file'.format(name)) my_file = f return my_file self.outcar = _find_file('OUTCAR') if self.outcar is None: raise InvalidIngesterException('OUTCAR not found!') with open(self.outcar, "r") as fr: for parsed_line in parser.parse(fr.readlines()): for k, v in parsed_line.items(): if k in self.settings: self.settings[k].append(v) else: self.settings[k] = [v] # Find the DOSCAR, EIGENVAL, and INCAR files # None of these are required so we do not throw exceptions self.incar = _find_file('INCAR') self.poscar = _find_file('POSCAR') self.doscar = _find_file('DOSCAR') self.eignval = _find_file('EIGNVAL') def get_name(self): return "VASP" def get_output_structure(self): self.atoms = read_vasp_out(self.outcar) return self.atoms def get_outcar(self): raw_path = self.outcar if raw_path[0:2] == "./": raw_path = raw_path[2:] return Property(files=[FileReference( relative_path=raw_path )]) def get_incar(self): if self.incar is None: return None raw_path = self.incar if raw_path[0:2] == "./": raw_path = raw_path[2:] return Value(files=[FileReference( relative_path=raw_path )]) def get_poscar(self): if self.poscar is None: return None raw_path = self.poscar if raw_path[0:2] == "./": raw_path = raw_path[2:] return Value(files=[FileReference( relative_path=raw_path )]) def get_cutoff_energy(self): # Open up the OUTCAR with open(self.outcar, 'r') as fp: # Look for ENCUT for line in fp: if "ENCUT" in line: words = line.split() return Value(scalars=[Scalar(value=float(words[2]))], units=words[3])
# Error handling: ENCUT not found raise Exception('ENCUT not found') @Value_if_true def uses_SOC(self): # Open up the OUTCAR with open(self.outcar) a
s fp: #look for LSORBIT for line in fp: if "LSORBIT" in line: words = line.split() return words[2] == 'T' # Error handling: LSORBIT not found raise Exception('LSORBIT not found') @Value_if_true def is_relaxed(self): # Open up the OUTCAR with open(self.outcar) as fp: # Look for NSW for line in fp: if "NSW" in line: words = line.split() return int(words[2]) != 0 # Error handling: NSW not found raise Exception('NSW not found') def get_xc_functional(self): # Open up the OUTCAR with open(self.outcar) as fp: # Look for TITEL for line in fp: if "TITEL" in line: words = line.split() return Value(scalars=[Scalar(value=words[2])]) def get_pp_name(self): # Open up the OUTCAR with open(self.outcar) as fp: #initialize empty list to store pseudopotentials pp = [] # Look for TITEL for line in fp: if "TITEL" in line: words = line.split() pp.append(words[3]) return Value(vectors=[[Scalar(value=x) for x in pp]]) def get_KPPRA(self): # Open up the OUTCAR with open(self.outcar) as fp: #store the number of atoms and number of irreducible K-points for line in fp: if "number of ions NIONS =" in line: words = line.split() NI = int(words[11]) elif "k-points NKPTS =" in line: words = line.split() NIRK = float(words[3]) #check if the number of k-points was reduced by VASP if so, sum all the k-points weight if "irreducible" in open(self.outcar).read(): fp.seek(0) for line in fp: #sum all the k-points weight if "Coordinates Weight" in line: NK=0; counter = 0 for line in fp: if counter == NIRK: break NK += float(line.split()[3]) counter += 1 return Value(scalars=[Scalar(value=NI*NK)]) #if k-points were not reduced KPPRA equals the number of atoms * number of irreducible k-points else: return Value(scalars=[Scalar(value=NI*NIRK)]) def _is_converged(self): # Follows the procedure used by qmpy, but without reading the whole file into memory # Source: https://github.com/wolverton-research-group/qmpy/blob/master/qmpy/analysis/vasp/calculation.py with open(self.outcar) as fp: # Part 1: Determine the NELM nelm = None for line in fp: if line.startswith(" NELM ="): nelm = int(line.split()[2][:-1]) break # If we don't find it, tell the user if nelm is None: raise Exception('NELM not found. Cannot tell if this result is converged') # Now, loop through the file. What we want to know is whether the last ionic # step of this file terminates because it converges or because we hit NELM re_iter = re.compile('([0-9]+)\( *([0-9]+)\)') converged = False for line in fp: # Count the ionic steps if 'Iteration' in line: ionic, electronic = map(int, re_iter.findall(line)[0]) # If the loop is finished, mark the number of electronic steps if 'aborting loop' in line: converged = electronic < nelm return converged def get_total_energy(self): with open(self.outcar) as fp: last_energy = None for line in fp: if line.startswith(' free energy TOTEN'): last_energy = float(line.split()[4]) if last_energy is None: return None return Property(scalars=[Scalar(value=last_energy)], units='eV') def get_version_number(self): # Open up the OUTCAR with open(self.outcar) as fp: #look for vasp for line in fp: if "vasp" in line: words = line.split() return (words[0].strip('vasp.')) break # Error handling: vasp not found raise Exception('vasp not found') def get_U_settings(self): #Open up the OUTCAR
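A minimal sketch of instantiating the parser above; the directory name is hypothetical and must contain a VASP OUTCAR file, otherwise the constructor raises InvalidIngesterException:

import glob

from dfttopif.parsers.vasp import VaspParser

parser = VaspParser(glob.glob('my_vasp_run/*'))   # hypothetical calculation directory
print(parser.get_name())                          # "VASP"
print(parser.get_cutoff_energy())                 # pypif Value holding ENCUT and its units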
bj7/pwndbg
pwndbg/memory.py
Python
mit
5,918
0.010308
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Reading, writing, and describing memory. """ import gdb import pwndbg.compat import pwndbg.typeinfo PAGE_SIZE = 0x1000 PAGE_MASK = ~(PAGE_SIZE-1) MMAP_MIN_ADDR = 0x8000 def read(addr, count, partial=False): result = '' try: result = gdb.selected_inferior().read_memory(addr, count) except gdb.error as e: if not partial: raise stop_addr = int(e.message.split()[-1], 0) if stop_addr != addr: return read(addr, stop_addr-addr) # QEMU will return the start address as the failed # read address. Try moving back a few pages at a time. stop_addr = addr + count # Move the stop address down to the previous page boundary stop_addr &= PAGE_MASK while stop_addr > addr: result = read(addr, stop_addr-addr) if result: return result # Move down by another page stop_addr -= PAGE_SIZE if pwndbg.compat.python3: result = result.tobytes() return bytearray(result) def readtype(gdb_type, addr): return int(gdb.Value(addr).cast(gdb_type.pointer()).dereference()) def write(addr, data): gdb.selected_inferior().write_memory(addr, data) def peek(address): try: return read(address, 1) except: pass return None def poke(address): c = peek(address) if c is None: return False try: write(address, c) except: return False return True def byte(addr): return readtype(pwndbg.typeinfo.uchar, addr) def uchar(addr): return readtype(pwndbg.typeinfo.uchar, addr) def ushort(addr): return readtype(pwndbg.typeinfo.ushort, addr) def uint(addr): return readtype(pwndbg.typeinfo.uint, addr) def pvoid(addr): return readtype(pwndbg.typeinfo.pvoid, addr) def u8(addr): return readtype(pwndbg.typeinfo.uint8, addr) def u16(addr): return readtype(pwndbg.typeinfo.uint16, addr) def u32(addr): return readtype(pwndbg.typeinfo.uint32, addr) def u64(addr): return readtype(pwndbg.typeinfo.uint64, addr) def u(addr, size): return { 8: u8, 16: u16, 32: u32, 64: u64 }[size](addr) def s8(addr): return readtype(pwndbg.typeinfo.int8, addr) def s16(addr): return readtype(pwndbg.typeinfo.int16, addr) def s32(addr): return readtype(pwndbg.typeinfo.int32, addr) def s64(addr): return readtype(pwndbg.typeinfo.int64, addr) def write(addr, data): gdb.selected_inferior().write_memory(addr, data) def poi(type, addr): return gdb.Value(addr).cast(type.pointer()).dereference() def round_down(address, align): return address & ~(align-1) def round_up(address, align): return (address+(align-1))&(~(align-1)) align_down = round_down align_up = round_up
def page_align(address): return round_down(address, PAGE_SIZE) def page_size_align(address): return round_up(address, PAGE_SIZE) def page_offset(address): return
(address & (PAGE_SIZE-1)) assert round_down(0xdeadbeef, 0x1000) == 0xdeadb000 assert round_up(0xdeadbeef, 0x1000) == 0xdeadc000 def find_upper_boundary(addr, max_pages=1024): addr = pwndbg.memory.page_align(int(addr)) try: for i in range(max_pages): pwndbg.memory.read(addr, 1) import sys sys.stdout.write(hex(addr) + '\n') addr += pwndbg.memory.PAGE_SIZE except gdb.MemoryError: pass return addr def find_lower_boundary(addr, max_pages=1024): addr = pwndbg.memory.page_align(int(addr)) try: for i in range(max_pages): pwndbg.memory.read(addr, 1) addr -= pwndbg.memory.PAGE_SIZE except gdb.MemoryError: pass return addr class Page(object): """ Represents the address space and page permissions of at least one page of memory. """ vaddr = 0 #: Starting virtual address memsz = 0 #: Size of the address space, in bytes flags = 0 #: Flags set by the ELF file, see PF_X, PF_R, PF_W offset = 0 #: Offset into the original ELF file that the data is loaded from objfile = '' #: Path to the ELF on disk def __init__(self, start, size, flags, offset, objfile=''): self.vaddr = start self.memsz = size self.flags = flags self.offset = offset self.objfile = objfile # if self.rwx: # self.flags = self.flags ^ 1 @property def read(self): return bool(self.flags & 4) @property def write(self): return bool(self.flags & 2) @property def execute(self): return bool(self.flags & 1) @property def rw(self): return self.read and self.write @property def rwx(self): return self.read and self.write and self.execute @property def permstr(self): flags = self.flags return ''.join(['r' if flags & 4 else '-', 'w' if flags & 2 else '-', 'x' if flags & 1 else '-', 'p']) def __str__(self): width = 2 + 2*pwndbg.typeinfo.ptrsize fmt_string = "%#{}x %#{}x %s %8x %-6x %s" fmt_string = fmt_string.format(width, width) return fmt_string % (self.vaddr, self.vaddr+self.memsz, self.permstr, self.memsz, self.offset, self.objfile or '') def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self.__str__()) def __contains__(self, a): return self.vaddr <= a < (self.vaddr + self.memsz) def __eq__(self, other): return self.vaddr == getattr(other, 'vaddr', other) def __lt__(self, other): return self.vaddr < getattr(other, 'vaddr', other) def __hash__(self): return hash((self.vaddr, self.memsz, self.flags, self.offset, self.objfile))
kparal/anaconda
pyanaconda/anaconda.py
Python
gpl-2.0
9,570
0.001463
# anaconda: The Red Hat Linux Installation program # # Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 # Red Hat, Inc. All rights reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Author(s): Brent Fox <bfox@redhat.com> # Mike Fulbright <msf@redhat.com> # Jakub Jelinek <jakub@redhat.com> # Jeremy Katz <katzj@redhat.com> # Chris Lumens <clumens@redhat.com> # Paul Nasrat <pnasrat@redhat.com> # Erik Troan <ewt@rpath.com> # Matt Wilson <msw@rpath.com> # import os import sys import stat from glob import glob from tempfile import mkstemp import threading from pyanaconda.bootloader import get_bootloader from pyanaconda import constants from pyanaconda import iutil from pyanaconda.iutil import open # pylint: disable=redefined-builtin from pyanaconda import addons import logging log = logging.getLogger("anaconda") stdoutLog = logging.getLogger("anaconda.stdout") class Anaconda(object): def __init__(self): from pyanaconda import desktop self._bootloader = None self.canReIPL = False self.desktop = desktop.Desktop() self.dir = None self.displayMode = None self.gui_startup_fai
led = False self.id = None self._instClass = None self._intf = None self.isHeadless = False self.ksdata = None self.mediaDevice = None self.methodstr = None
self.opts = None self._payload = None self.proxy = None self.proxyUsername = None self.proxyPassword = None self.reIPLMessage = None self.rescue_mount = True self.rootParts = None self.stage2 = None self._storage = None self.updateSrc = None self.mehConfig = None # *sigh* we still need to be able to write this out self.xdriver = None # Data for inhibiting the screensaver self.dbus_session_connection = None self.dbus_inhibit_id = None # This is used to synchronize Gtk.main calls between the graphical # interface and error dialogs. Whoever gets to their initialization code # first will lock gui_initializing self.gui_initialized = threading.Lock() @property def bootloader(self): if not self._bootloader: self._bootloader = get_bootloader() return self._bootloader @property def instClass(self): if not self._instClass: from pyanaconda.installclass import DefaultInstall self._instClass = DefaultInstall() return self._instClass def _getInterface(self): return self._intf def _setInterface(self, v): # "lambda cannot contain assignment" self._intf = v def _delInterface(self): del self._intf intf = property(_getInterface, _setInterface, _delInterface) @property def payload(self): # Try to find the packaging payload class. First try the install # class. If it doesn't give us one, fall back to the default. if not self._payload: klass = self.instClass.getBackend() if not klass: from pyanaconda.flags import flags if self.ksdata.ostreesetup.seen: from pyanaconda.packaging.rpmostreepayload import RPMOSTreePayload klass = RPMOSTreePayload elif flags.livecdInstall: from pyanaconda.packaging.livepayload import LiveImagePayload klass = LiveImagePayload elif self.ksdata.method.method == "liveimg": from pyanaconda.packaging.livepayload import LiveImageKSPayload klass = LiveImageKSPayload else: from pyanaconda.packaging.dnfpayload import DNFPayload klass = DNFPayload self._payload = klass(self.ksdata) return self._payload @property def protected(self): specs = [] if os.path.exists("/run/initramfs/livedev") and \ stat.S_ISBLK(os.stat("/run/initramfs/livedev")[stat.ST_MODE]): specs.append(os.readlink("/run/initramfs/livedev")) if self.methodstr and self.methodstr.startswith("hd:"): specs.append(self.methodstr[3:].split(":", 3)[0]) if self.stage2 and self.stage2.startswith("hd:"): specs.append(self.stage2[3:].split(":", 3)[0]) # zRAM swap devices need to be protected for zram_dev in glob("/dev/zram*"): specs.append(zram_dev) return specs @property def storage(self): if not self._storage: import blivet import blivet.arch import gi gi.require_version("BlockDev", "1.0") from gi.repository import BlockDev as blockdev self._storage = blivet.Blivet(ksdata=self.ksdata) if self.instClass.defaultFS: self._storage.setDefaultFSType(self.instClass.defaultFS) if blivet.arch.isS390(): # want to make sure s390 plugin is loaded if "s390" not in blockdev.get_available_plugin_names(): plugin = blockdev.PluginSpec() plugin.name = blockdev.Plugin.S390 plugin.so_name = None blockdev.reinit([plugin], reload=False) return self._storage def dumpState(self): from meh import ExceptionInfo from meh.dump import ReverseExceptionDump from inspect import stack as _stack from traceback import format_stack # Skip the frames for dumpState and the signal handler. 
stack = _stack()[2:] stack.reverse() exn = ReverseExceptionDump(ExceptionInfo(None, None, stack), self.mehConfig) # gather up info on the running threads threads = "\nThreads\n-------\n" # Every call to sys._current_frames() returns a new dict, so it is not # modified when threads are created or destroyed. Iterating over it is # thread safe. for thread_id, frame in sys._current_frames().items(): threads += "\nThread %s\n" % (thread_id,) threads += "".join(format_stack(frame)) # dump to a unique file (fd, filename) = mkstemp(prefix="anaconda-tb-", dir="/tmp") dump_text = exn.traceback_and_object_dump(self) dump_text += threads dump_text_bytes = dump_text.encode("utf-8") iutil.eintr_retry_call(os.write, fd, dump_text_bytes) iutil.eintr_ignore(os.close, fd) # append to a given file with open("/tmp/anaconda-tb-all.log", "a+") as f: f.write("--- traceback: %s ---\n" % filename) f.write(dump_text + "\n") def initInterface(self, addon_paths=None): if self._intf: raise RuntimeError("Second attempt to initialize the InstallInterface") if self.displayMode == 'g': from pyanaconda.ui.gui import GraphicalUserInterface # Run the GUI in non-fullscreen mode, so live installs can still # use the window manager self._intf = GraphicalUserInterface(self.storage, self.payload, self.instClass, gui_lock=self.gui_initialized, fullscreen=False) # needs to be refreshed now we know
mihow/footer
config/wsgi.py
Python
mit
1,706
0
""" WSGI config for Footer project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os from django.core.wsgi import get_wsgi_application if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production': from raven.contrib.django.raven_compat.middleware.wsgi import Sen
try # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode wi
th each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production" os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. application = get_wsgi_application() if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production': application = Sentry(application) # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
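A development-only sketch of serving the application object above with the standard library's WSGI server; it assumes the project's settings import cleanly, and a production deployment would use gunicorn/uWSGI or mod_wsgi instead:

from wsgiref.simple_server import make_server

from config.wsgi import application

httpd = make_server('127.0.0.1', 8000, application)
print('Serving on http://127.0.0.1:8000 ...')
httpd.serve_forever()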
junhuac/MQUIC
src/mojo/tools/mopy/config.py
Python
mit
4,103
0.014136
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import ast import os.path import platform import re import sys class Config(object): '''A Config contains a dictionary that species a build configuration.''' # Valid values for target_os: OS_ANDROID = 'android' OS_CHROMEOS = 'chromeos' OS_LINUX = 'linux' OS_MAC = 'mac' OS_WINDOWS = 'windows' # Valid values for target_cpu: ARCH_X86 = 'x86' ARCH_X64 = 'x64' ARCH_ARM = 'arm' def __init__(self, build_dir=None, target_os=None, target_cpu=None, is_debug=None, is_verbose=None, apk_name='MojoRunner.apk'): '''Function arguments take precedence over GN args and default values.''' assert target_os in (None, Config.OS_ANDROID, Config.OS_CHROMEOS, Config.OS_LINUX, Config.OS_MAC, Config.OS_WINDOWS) assert target_cpu in (None, Config.ARCH_X86, Config.ARCH_X64, Config.ARCH_ARM) assert is_debug in (None, True, False) assert is_verbose in (None, True, False) self.values = { 'build_dir': build_dir, 'target_os': self.GetHostOS(), 'target_cpu': self.GetHostCPU(), 'is_debug': True, 'is_verbose': True, 'dcheck_always_on': False, 'is_asan': False, 'apk_name': apk_name, } self._ParseGNArgs() if target_os is not None: self.values['target_os'] = target_os if target_cpu is not None: self.values['target_cpu'] = target_cpu if is_debug is not None: self.values['is_debug'] = is_debug if is_verbose is not None: self.values['is_verbose'] = is_verbose @staticmethod def GetHostOS(): if sys.platform == 'linux2': return Config.OS_LINUX if sys.platform == 'darwin': return Config.OS_MAC if sys.platform == 'win32': return Config.OS_WINDOWS raise NotImplementedError('Unsupported host OS') @staticmethod def GetHostCPU(): # Derived from //native_client/pynacl/platform.py machine = platform.machine() if machine in ('x86', 'x86-32', 'x86_32', 'x8632', 'i386', 'i686', 'ia32', '32'): return Config.ARCH_X86 if machine in ('x86-64', 'amd64', 'AMD64', 'x86_64', 'x8664', '64'): return Config.ARCH_X64 if machine.startswith('arm'): return Config.ARCH_ARM raise Exception('Cannot identify CPU arch: %s' % machine) def _ParseGNArgs(self): '''Parse the gn config file from the build directory, if it exists.''' TRANSLATIONS = { 'true': 'True', 'false': 'False', } if self.values['build_dir'] is None: return gn_file = os.path.join(self.values['build_dir'], 'args.gn') if not os.path.isfile(gn_file): return with open(gn_file, 'r') as f: for line in f: line = re.sub('\s*#.*', '', line) result = re.match('^\s*(\w+)\s*=\s*(.*)\s*$', line) if result: key = result.group(1) value = result.group(2) self.values[key] = ast.literal_eval(TRANSLATIO
NS.ge
t(value, value)) # Getters for standard fields ------------------------------------------------ @property def build_dir(self): '''Build directory path.''' return self.values['build_dir'] @property def target_os(self): '''OS of the build/test target.''' return self.values['target_os'] @property def target_cpu(self): '''CPU arch of the build/test target.''' return self.values['target_cpu'] @property def is_debug(self): '''Is Debug build?''' return self.values['is_debug'] @property def is_verbose(self): '''Should print additional logging information?''' return self.values['is_verbose'] @property def dcheck_always_on(self): '''DCHECK is fatal even in release builds''' return self.values['dcheck_always_on'] @property def is_asan(self): '''Is ASAN build?''' return self.values['is_asan'] @property def apk_name(self): '''Name of the APK file to run''' return self.values['apk_name']
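A minimal sketch of constructing the Config class above directly (no args.gn parsing happens while build_dir is left unset); the import path is inferred from the file location and may differ:

from mopy.config import Config   # assumed import path

config = Config(target_os=Config.OS_LINUX,
                target_cpu=Config.ARCH_X64,
                is_debug=False)
print(config.target_os, config.target_cpu, config.is_debug, config.apk_name)
# -> linux x64 False MojoRunner.apk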
pzread/sdup
pg.py
Python
mit
8,196
0.0194
import random import datetime import psycopg2 from collections import deque from tornado.stack_context import wrap from tornado.ioloop import IOLoop from tornado.concurrent import return_future class WrapCursor: def __init__(self,db,cur): self._db = db self._cur = cur self._oldcur = None self._init_member() def __iter__(self): return self._cur @return_future def execute(self,sql,param = None,callback = None): def _cb(err = None): if err != None: raise err self.arraysize = self._cur.arraysize self.itersize = self._cur.itersize self.rowcount = self._cur.rowcount self.rownumber = self._cur.rownumber self.lastrowid = self._cur.lastrowid self.query = self._cur.query self.statusmessage = self._cur.statusmessage callback() self._db._execute(self._cur,sql,param,_cb) @return_future def begin(self,callback): def _cur_cb(cur,err = None): if err != None: self._db._end_tran(cur) raise err self._db._execute(cur,'BEGIN;',callback = lambda err : _exec_cb(cur,err)) def _exec_cb(cur,err = None): if err != None: self._db._end_tran(cur) raise err self._oldcur = self._cur self._cur = cur callback() assert(self._oldcur == None) self._db._begin_tran(_cur_cb) @return_future def commit(self,callback): def _cb(err = None): if err != None: raise err self._db._end_tran(self._cur) self._cur = self._oldcur self._oldcur = None callback() assert(self._oldcur != None) self._db._execute(self._cur,'COMMIT;',callback = _cb) @return_future def rollback(self,callback): def _cb(err = None): if err != None: raise err self._db._end_tran(self._cur) self._cur = self._oldcur self._oldcur = None callback() assert(self._oldcur != None) self._db._execute(self._cur,'ROLLB
ACK;',callback = _cb) def _init_member(self): self.fetchone = self._cur.fetchone self.fet
chmany = self._cur.fetchmany self.fetchall = self._cur.fetchall self.scroll = self._cur.scroll self.cast = self._cur.cast self.tzinfo_factory = self._cur.tzinfo_factory self.arraysize = 0 self.itersize = 0 self.rowcount = 0 self.rownumber = 0 self.lastrowid = None self.query = '' self.statusmessage = '' class AsyncPG: def __init__(self,dbname,dbuser,dbpasswd, dbschema = 'public',dbtz = '+0'): self.INITCONN_SHARE = 4 self.INITCONN_FREE = 16 self.OPER_CURSOR = 0 self.OPER_EXECUTE = 1 self._ioloop = IOLoop.instance() self._dbname = dbname self._dbuser = dbuser self._dbpasswd = dbpasswd self._dbschema = dbschema self._dbtz = dbtz self._share_connpool = [] self._free_connpool = [] self._conn_fdmap = {} class _InfDateAdapter: def __init__(self,wrapped): self.wrapped = wrapped def getquoted(self): if self.wrapped == datetime.datetime.max: return b"'infinity'::date" elif self.wrapped == datetime.datetime.min: return b"'-infinity'::date" else: return psycopg2.extensions.TimestampFromPy( self.wrapped).getquoted() psycopg2.extensions.register_adapter(datetime.datetime,_InfDateAdapter) for i in range(self.INITCONN_SHARE): conn = self._create_conn() self._share_connpool.append(conn) self._ioloop.add_handler(conn[0],self._dispatch,IOLoop.ERROR) conn[2] = True self._ioloop.add_callback(self._dispatch,conn[0],0) for i in range(self.INITCONN_FREE): conn = self._create_conn() self._free_connpool.append(conn) self._ioloop.add_handler(conn[0],self._dispatch,IOLoop.ERROR) conn[2] = True self._ioloop.add_callback(self._dispatch,conn[0],0) @return_future def cursor(self,callback): def _cb(cur,err = None): if err != None: raise err callback(WrapCursor(self,cur)) self._cursor(callback = _cb) def _cursor(self,conn = None,callback = None): def _cb(err = None): if err != None: callback(None,err) callback(conn[4].cursor()) if conn == None: conn = self._share_connpool[ random.randrange(len(self._share_connpool))] conn[1].append((self.OPER_CURSOR,None,wrap(_cb))) if conn[2] == False: conn[2] = True self._ioloop.add_callback(self._dispatch,conn[0],0) def _execute(self,cur,sql,param = (),callback = None): conn = self._conn_fdmap[cur.connection.fileno()] conn[1].append((self.OPER_EXECUTE,(cur,sql,param),wrap(callback))) if conn[2] == False: conn[2] = True self._ioloop.add_callback(self._dispatch,conn[0],0) def _begin_tran(self,callback): if len(self._free_connpool) == 0: conn = self._create_conn() self._ioloop.add_handler(conn[0],self._dispatch,IOLoop.ERROR) else: conn = self._free_connpool.pop() self._cursor(conn,callback) def _end_tran(self,cur): conn = self._conn_fdmap[cur.connection.fileno()] if len(self._free_connpool) < self.INITCONN_FREE: self._free_connpool.append(conn) else: self._close_conn(conn) def _create_conn(self): dbconn = psycopg2.connect(database = self._dbname, user = self._dbuser, password = self._dbpasswd, async = 1, options = ( '-c search_path=%s ' '-c timezone=%s' )%(self._dbschema,self._dbtz)) conn = [dbconn.fileno(),deque(),False,None,dbconn] self._conn_fdmap[conn[0]] = conn return conn def _close_conn(self,conn): self._conn_fdmap.pop(conn[0],None) self._ioloop.remove_handler(conn[0]) conn[4].close() def _dispatch(self,fd,evt): err = None try: conn = self._conn_fdmap[fd] except KeyError: self._ioloop.remove_handler(fd) return try: stat = conn[4].poll() except Exception as e: err = e if err != None or stat == psycopg2.extensions.POLL_OK: self._ioloop.update_handler(fd,IOLoop.ERROR) elif stat == psycopg2.extensions.POLL_READ: self._ioloop.update_handler(fd,IOLoop.READ | IOLoop.ERROR) return elif stat 
== psycopg2.extensions.POLL_WRITE: self._ioloop.update_handler(fd,IOLoop.WRITE | IOLoop.ERROR) return cb = conn[3] if cb != None: conn[3] = None cb(err) else: try: oper,data,cb = conn[1].popleft() except IndexError: conn[2] = False return try: if oper == self.OPER_CURSOR: conn[3] = cb elif oper == self.OPER_EXECUTE: cur,sql,param = data cur.execute(sql,param) conn[3] = cb except Exception as e: conn[3] = None cb(e) self._ioloop.add_callback(self.
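A hedged sketch of driving the AsyncPG wrapper above from a Tornado coroutine; the credentials are placeholders, a reachable PostgreSQL server is required, and the module is assumed to be importable as pg:

from tornado import gen
from tornado.ioloop import IOLoop

from pg import AsyncPG   # assumed import path

@gen.coroutine
def main():
    db = AsyncPG('mydb', 'myuser', 'mypassword')   # placeholder credentials
    cur = yield db.cursor()                        # @return_future methods are yieldable
    yield cur.execute('SELECT version();')
    print(cur.fetchone())

IOLoop.instance().run_sync(main)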
mozilla/iacomus-alerts
python/poster.py
Python
epl-1.0
2,563
0.006633
import simplejson as json import urllib import urllib2 import time server = "" def GET(uri, params): params = urllib.urlencode(params) req = urllib2.Request(server + uri + "?" + params , headers={'Accept': 'application/json'}) return json.loads(urllib2.urlopen(req).read()) def POST(uri, params): params = json.dumps(params) req = urllib2.Request(server + uri, params, headers={'Content-Type': 'application/json', 'Accept': 'application/json'})
response = json.loads(urllib2.urlopen(req).read()) return response["id"] def set_server_url(url): global server server = url class Detector: def __init__(self, name, url): self.name = name self.url = url def get_id(self): try: return self.id except AttributeError: try: detectors = GET("/detectors/", {'name': self.name}) self.id = detectors[0]['id']
except urllib2.HTTPError as e: self.id = POST("/detectors/", {'name': self.name, 'url': self.url}) return self.id def realize(self): self.get_id() class Metric: def __init__(self, name, descr, detector): self.name = name self.descr = descr self.detector = detector def get_id(self): try: return self.id except AttributeError: uri = "/detectors/" + str(self.detector.get_id()) + "/metrics/" try: metrics = GET(uri, {'name': self.name}) return metrics[0]['id'] except urllib2.HTTPError as e: return POST(uri, {'name': self.name, 'description': self.descr}) def realize(self): self.get_id() def post_alert(detector, metric, payload, emails="", date=time.strftime("%Y-%m-%d")): try: payload = json.dumps(payload) uri = "/detectors/" + str(detector.get_id()) + "/metrics/" + str(metric.get_id()) + "/alerts/" return POST(uri, {'description': payload, 'date': date, 'emails': emails}) except urllib2.HTTPError as e: if e.code == 422: print "Alert for detector: " + detector.name + ", metric: " + metric.name + ", has already been submitted!" else: raise e if __name__ == "__main__": set_server_url("http://localhost:8080") detector = Detector("Histogram Regression Detector", "foobar") metric = Metric("metric100", "foobar", detector) post_alert(detector, metric, "foobar")
Alberto-Beralix/Beralix
i386-squashfs-root/usr/share/hplip/ui4/readonlyradiobutton.py
Python
gpl-3.0
1,657
0.004225
# -*- coding: utf-8 -*- # # (c) Copyright 2001-2009 Hewlett-Packard Development Company, L.P. # # This program is free software; you can redistribute it
and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2
of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # Authors: Don Welch # # Qt from PyQt4.QtCore import * from PyQt4.QtGui import * class ReadOnlyRadioButton(QRadioButton): def __init__(self, parent): QRadioButton.__init__(self, parent) self.setFocusPolicy(Qt.NoFocus) self.clearFocus() def mousePressEvent(self, e): if e.button() == Qt.LeftButton: return QRadioButton.mousePressEvent(e) def mouseReleaseEvent(self, e): if e.button() == Qt.LeftButton: return QRadioButton.mouseReleaseEvent(e) def mouseMoveEvent(self, e): return def keyPressEvent(self, e): if e.key() not in (Qt.Key_Up, Qt.Key_Left, Qt.Key_Right, Qt.Key_Down, Qt.Key_Escape): return QRadioButton.keyPressEvent(e) def keyReleaseEvent(self, e): return
blajoie/vcf2fasta
scripts/vcf2fasta.py
Python
apache-2.0
8,351
0.025626
from __future__ import print_function from __future__ import division import numpy as np import sys import argparse import time import re import gzip import os import logging from collections import defaultdict from operator import itemgetter __version__ = "1.0" def main(): parser=argparse.ArgumentParser(description='vcf2fasta (diploid)',formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-r', '--ref', dest='ref_fasta', type=str, required=True, help='input reference file (fasta)') parser.add_argument('-v', '--vcf', dest='vcf_file', type=str, required=True, help='input vcf file (vcf)') parser.add_argument('-n', '--name', dest='name', type=str, required=True, help='sample name (column header)') parser.add_argument('--verbose', dest='verbose', action='count', help='Increase verbosity (specify multiple times for more)') parser.add_argument('--version', action='version', version='%(prog)s '+__version__) args=parser.parse_args() ref_fasta=args.ref_fasta vcf_file=args.vcf_file name=args.name verbose=args.verbose log_level = logging.WARNING if verbose == 1: log_level = logging.INFO elif verbose >= 2: log_level = logging.DEBUG logging.basicConfig(level=log_level) verboseprint = print if verbose else lambda *a, **k: None # process VCF file verboseprint("processing VCF file") snps=defaultdict(list) snp_count=0 last_chrom=None chrs=dict() # open vcf file if vcf_file.endswith('.gz'): vcf_fh=gzip.open(vcf_file,'r') else: vcf_fh=open(vcf_file,'r') # iterate over vcf file for linenum,line in enumerate(vcf_fh): if line.startswith("##"): # skip headers continue line=line.rstrip("\n") # get header line if line.startswith("#"): header=line.lstrip("#").split("\t"); header2index=dict([(h,i) for i,h in enumerate(header)]) # ensure FORMAT column is included if "FORMAT" not in header2index: print("FORMAT field not specified in VCF file!") print(header2index) sys.exit('error') # ensure user-specified sample name column is included if name not in header2index: print(name,"field not specified in VCF file!") print(header2index) sys.exit('error') continue tmp=line.split("\t") genotype=tmp[header2index[name]] format=tmp[header2index["FORMAT"]].split(":") index2field=dict([(f,i) for i,f in enumerate(format)]) # ensure GT field id included in FORMAT column if "GT" not in index2field: print("GT field not specified in FORMAT!") print(index2field) sys.exit('error') genotype_list=genotype.split(":") gt=genotype_list[index2field["GT"]] pattern = re.compile('[\|\/]') (a,b) = pattern.split(gt) if(a != b): sys.exit('error: non-homo SNP found @ line# '+linenum+'\n') c=a=b c=int(c) chrom=tmp[header2index["CHROM"]] pos=int(tmp[header2index["POS"]]) ref=tmp[header2index["REF"]] alt=tmp[header2index["ALT"]].split(",") if(c == 0): snps["chr"+chrom].append((pos,ref)) snp_count+=1 elif(c >= 1): snps["chr"+chrom].append((pos,alt[c-1])) snp_count+=1 if(chrom != last_chrom): if(chrom not in chrs): verboseprint("\tchr",chrom) chrs[chrom]=1 last_chrom=chrom vcf_fh.close() verboseprint("found",snp_count,"snps") # process VCF file verboseprint("") # ensure all snps are sorted by position (sorted seperatley for each chromosome) verboseprint("sorting by position") for chr in snps: # ensure sorted by pos snp_positions=snps[chr] verboseprint("\t",chr," ... 
",len(snp_positions)," snps",sep="") sorted_snp_positions=sorted(snp_positions, key=itemgetter(0)) snps[chr]=sorted_snp_positions verboseprint("") # process REFERENCE file verboseprint("processing REF file") # get output name ref_fasta_name=os.path.basename(ref_fasta) ref_fasta_name=re.sub(".gz", "", ref_fasta_name) ref_fasta_name=re.sub(".fasta", "", ref_fasta_name) ref_fasta_name=re.sub(".fa", "", ref_fasta_name) out_fh=open(ref_fasta_name+'__'+name+'.fa',"w") placed_snps=0 total_placed_snps=0 current_snp=(None,None) pos=1 last_chrom=None tmp_pos_list=[(None,None)] # open reference fasta file if ref_fasta.endswith('.gz'): ref_fh=gzip.open(ref_fasta,'r') else: ref_fh=open(ref_fasta,'r') # iterate over fasta file for linenum,line in enumerate(ref_fh): line=line.rstrip("\n") # search for > (contig name) regexp = re.compi
le('>') if regexp.search(line): if line.startswith(">"): chrom=line.lstrip(">") pos=1 print(line,"-",name,file=out_fh,sep="") continue else: # random > found in
line - issue with cat ? sys.exit('error with fasta file'+'\n'+str(line)) if(chrom != last_chrom): tmp_pos_list=[] if(last_chrom != None): verboseprint(" ... ",placed_snps," / ",possible_snps,sep="") tmp_pos_list=[(None,None)] possible_snps=0 if(chrom in snps): tmp_pos_list=snps[chrom] possible_snps=len(tmp_pos_list) verboseprint("\t",chrom,sep="",end="") current_snp=tmp_pos_list.pop(0) total_placed_snps += placed_snps placed_snps=0 tmp_len=len(line) start=pos end=pos+tmp_len-1 while((current_snp[0] < start) and (len(tmp_pos_list) > 0)): print("ERROR: missed snp!",current_snp,"\t",start,"-",end,">",current_snp[0]) current_snp=tmp_pos_list.pop(0) if((current_snp[0] == None) or (current_snp[0] > end)): print(line,file=out_fh) else: char_list=list(line) snp_offset=current_snp[0]-start if((snp_offset < 0) or (snp_offset > len(char_list))): # check to ensure SNP overlaps interval sys.exit('error '+str(current_snp)+' '+str(snp_offset)+' '+str(start)+'-'+str(end)) # replace snp in char arr char_list[snp_offset]=current_snp[1] placed_snps+=1 if(len(tmp_pos_list) == 0): current_snp=(None,None) # handle multiple SNPs per FASTA line (normally 50 chars /buffer/) if(len(tmp_pos_list) > 0): current_snp=tmp_pos_list.pop(0) while((current_snp[0] <= end) and (len(tmp_pos_list) > 0)): snp_offset=current_snp[0]-start # replace snp in char arr char_list[snp_offset]=current_snp[1] placed_snps+=1 current_snp=tmp_pos_list.pop(0) if((current_snp[0] <= end) and (len(tmp_pos_list) == 0)): snp_offset=current_snp[0]-start char_list[snp_offset]=current_snp[1] placed_snps+=1 current_snp=(None,None) # char list to string, and print print(''.join(char_list),file=out_fh) pos += tmp_len last_chrom=chrom ref_fh.close() # handle last line
keeprocking/pygelf
tests/test_common_fields.py
Python
mit
2,884
0.001387
import socket import pytest import mock from pygelf import GelfTcpHandler, GelfUdpHandler, GelfHttpHandler, GelfTlsHandler, GelfHttpsHandler from tests.helper import logger, get_unique_message, log_warning, log_exception SYSLOG_LEVE
L_ERROR = 3 SYSLOG_LEVEL_WARNING = 4 @pytest.fixture(params=[ GelfTcpHandler(host='127.0.0.1', port=12201), GelfUdpHandler(host='127.0.0.1', port=12202), GelfUdpHandler(host='127.0.0.1', port=12202, compress=False), GelfHttpHandler(host='127.0.0.1', port=12203), GelfHttpHandler(host='127.0.
0.1', port=12203, compress=False), GelfTlsHandler(host='127.0.0.1', port=12204), GelfHttpsHandler(host='127.0.0.1', port=12205, validate=False), GelfHttpsHandler(host='localhost', port=12205, validate=True, ca_certs='tests/config/cert.pem'), GelfTlsHandler(host='127.0.0.1', port=12204, validate=True, ca_certs='tests/config/cert.pem'), ]) def handler(request): return request.param def test_simple_message(logger): message = get_unique_message() graylog_response = log_warning(logger, message) assert graylog_response['message'] == message assert graylog_response['level'] == SYSLOG_LEVEL_WARNING assert 'full_message' not in graylog_response assert 'file' not in graylog_response assert 'module' not in graylog_response assert 'func' not in graylog_response assert 'logger_name' not in graylog_response assert 'line' not in graylog_response def test_formatted_message(logger): message = get_unique_message() template = message + '_%s_%s' graylog_response = log_warning(logger, template, args=('hello', 'gelf')) assert graylog_response['message'] == message + '_hello_gelf' assert graylog_response['level'] == SYSLOG_LEVEL_WARNING assert 'full_message' not in graylog_response def test_full_message(logger): message = get_unique_message() try: raise ValueError(message) except ValueError as e: graylog_response = log_exception(logger, message, e) assert graylog_response['message'] == message assert graylog_response['level'] == SYSLOG_LEVEL_ERROR assert message in graylog_response['full_message'] assert 'Traceback (most recent call last)' in graylog_response['full_message'] assert 'ValueError: ' in graylog_response['full_message'] assert 'file' not in graylog_response assert 'module' not in graylog_response assert 'func' not in graylog_response assert 'logger_name' not in graylog_response assert 'line' not in graylog_response def test_source(logger): original_source = socket.gethostname() with mock.patch('socket.gethostname', return_value='different_domain'): message = get_unique_message() graylog_response = log_warning(logger, message) assert graylog_response['source'] == original_source
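A minimal usage sketch for the handlers exercised in the pygelf tests above, assuming only the pygelf classes this file already imports plus the standard-library logging module (the host/port values are placeholders, not taken from a real deployment):

import logging
from pygelf import GelfUdpHandler

# Attach a GELF UDP handler to a logger; each log record is serialized as a
# GELF message and sent to the configured Graylog input.
logger = logging.getLogger('demo')
logger.addHandler(GelfUdpHandler(host='127.0.0.1', port=12202))
logger.warning('hello gelf')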
nmakhotkin/mistral-extra
examples/webhooks/api/controllers/resource.py
Python
apache-2.0
1,470
0
# -*- coding: utf-8 -*- # # Copyright 2013 - Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from wsme import types as wtypes LOG = logging.getLogger(__name__) API_STATUS = wtypes.Enum(str, 'SUPPORTED', 'CURRENT', 'DEPRECATED') class Resource(wtypes.Base): """REST API Resource.""" @classmethod def from_dict(cls, d): # TODO: take care of nested resources obj = cls() for key, val in d.items(): if hasattr(
obj, key): setattr(obj, key, val) return obj def __str__(self): """WSME based implementation
of __str__.""" res = "%s [" % type(self).__name__ first = True for attr in self._wsme_attributes: if not first: res += ', ' else: first = False res += "%s='%s'" % (attr.name, getattr(self, attr.name)) return res + "]"
chromakode/karmabot
karmabot/extensions/lmgtfy.py
Python
bsd-3-clause
2,423
0.000413
# Copyright the Karmabot authors and contributors. # All rights reserved. See AUTHORS. # # This file is part of 'karmabot' and is distributed under the BSD license. # See LICENSE for more details. # dedicated to LC from json import JSONDecoder from urllib import urlencode from urllib2 import urlopen from karmabot.core.client import thing from karmabot.core.commands.sets import CommandSet from karmabot.core.register import facet_registry from karmabot.core.facets impo
rt Facet import re import htmlentitydefs ## # Function Placed in public domain by Fredrik Lundh # http://effbot.org/zone/copyright.htm # http://effbot.org/zone/re-sub.htm#unesc
ape-html # Removes HTML or XML character references and entities from a text string. # # @param text The HTML (or XML) source text. # @return The plain text, as a Unicode string, if necessary. def unescape(text): def fixup(m): text = m.group(0) if text[:2] == "&#": # character reference try: if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) except ValueError: pass else: # named entity try: text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]) except KeyError: pass # leave as is return text return re.sub("&#?\w+;", fixup, text) @facet_registry.register class LmgtfyFacet(Facet): name = "lmgtfy" commands = thing.add_child(CommandSet(name)) @classmethod def does_attach(cls, thing): return thing.name == "lmgtfy" @commands.add(u"lmgtfy {item}", u"googles for a {item}") def lmgtfy(self, context, item): api_url = "http://ajax.googleapis.com/ajax/services/search/web?" response = urlopen(api_url + urlencode(dict(v="1.0", q=item))) response = dict(JSONDecoder().decode(response.read())) top_result = {} if response.get('responseStatus') == 200: results = response.get('responseData').get('results') top_result = results.pop(0) context.reply(", ".join([unescape(top_result.get('titleNoFormatting')), top_result.get('unescapedUrl'), ]))
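The unescape() helper above targets Python 2 (unichr, htmlentitydefs); for reference, the Python 3 standard library provides equivalent behaviour, illustrated by this tiny check (a modern analogue, not the API the karmabot code uses):

import html

# Named entities plus decimal and hex character references are all resolved.
assert html.unescape('&lt;b&gt; &amp; &#33; &#x21;') == '<b> & ! !'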
witcxc/scipy
scipy/linalg/_decomp_qz.py
Python
bsd-3-clause
8,764
0.004792
from __future__ import division, print_function, absolute_import import warnings import numpy as np from numpy import asarray_chkfinite from .misc import LinAlgError, _datacopied from .lapack import get_lapack_funcs from scipy._lib.six import callable __all__ = ['qz'] _double_precision = ['i','l','d'] def _select_function(sort, typ): if typ in ['F','D']: if callable(sort): # assume the user knows what they're doing sfunction = sort elif sort == 'lhp': sfunction = lambda x,y: (np.real(x/y) < 0.0) elif sort == 'rhp': sfunction = lambda x,y: (np.real(x/y) >= 0.0) elif sort == 'iuc': sfunction = lambda x,y: (abs(x/y) <= 1.0) elif sort == 'ouc': sfunction = lambda x,y: (abs(x/y) > 1.0) else: raise ValueError("sort parameter must be None, a callable, or " "one of ('lhp','rhp','iuc','ouc')") elif typ in ['f','d']: if callable(sort): # assume the user knows what they're doing sfunction = sort elif sort == 'lhp': sfunction = lambda x,y,z: (np.real((x+y*1j)/z) < 0.0) elif sort == 'rhp': sfunction = lambda x,y,z: (np.real((x+y*1j)/z) >= 0.0) elif sort == 'iuc': sfunction = lambda x,y,z: (abs((x+y*1j)/z) <= 1.0) elif sort == 'ouc': sfunction = lambda x,y,z: (abs((x+y*1j)/z) > 1.0) else: raise ValueError("sort parameter must be None, a callable, or " "one of ('lhp','rhp','iuc','ouc')") else: # to avoid an error later raise ValueError("dtype %s not understood" % typ) return sfunction def qz(A, B, output='real', lwork=None, sort=None, overwrite_a=False, overwrite_b=False, check_finite=True): """ QZ decomposition for generalized eigenvalues of a pair of matrices. The QZ, or generalized Schur, decomposition for a pair of N x N nonsymmetric matrices (A,B) is:: (A,B) = (Q*AA*Z', Q*BB*Z') where AA, BB is in generalized Schur form if BB is upper-triangular with non-negative diagonal and AA is upper-triangular, or for real QZ decomposition (``output='real'``) block upper triangular with 1x1 and 2x2 blocks. In this case, the 1x1 blocks correspond to real generalized eigenvalues and 2x2 blocks are 'standardized' by making the corresponding elements of BB have the form:: [ a 0 ] [ 0 b ] and the pair of corresponding 2x2 blocks in AA and BB will have a complex conjugate pair of generalized eigenvalues. If (``output='complex'``) or A and B are complex matrices, Z' denotes the conjugate-transpose of Z. Q and Z are unitary matrices. Parameters ---------- A : (N, N) array_like 2d array to decompose B : (N, N) array_like 2d array to decompose output : str {'real','complex'} Construct the real or complex QZ decomposition for real matrices. Default is 'real'. lwork : int, optional Work array size. If None or -1, it is automatically computed. sort : {None, callable, 'lhp', 'rhp', 'iuc', 'ouc'}, optional NOTE: THIS INPUT IS DISABLED FOR NOW, IT DOESN'T WORK WELL ON WINDOWS. Specifies whether the upper eigenvalues should be sorted. A callable may be passed that, given a eigenvalue, returns a boolean denoting whether the eigenvalue should be sorted to the top-left (True). For real matrix pairs, the sort function takes three real arguments (alphar, alphai, beta). The eigenvalue x = (alphar + alphai*1j)/beta. For complex matrix pairs or output='complex', the sort function takes two complex arguments (alpha, beta). The eigenvalue x = (alpha/beta). Alternatively, string parameters may be used: - 'lhp' Left-hand plane (x.real < 0.0) - 'rhp' Right-hand plane (x.real > 0.0) - 'iuc' Inside the unit circle (x*x.conjugate() <= 1.0) - 'ouc' Outside the unit circle (x*x.conjugate() > 1.0) Defaults to None (no sorting). 
check_finite : boolean If true checks the elements of `A` and `B` are finite numbers. If false does no checking and passes matrix through to underlying algorithm. Returns ------- AA : (N, N) ndarray Generalized Schur form of A. BB : (N, N) ndarray Generalized Schur form of B. Q : (N, N) ndarray The left Schur vectors. Z : (N, N) ndarray The right Schur vectors. sdim : int, optional If sorting was requested, a fifth return value will contain the number of eigenvalues for which the sort condition was True. Notes ----- Q is transposed versus the equivalent function in Matlab. .. versionadded:: 0.11.0 Examples -------- >>> from scipy import linalg >>> np.random.seed(1234) >>> A = np.arange(9).reshape((3, 3)) >>> B = np.random.randn(3, 3) >>> AA, BB, Q, Z = linalg.qz(A, B) >>> AA array([[-13.40928183, -4.62471562, 1.09215523], [ 0. , 0. , 1.22805978], [ 0. , 0. , 0.31973817]]) >>> BB array([[ 0.33362547, -1.37393632, 0.02179805], [ 0. , 1.68144922, 0.74683866], [ 0. , 0. , 0.9258294 ]]) >>> Q array([[ 0.14134727, -0.97562773, 0.16784365], [ 0.49835904, -0.07636948, -0.86360059], [ 0.85537081, 0.20571399, 0.47541828]]) >>> Z array([[-0.24900855, -0.51772687, 0.81850696], [-0.79813178, 0.58842606, 0.12938478], [-0.54861681, -0.6210585 , -0.55973739]]) """ if sort is not None: # Disabled due to segfaults on win32, see ticket 1717. raise ValueError("The 'sort' input of qz() has to be None (will " " change when this functionality is made more robust).") if output not in ['real','complex','r','c']: raise ValueError("argument must be 'real', or 'complex'") if check_finite: a1 = asarray_chkfinite(A) b1 = asarray_chkfinite(B) else: a1 = np.asarray(A) b1 = np.asarray(B) a_m, a_n = a1.shape b_m, b_n = b1.shape if not (a_m == a_n == b_m == b_n): raise ValueError("Array dimensions must be square and agree") typa = a1.dtype.char if output in ['complex', 'c'] and typa not in ['F','D']: if typa in _double_precision: a1 = a1.astype('D') typa = 'D' else: a1 = a1.astype('F') typa = 'F' typb = b1.dtype.char if output in ['complex', 'c'] and typb not in ['F','D']: if typb in _double_precision: b1 = b1.astype('D') typb = 'D' else: b1 = b1.astype('F') typb = 'F' overwrite_a = overwrite_a or (_datacopied(a1,A)) overwrite_b = overwrite_b or (_datacopied(b1,B)) gges, = get_lapack_funcs(('gges',), (a1,b1)) if lwork is None or lwork == -1: # get optimal work array size result = gges(lambda x: None, a1, b1, lwork=-1) lwork = result[-2][0].real.astype(np.int) if sort is None: sort_t = 0 sfunction = lambda x: None else: sort_t = 1 sfunction = _select_function(sort, typa) result = gges(sfunction, a1, b1, lwork=lwork, overwrite_a=overwrite_a, overwrite_b=overwrite_b, sort_t=sort_t) info = result[-1] if info < 0: raise ValueError("Illegal value in argument %d of gges" % -info) elif info > 0 and info <= a_n: warnings.warn("The QZ iteration failed. (a,b) are
not in Schur " "form, but ALPHAR(j), ALPHAI(j), and BETA(j) should be correct " "for J=%d,...,N" % (info-1), UserWarning) elif info == a_n+1: raise LinAlgError("Something other than QZ iteration failed") elif info == a_n+2: raise LinAlgError("After reordering,
roundoff changed values of some " "complex eigenvalues s
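The docstring above states the factorization (A,B) = (Q*AA*Z', Q*BB*Z'); a short check of that relation, assuming a SciPy version in which scipy.linalg.qz is available:

import numpy as np
from scipy import linalg

np.random.seed(1234)
A = np.arange(9.0).reshape((3, 3))
B = np.random.randn(3, 3)

AA, BB, Q, Z = linalg.qz(A, B)

# The generalized Schur factors reconstruct the original matrix pair.
assert np.allclose(Q.dot(AA).dot(Z.conj().T), A)
assert np.allclose(Q.dot(BB).dot(Z.conj().T), B)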
cloudbase/nova-virtualbox
nova/openstack/common/versionutils.py
Python
apache-2.0
8,784
0
# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Helpers for comparing version strings. """ import functools import inspect import logging from oslo_config import cfg import pkg_resources import six from nova.openstack.common._i18n import _ LOG = logging.getLogger(__name__) CONF = cfg.CONF opts = [ cfg.BoolOpt('fatal_deprecations', default=False, help='Enables or disables fatal status of deprecations.'), ] class deprecated(object): """A decorator to mark callables as deprecated. This decorator logs a deprecation message when the callable it decorates is used. The message will include the release where the callable was deprecated, the release where it may be removed and possibly an optional replacement. Examples: 1. Specifying the required deprecated release >>> @deprecated(as_of=deprecated.ICEHOUSE) ... def a(): pass 2. Specifying a replacement: >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()') ... def b(): pass 3. Specifying the release where the functionality may be removed: >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=+1) ... def c(): pass 4. Specifying the deprecated functionality will not be removed: >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=0) ... def d(): pass 5. Specifying a replacement, deprecated functionality will not be removed: >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()', remove_in=0) ... def e(): pass """ # NOTE(morganfainberg): Bexar is used for unit test purposes, it is # expected we maintain a gap between Bexar and Folsom in this list. BEXAR = 'B' FOLSOM = 'F' GRIZZLY = 'G' HAVANA = 'H' ICEHOUSE = 'I' JUNO = 'J' KILO = 'K' _RELEASES = { # NOTE(morganfainberg): Bexar is used for unit test purposes, it is # expected we maintain a gap between Bexar and Folsom in this list. 'B': 'Bexar', 'F': 'Folsom', 'G': 'Grizzly', 'H': 'Havana', 'I': 'Icehouse', 'J': 'Juno', 'K': 'Kilo', } _deprecated_msg_with_alternative = _( '%(what)s is deprecated as of %(as_of)s in favor of ' '%(in_favor_of)s and may be removed in %(remove_in)s.') _deprecated_msg_no_alternative = _( '%(what)s is deprecated as of %(as_of)s and may be ' 'removed in %(remove_in)s. It will not be superseded.') _deprecated_msg_with_alternative_no_removal = _( '%(what)s is deprecated as of %(as_of)s in favor of %(in_favor_of)s.') _deprecated_msg_with_no_alternative_no_removal = _( '%(what)s is deprecated as of %(as_of)s. It will not be superseded.') def __init__(self, as_of, in_favor_of=None, remove_in=2, what=None): """Initialize decorator :param as_of: the release deprecating the callable. Constants are define in this class for convenience. 
:param in_favor_of: the replacement for the callable (optional) :param remove_in: an integer specifying how many releases to wait before removing (default: 2) :param what: name of the thing being deprecated (default: the callable's name) """ self.as_of = as_of self.in_favor_of = in_favor_of self.remove_in = remove_in self.what = what def __call__(self, func_or_cls): if not self.what: self.what = func_or_cls.__name__ + '()' msg, details = self._build_message() if inspect.isfunction(func_or_cls): @six.wraps(func_or_cls) def wrapped(*args, **kwarg
s): report_deprecated_feature(LOG, msg, details) return func_or_cls(*args, **kwargs) return wrappe
d elif inspect.isclass(func_or_cls): orig_init = func_or_cls.__init__ # TODO(tsufiev): change `functools` module to `six` as # soon as six 1.7.4 (with fix for passing `assigned` # argument to underlying `functools.wraps`) is released # and added to the oslo-incubator requrements @functools.wraps(orig_init, assigned=('__name__', '__doc__')) def new_init(self, *args, **kwargs): report_deprecated_feature(LOG, msg, details) orig_init(self, *args, **kwargs) func_or_cls.__init__ = new_init return func_or_cls else: raise TypeError('deprecated can be used only with functions or ' 'classes') def _get_safe_to_remove_release(self, release): # TODO(dstanek): this method will have to be reimplemented once # when we get to the X release because once we get to the Y # release, what is Y+2? new_release = chr(ord(release) + self.remove_in) if new_release in self._RELEASES: return self._RELEASES[new_release] else: return new_release def _build_message(self): details = dict(what=self.what, as_of=self._RELEASES[self.as_of], remove_in=self._get_safe_to_remove_release(self.as_of)) if self.in_favor_of: details['in_favor_of'] = self.in_favor_of if self.remove_in > 0: msg = self._deprecated_msg_with_alternative else: # There are no plans to remove this function, but it is # now deprecated. msg = self._deprecated_msg_with_alternative_no_removal else: if self.remove_in > 0: msg = self._deprecated_msg_no_alternative else: # There are no plans to remove this function, but it is # now deprecated. msg = self._deprecated_msg_with_no_alternative_no_removal return msg, details def is_compatible(requested_version, current_version, same_major=True): """Determine whether `requested_version` is satisfied by `current_version`; in other words, `current_version` is >= `requested_version`. :param requested_version: version to check for compatibility :param current_version: version to check against :param same_major: if True, the major version must be identical between `requested_version` and `current_version`. This is used when a major-version difference indicates incompatibility between the two versions. Since this is the common-case in practice, the default is True. :returns: True if compatible, False if not """ requested_parts = pkg_resources.parse_version(requested_version) current_parts = pkg_resources.parse_version(current_version) if same_major and (requested_parts[0] != current_parts[0]): return False return current_parts >= requested_parts # Track the messages we have sent already. See # report_deprecated_feature(). _deprecated_messages_sent = {} def report_deprecated_feature(logger, msg, *args, **kwargs): """Call this function when a deprecated feature is used. If the system is configured for fatal deprecations then the message is logged at the 'critical' level and :class:`DeprecatedConfig` will be raised. Otherwise, the message will be logged (once) at the 'warn' level. :raises: :class:`DeprecatedConfig` if the system is configured for fatal deprecations. """ stdmsg = _("Deprecated: %s") % msg CONF.register_opts(opts) if CONF.fatal_deprecatio
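The is_compatible() helper above compares versions with pkg_resources.parse_version and can additionally require an identical major version; a simplified stand-in with the same semantics (illustrative only, not the oslo-incubator code itself):

from pkg_resources import parse_version

def compatible(requested, current, same_major=True):
    # current must satisfy requested, i.e. current >= requested.
    if same_major and requested.split('.')[0] != current.split('.')[0]:
        return False
    return parse_version(current) >= parse_version(requested)

print(compatible('2.3', '2.3.1'))                  # True
print(compatible('2.3', '3.0'))                    # False - major version differs
print(compatible('2.3', '3.0', same_major=False))  # True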
Humantrashcan/prices
exchanges/opportunity_kraken.py
Python
mit
658
0.024316
from exchanges import helpers from exchanges import kraken from decimal import Decimal ### Kraken opportunities #### ARBITRAGE
OPPORTUNITY 1 def opportunity_1(): sellLTCbuyEUR = kraken.get_current_bid_LTCEUR() sellEURbuyXBT = kraken.get_current_ask_XBTEUR() sellXBTbuyLTC = kraken.get_current_ask_XBTLTC() opport = 1-((sellLTCbuyEUR/sellEURbuyXBT)*sellXBTbuyLTC) return Decimal(opport) def opportunity_2(): sellEURbuyLTC = kraken.get_current_ask_LTCEUR() se
llLTCbuyXBT = kraken.get_current_ask_XBTLTC() sellXBTbuyEUR = kraken.get_current_bid_XBTEUR() opport = 1-(((1/sellEURbuyLTC)/sellLTCbuyXBT)*sellXBTbuyEUR) return Decimal(opport)
PyFilesystem/pyfilesystem2
tests/test_enums.py
Python
mit
335
0
import os from fs
import enums import unittest class T
estEnums(unittest.TestCase): def test_enums(self): self.assertEqual(enums.Seek.current, os.SEEK_CUR) self.assertEqual(enums.Seek.end, os.SEEK_END) self.assertEqual(enums.Seek.set, os.SEEK_SET) self.assertEqual(enums.ResourceType.unknown, 0)
sureshbvn/nlpProject
nGramModel/test.py
Python
mit
2,837
0.004582
#!/usr/bin/env python from __future__ import print_function # Use the srilm module from srilm import * # Initialize a trigram LM variable (1 = unigram, 2 = bigram and so on) n = initLM(5) # Read 'sam
ple.lm' into the LM variable readLM(n, "corpu.lm") # How many n-grams of different order are there ? print("1. Number of n-grams:") print(" There are {} unigrams in this LM"
.format(howManyNgrams(n, 1))) print(" There are {} bigrams in this LM".format(howManyNgrams(n, 2))) print(" There are {} trigrams in this LM".format(howManyNgrams(n, 3))) print(" There are {} 4-grams in this LM".format(howManyNgrams(n, 4))) print(" There are {} 5-grams in this LM".format(howManyNgrams(n, 5))) print() # Query the LM for some n-gram log probabilities. # Note that a SRI language model uses backoff smoothing, so if an n-gram is # not present in the LM, it will compute it using a smoothed lower-order # n-gram distribution. print("2. N-gram log probabilities:") p1 = getUnigramProb(n, 'Naturverbundenheit') print(" p('weil') = {}".format(p1)) p2 = getBigramProb(n, 'of the') print(" p('of the') = {}".format(p2)) p3 = getBigramProb(n, 'Niederlage Deutschlands') print(" p('Niederlage Deutschlands') = {}".format(p3)) p4 = getTrigramProb(n, 'there are some') print(" p('there are some') = {}".format(p4)) # generic n-gram probability function p5 = getNgramProb(n, 'sachin tendulkar .PERIOD', 3) print(" p('sachinr') = {}".format(p5)) p6 = getNgramProb(n, 'or whatever has yet to', 5) print(" p('or whatever has yet to') = {}".format(p6)) print() # Query the LM to get the final log probability for an entire sentence. # Note that this is different from a n-gram probability because # (1) For a sentence, SRILM appends <s> and </s> to its beginning # and the end respectively # (2) The log prob of a probability is the sum of all individual # n-gram log probabilities print("3. Sentence log probabilities and perplexities:") sprob = getSentenceProb(n,'there are some good',4) print(" p('there are some good') = {}".format(sprob)) # the perplexity sppl = getSentencePpl(n,'there are some good', 4) print(" ppl('there are some good') = {}".format(sppl)) print() # number of OOVs in a sentence print("4. OOvs:") noov = numOOVs(n, 'there are some foobar', 4) print(" nOOVs('there are some foobar') = {}".format(noov)) print() # Query the LM to get the total log probability for the file named 'corpus' print("5. Corpus log probabilties and perplexities:") corpus = 'test.txt' corpus_prob = getCorpusProb(n, corpus) print(" Logprob for the file {} = {}".format(corpus, corpus_prob)) # Query the LM to get the perplexity for the file named 'corpus' corpus_ppl = getCorpusPpl(n, corpus); print(" Perplexity for the file {} = {}".format(corpus, corpus_ppl)) # Free LM variable deleteLM(n);
rootio/rootio_telephony
sms_utils/sms_server.py
Python
agpl-3.0
3,873
0.018848
from flask import Flask from flask import request from flask.ext.sqlalchemy import SQLAlchemy import datetime import uuid as uid import sys import requests import urllib2 GOIP_SERVER_IP = '127.0.0.1' #'172.248.114.178' TELEPHONY_SERVER_IP = '127.0.0.1:5000/sms/in' sys.path.append('/home/csik/public_python/sms_server/deploy') #move app = Flask(__name__) from rootio.extensions import db from config import SQLALCHEMY_DATABASE_URI app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI db = SQLAlchemy(app) from rootio.telephony.models import PhoneNumber, Message def debug(request): if request.method == 'POST': deets = request.form.items() print >> sys.stderr, type(deets) deets_method = 'POST' else: deets = request.args.items() print >> sys.stderr, type(deets) deets_method = 'GET' s = "" #print "({0}) parameters via {1}".format(len(deets)-1, deets_method) for deet in deets: s += str(deet) print s return deets @app.route("/", methods=['GET', 'POST']) def hello(): debug(request) return "Hello World!" @app.route("/init_goip", methods=['GET', 'POST']) def init_goip(): try: import send_sms_GOIP if not send_sms_GOIP.create_flags(): raise Exception("Wrong machine") except: print "Unable to init GOIP -- are you sure you called the right machine?" return "Unable to init GOIP", 404 @app.route("/out", methods=['GET', 'POST']) def sms_out(): """ Handles outgoing message requests. Currently only from GOIP8, should be generalized to any type of sending unit, called by station. Expected args: line, to_number, message """ try: import send_sms_GOIP except: print "Unable to init GOIP -- are you sure you called the right machine?" return "Unable to init GOIP", 404 debug(request) line = request.args.get('line') to_number = request.args.get('to_number') message = request.args.get('message') if not line or not to_number or not message: print "Insufficient number of arguments!" return "False" if not send_sms_GOIP.send(line,to_number,message): print "Uh Oh, some kind of error in send_sms_GOIP" return "False" else: return "Sent!" @app.route("/in/", methods=['GET', 'POST']) def sms_in(): """ Handles incoming messages. Currently getting incoming messages from GOIP8, routed to extension 1100 which triggers handle_chat.py Expected args: Event-Date-Timestamp (Unix epoch), from, to, from_number, body """ debug(request) uuid = uid.uuid5(uid.NAMESPACE_DNS, 'rootio.org') edt = datetime.datetime.fromtimestamp(int(request.args.get('Event-Date-Timestamp'))/1000000) #.strftime('%Y-%m-%d %H:%M:%S') fr = request.args.get('from') #This line should look up the station through its from address to = request.args.get('to') #This will be the same for all related units -- again may make sense to have a representation of sending units from_number = request.args.get('from_number') #look up a number now? Save a foreign key body = request.args.get('body') payload = { 'uuid':
uuid, 'edt': edt, 'fr': fr, 'to': to, 'from_number': from_number, 'body': body, } r= requests.get(TELEPHONY_SERVER_IP,params=payload) print r.text return "looks alright " + str(uuid) #return str(str(edt)+'\n'+fr+'->'+to+'\n'+from_number+'\n'+body+'\n'+uuid) if _
_name__ == "__main__": app.run(debug=True) r = requests.get('http://'+GOIP_SERVER_IP+'/init_goip')
isc-projects/forge
tests/softwaresupport/bind9_server/bind_configs.py
Python
isc
54,716
0.009668
# Copyright (C) 2013-2020 Internet Systems Consortium. # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM # DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL # INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT, # INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING # FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, # NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION # WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # Author: Wlodzimierz Wencel config_file_set = { #number : [named.co
nf, rndc.conf, fwd.db, rev.db ] 1: [""" options { directory "${data_path}"; // Working directory listen-on-v6 port ${dns_port} { ${dns_addr}; }; allow-query-cache {
none; }; // Do not allow access to cache allow-update { any; }; // This is the default allow-query { any; }; // This is the default recursion no; // Do not provide recursive service }; zone "1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa" { type master; file "rev.db"; notify no; allow-update { any; }; // This is the default allow-query { any; }; // This is the default }; zone "six.example.com" { type master; file "fwd.db"; notify no; allow-update { any; }; // This is the default allow-transfer { any; }; allow-query { any; }; // This is the default }; #Use with the following in named.conf, adjusting the allow list as needed: key "rndc-key" { algorithm hmac-md5; secret "+kOEcvxPTCPxzGqB5n5FeA=="; }; controls { inet 127.0.0.1 port 53001 allow { 127.0.0.1; } keys { "rndc-key"; }; }; logging{ channel simple_log { file "/tmp/dns.log"; severity debug 99; print-time yes; print-severity yes; print-category yes; }; category default{ simple_log; }; category queries{ simple_log; }; }; """, """ key "rndc-key" { algorithm hmac-md5; secret "+kOEcvxPTCPxzGqB5n5FeA=="; }; options { default-key "rndc-key"; default-server 127.0.0.1; default-port 953; }; """, """$ORIGIN . $TTL 86400 ; 1 day six.example.com IN SOA dns6-1.six.example.com. mail.six.example.com. ( 107 ; serial 3600 ; refresh (1 hour) 900 ; retry (15 minutes) 2592000 ; expire (4 weeks 2 days) 3600 ; minimum (1 hour) ) NS dns6-1.six.example.com. $ORIGIN six.example.com. dns6-1 AAAA 2001:db8:1::1 nanny6 AAAA 2001:db8:1::10 """, """$ORIGIN . $TTL 3600 ; 1 hour 1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa IN SOA dns6-1.six.example.com. mail.six.example.com. ( 102 ; serial 3600 ; refresh (1 hour) 900 ; retry (15 minutes) 604800 ; expire (1 week) 3600 ; minimum (1 hour) ) NS dns6-1.six.example.com. $ORIGIN 1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa. 0 PTR nanny6.six.exmaple.com.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa. """], 2: [""" options { directory "${data_path}"; // Working directory listen-on-v6 port ${dns_port} { ${dns_addr}; }; allow-query-cache { none; }; // Do not allow access to cache allow-update { any; }; // This is the default allow-query { any; }; // This is the default recursion no; // Do not provide recursive service }; zone "1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa" { type master; file "rev.db"; notify no; allow-update { any; }; // This is the default allow-query { any; }; // This is the default }; zone "six.example.com" { type master; file "fwd.db"; notify no; allow-update { any; }; // This is the default allow-transfer { any; }; allow-query { any; }; // This is the default }; #Use with the following in named.conf, adjusting the allow list as needed: key "rndc-key" { algorithm hmac-md5; secret "+kOEcvxPTCPxzGqB5n5FeA=="; }; controls { inet 127.0.0.1 port 53001 allow { 127.0.0.1; } keys { "rndc-key"; }; }; logging{ channel simple_log { file "/tmp/dns.log"; severity debug 99; print-time yes; print-severity yes; print-category yes; }; category default{ simple_log; }; category queries{ simple_log; }; }; """, """ key "rndc-key" { algorithm hmac-md5; secret "+kOEcvxPTCPxzGqB5n5FeA=="; }; options { default-key "rndc-key"; default-server 127.0.0.1; default-port 953; }; """, """$ORIGIN . $TTL 86400 ; 1 day six.example.com IN SOA dns6-1.six.example.com. mail.six.example.com. ( 107 ; serial 3600 ; refresh (1 hour) 900 ; retry (15 minutes) 2592000 ; expire (4 weeks 2 days) 3600 ; minimum (1 hour) ) NS dns6-1.six.example.com. $ORIGIN six.example.com. dns6-1 AAAA 2001:db8:1::1 """, """$ORIGIN . 
$TTL 3600 ; 1 hour 1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa IN SOA dns6-1.six.example.com. mail.six.example.com. ( 102 ; serial 3600 ; refresh (1 hour) 900 ; retry (15 minutes) 604800 ; expire (1 week) 3600 ; minimum (1 hour) ) NS dns6-1.six.example.com. $ORIGIN 1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa. 0 PTR dns6-1.six.example.com.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa. """], 3: [""" options { directory "${data_path}"; // Working directory listen-on-v6 port ${dns_port} { ${dns_addr}; }; allow-query-cache { none; }; // Do not allow access to cache allow-update { any; }; // This is the default allow-query { any; }; // This is the default recursion no; // Do not provide recursive service }; zone "1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa" { type master; file "rev.db"; notify no; allow-update { key forge.sha1.key; }; allow-query { any; }; }; zone "six.example.com" { type master; file "fwd.db"; notify no; allow-update { key forge.sha1.key; }; allow-query { any; }; }; key "forge.sha1.key" { algorithm hmac-sha1; secret "PN4xKZ/jDobCMlo4rpr70w=="; }; #Use with the following in named.conf, adjusting the allow list as needed: key "rndc-key" { algorithm hmac-md5; secret "+kOEcvxPTCPxzGqB5n5FeA=="; }; controls { inet 127.0.0.1 port 53001 allow { 127.0.0.1; } keys { "rndc-key"; }; }; logging{ channel simple_log { file "/tmp/dns.log"; severity debug 99; print-time yes; print-severity yes; print-category yes; }; category default{ simple_log; }; category queries{ simple_log; }; }; """, """ key "rndc-key" { algorithm hmac-md5; secret "+kOEcvxPTCPxzGqB5n5FeA=="; }; options { default-key "rndc-key"; default-server 127.0.0.1; default-port 953; }; """, """$ORIGIN . $TTL 86400 ; 1 day six.example.com IN SOA dns6-1.six.example.com. mail.six.example.com. ( 107 ; serial 3600 ; refresh (1 hour) 900 ; retry (15 minutes) 2592000 ; expire (4 weeks 2 days) 3600 ; minimum (1 hour) ) NS dns6-1.six.example.com. $ORIGIN six.example.com. dns6-1 AAAA 2001:db8:1::1 nanny6 AAAA 2001:db8:1::10 """, """$ORIGIN . $TTL 3600 ; 1 hour 1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa IN SOA dns6-1.six.example.com. mail.six.example.com. ( 102 ; serial 3600 ; refresh (1 hour) 900 ; retry (15 minutes) 604800 ; expire (1 week) 3600 ; minimum (1 hour) ) NS dns6-1.six.example.com. $ORIGIN 1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa. 0 PTR nanny6.six.exmaple.com.1.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa. """], 4: [""" options { directory "${data_path}"; // Working directory listen-on-v6 port ${dns_port} { ${dns_addr}; }; allow-query-cache { none; };
jaybutera/tetrisRL
run_model.py
Python
mit
1,627
0.009834
impor
t sys import os import torch import time from engine import TetrisEngine from dqn_agent import DQN, ReplayMemory, Transition from torch.autograd import Variable use_cuda = torch.cuda.is_available() FloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor LongTensor = torch.cuda.LongTensor if use_cuda else torch.LongTensor width, height = 10, 20 # standard tetris friends rules engine = TetrisE
ngine(width, height) def load_model(filename): model = DQN() if use_cuda: model.cuda() checkpoint = torch.load(filename) model.load_state_dict(checkpoint['state_dict']) return model def run(model): state = FloatTensor(engine.clear()[None,None,:,:]) score = 0 while True: action = model(Variable(state, volatile=True).type(FloatTensor)).data.max(1)[1].view(1,1).type(LongTensor) print( model(Variable(state, volatile=True).type(FloatTensor)).data) state, reward, done = engine.step(action[0,0]) state = FloatTensor(state[None,None,:,:]) # Accumulate reward score += int(reward) print(engine) print(action) time.sleep(.1) if done: print('score {0}'.format(score)) break if len(sys.argv) <= 1: print('specify a filename to load the model') sys.exit(1) if __name__ == '__main__': filename = sys.argv[1] if os.path.isfile(filename): print("=> loading model '{}'".format(filename)) model = load_model(filename).eval() run(model) else: print("=> no file found at '{}'".format(filename))
VikParuchuri/evolve-music2
extract_features.py
Python
mit
6,422
0.001557
import pandas as pd import logging import settings import os from scikits.audiolab import oggwrite, play, oggread from scipy.fftpack import dct from itertools import chain import numpy as np import math log = logging.getLogger(__name__) def read_sound(fpath, limit=settings.MUSIC_TIME_LIMIT): try: data, fs, enc = oggread(fpath) upto = fs * limit except IOError: log.error("Could not read file at {0}".format(fpath)) raise IOError if data.shape[0] < upto: log.error("Music file at {0} not long enough.".format(fpath)) raise ValueError try: if len(data.shape) == 1 or data.shape[1] != 2: data = np.vstack([data, data]).T except Exception: log.error("Invalid dimension count for file at {0}. Do you have left and right channel audio?".format(fpath)) raise ValueError data = data[0:upto, :] return data, fs, enc def calc_slope(x, y): x_mean = np.mean(x) y_mean = np.mean(y) x_dev = np.sum(np.abs(np.subtract(x, x_mean))) y_dev = np.sum(np.abs(np.subtract(y, y_mean))) slope = (x_dev * y_dev) / (x_dev * x_dev) return slope def get_indicators(vec): mean = np.mean(vec) slope = calc_slope(np.arange(len(vec)), vec) std = np.std(vec) return mean, slope, std def calc_u(vec): fft = np.fft.fft(vec) return np.sum(np.multiply(fft, vec)) / np.sum(vec) def calc_mfcc(fft): ps = np.abs(fft) ** 2 fs = np.dot(ps, mel_filter(ps.shape[0])) ls = np.log(fs) ds = dct(ls, type=2) return ds def mel_filter(blockSize): numBands = 13 maxMel = int(freqToMel(24000)) minMel = int(freqToMel(10)) filterMatrix = np.zeros((numBands, blockSize)) melRange = np.array(xrange(numBands + 2)) melCenterFilters = melRange * (maxMel - minMel) / (numBands + 1) + minMel aux = np.log(1 + 1000.0 / 700.0) / 1000.0 aux = (np.exp(melCenterFilters * aux) - 1) / 22050 aux = 0.5 + 700 * blockSize * aux aux = np.floor(aux) # Arredonda pra baixo centerIndex = np.array(aux, int) # Get int values for i in xrange(numBands): start, center, end = centerIndex[i:(i + 3)] k1 = np.float32(center - start) k2 = np.float32(end - center) up = (np.array(xrange(start, center)) - start) / k1 down = (end - np.array(xrange(center, end))) / k2 filterMatrix[i][start:center] = up try: filterMatrix[i][center:end] = down except ValueError: pass return filterMatrix.transpose() def freqToMel(freq): return 1127.01048 * math.log(1 + freq / 700.0) def melToFreq(freq): return 700 * (math.exp(freq / 1127.01048 - 1)) def calc_features(vec, freq): # bin count bc = settings.MUSIC_TIME_LIMIT bincount = list(range(bc)) # framesize fsize = 512 #mean m = np.mean(vec) #spectral flux sf = np.mean(vec - np.roll(vec, fsize)) mx = np.max(vec) mi = np.min(vec) sdev = np.std(vec) binwidth = len(vec) / bc bins = [] for i in xrange(0, bc): bins.append(vec[(i * binwidth):(binwidth * i + binwidth)]) peaks = [np.max(i) for i in bins] mins = [np.min(i) for i in bins] amin, smin, stmin = get_indicators(mins) apeak, speak, stpeak = get_indicators(peaks) #fft = np.fft.fft(vec) bin_fft = [] for i in xrange(0, bc): bin_fft.append(np.fft.fft(vec[(i * binwidth):(binwidth * i + binwidth)])) mel = [list(calc_mfcc(j)) for (i, j) in enumerate(bin_fft) if i % 3 == 0] mels = list(chain.from_iterable(mel)) cepstrums = [np.fft.ifft(np.log(np.abs(i))) for i in bin_fft] inter = [get_indicators(i) for i in cepstrums] acep, scep, stcep = get_indicators([i[0] for i in inter]) aacep, sscep, stsscep = get_indicators([i[1] for i in inter]) zero_crossings = np.where(np.diff(np.sign(vec)))[0] zcc = len(zero_crossings) zccn = zcc / freq u = [calc_u(i) for i in bins] spread = np.sqrt(u[-1] - u[0] ** 2) skewness = (u[0] ** 3 - 3 * u[0] * 
u[5] + u[-1]) / spread ** 3 #Spectral slope #ss = calc_slope(np.arange(len(fft)),fft) avss = [calc_slope(np.arange(len(i)), i) for i in bin_fft] savss = calc_slope(bincount, avss) mavss = np.mean(avss) features = [m, sf, mx, mi, sdev, amin, smin, stmin, apeak, speak, stpeak, acep, scep, stcep, aacep, sscep, stsscep, zcc, zccn, spread, skewness, savss, mavss] + mels + [i[0] for (j, i) in enumerate(inter) if j % 5 == 0] for i in xrange(0, len(features)): try: features[i] = features[i].real except Exception: pass return features def extract_features(sample, freq): left = calc_features(sample[:, 0], freq) right = calc_features(sample[:, 1], freq) return left + right def process_song(vec, f): try: features = extract_features(vec, f) except Exception: log.error("Cannot generate features for file {0}".format(f)) return None return features def generate_features(filepath): frame = None data, fs, enc = read_sound(filepath) features = process_song(data, fs) frame = pd.Series(features) frame['fs'] = fs frame['enc'] = enc frame['fname'] = filepath return frame def generate_train_features(): if not os.path.isfile(settings.TRAIN_FEATURE_PATH): d = [] encs = [] fss = [] fnames = [] for i, p in enumerate(os.listdir(settings.OGG_DIR)): if not p.endswith(".ogg"):
continue log.debug("On file {0}".format(p)) filepath = os.path.join(settings.OGG_DIR, p) try: data, fs, enc = read_sound(filepath) except Exception:
continue try: features = process_song(data, fs) except Exception: log.error("Could not get features for file {0}".format(p)) continue d.append(features) fss.append(fs) encs.append(enc) fnames.append(p) frame = pd.DataFrame(d) frame['fs'] = fss frame['enc'] = encs frame['fname'] = fnames frame.to_csv(settings.TRAIN_FEATURE_PATH) else: frame = pd.read_csv(settings.TRAIN_FEATURE_PATH) frame = frame.iloc[:, 1:] return frame
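freqToMel()/melToFreq() earlier in this file implement the mel-scale mapping that mel_filter() relies on; a small round-trip sketch using the algebraic inverse of the freqToMel formula (this inverse differs slightly from the melToFreq shown above, which keeps the -1 inside the exponential):

import math

def freq_to_mel(freq):
    return 1127.01048 * math.log(1 + freq / 700.0)

def mel_to_freq(mel):
    # Exact algebraic inverse of freq_to_mel.
    return 700.0 * (math.exp(mel / 1127.01048) - 1)

for f in (10.0, 440.0, 24000.0):
    assert abs(mel_to_freq(freq_to_mel(f)) - f) < 1e-6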
srottem/indy-sdk
samples/python/src/anoncreds.py
Python
apache-2.0
7,929
0.005171
import time from indy import anoncreds, wallet import json import logging from indy import pool from src.utils import run_coroutine, PROTOCOL_VERSION logger = logging.getLogger(__name__) async def demo(): logger.info("Anoncreds sample -> started") issuer = { 'did': 'NcYxiDXkpYi6ov5FcYDi1e', 'wallet_config': json.dumps({'id': 'issuer_wallet'}), 'wallet_credentials': json.dumps({'key': 'issuer_wallet_key'}) } prover = { 'did': 'VsKV7grR1BUE29mG2Fm2kX', 'wallet_config': json.dumps({"id": "prover_wallet"}), 'wallet_credentials': json.dumps({"key": "issuer_wallet_key"}) } v
erifier = {} store = {} # Set protocol version 2 to work with Indy Node 1.4 await pool.set_protocol_version(PROTOCOL_VERSION) # 1. Create Issuer Wallet and Get Wallet Handle await wallet.create_wallet(issuer['wallet_config'], issuer['wallet_credentials']) issuer['wallet'] = await wallet.open_wallet(issuer['wallet_config'], issuer['wallet_
credentials']) # 2. Create Prover Wallet and Get Wallet Handle await wallet.create_wallet(prover['wallet_config'], prover['wallet_credentials']) prover['wallet'] = await wallet.open_wallet(prover['wallet_config'], prover['wallet_credentials']) # 3. Issuer create Credential Schema schema = { 'name': 'gvt', 'version': '1.0', 'attributes': '["age", "sex", "height", "name"]' } issuer['schema_id'], issuer['schema'] = await anoncreds.issuer_create_schema(issuer['did'], schema['name'], schema['version'], schema['attributes']) store[issuer['schema_id']] = issuer['schema'] # 4. Issuer create Credential Definition for Schema cred_def = { 'tag': 'cred_def_tag', 'type': 'CL', 'config': json.dumps({"support_revocation": False}) } issuer['cred_def_id'], issuer['cred_def'] = await anoncreds.issuer_create_and_store_credential_def( issuer['wallet'], issuer['did'], issuer['schema'], cred_def['tag'], cred_def['type'], cred_def['config']) store[issuer['cred_def_id']] = issuer['cred_def'] # 5. Prover create Master Secret prover['master_secret_id'] = await anoncreds.prover_create_master_secret(prover['wallet'], None) # 6. Issuer create Credential Offer issuer['cred_offer'] = await anoncreds.issuer_create_credential_offer(issuer['wallet'], issuer['cred_def_id']) prover['cred_offer'] = issuer['cred_offer'] cred_offer = json.loads(prover['cred_offer']) prover['cred_def_id'] = cred_offer['cred_def_id'] prover['schema_id'] = cred_offer['schema_id'] prover['cred_def'] = store[prover['cred_def_id']] prover['schema'] = store[prover['schema_id']] # 7. Prover create Credential Request prover['cred_req'], prover['cred_req_metadata'] = \ await anoncreds.prover_create_credential_req(prover['wallet'], prover['did'], prover['cred_offer'], prover['cred_def'], prover['master_secret_id']) # 8. Issuer create Credential prover['cred_values'] = json.dumps({ "sex": {"raw": "male", "encoded": "5944657099558967239210949258394887428692050081607692519917050011144233"}, "name": {"raw": "Alex", "encoded": "1139481716457488690172217916278103335"}, "height": {"raw": "175", "encoded": "175"}, "age": {"raw": "28", "encoded": "28"} }) issuer['cred_values'] = prover['cred_values'] issuer['cred_req'] = prover['cred_req'] (cred_json, _, _) = await anoncreds.issuer_create_credential(issuer['wallet'], issuer['cred_offer'], issuer['cred_req'], issuer['cred_values'], None, None) prover['cred'] = cred_json # 9. Prover store Credential await anoncreds.prover_store_credential(prover['wallet'], None, prover['cred_req_metadata'], prover['cred'], prover['cred_def'], None) # 10. 
Prover gets Credentials for Proof Request verifier['proof_req'] = json.dumps({ 'nonce': '123432421212', 'name': 'proof_req_1', 'version': '0.1', 'requested_attributes': { 'attr1_referent': {'name': 'name'} }, 'requested_predicates': { 'predicate1_referent': {'name': 'age', 'p_type': '>=', 'p_value': 18} } }) prover['proof_req'] = verifier['proof_req'] # Prover gets Credentials for attr1_referent prover['cred_search_handle'] = \ await anoncreds.prover_search_credentials_for_proof_req(prover['wallet'], prover['proof_req'], None) creds_for_attr1 = await anoncreds.prover_fetch_credentials_for_proof_req(prover['cred_search_handle'], 'attr1_referent', 10) prover['cred_for_attr1'] = json.loads(creds_for_attr1)[0]['cred_info'] # Prover gets Credentials for predicate1_referent creds_for_predicate1 = await anoncreds.prover_fetch_credentials_for_proof_req(prover['cred_search_handle'], 'predicate1_referent', 10) prover['cred_for_predicate1'] = json.loads(creds_for_predicate1)[0]['cred_info'] await anoncreds.prover_close_credentials_search_for_proof_req(prover['cred_search_handle']) # 11. Prover create Proof for Proof Request prover['requested_creds'] = json.dumps({ 'self_attested_attributes': {}, 'requested_attributes': {'attr1_referent': {'cred_id': prover['cred_for_attr1']['referent'], 'revealed': True}}, 'requested_predicates': {'predicate1_referent': {'cred_id': prover['cred_for_predicate1']['referent']}} }) schemas_json = json.dumps({prover['schema_id']: json.loads(prover['schema'])}) cred_defs_json = json.dumps({prover['cred_def_id']: json.loads(prover['cred_def'])}) revoc_states_json = json.dumps({}) prover['proof'] = await anoncreds.prover_create_proof(prover['wallet'], prover['proof_req'], prover['requested_creds'], prover['master_secret_id'], schemas_json, cred_defs_json, revoc_states_json) verifier['proof'] = prover['proof'] # 12. Verifier verify proof proof = json.loads(verifier['proof']) assert 'Alex' == proof['requested_proof']['revealed_attrs']['attr1_referent']['raw'] identifier = proof['identifiers'][0] verifier['cred_def_id'] = identifier['cred_def_id'] verifier['schema_id'] = identifier['schema_id'] verifier['cred_def'] = store[verifier['cred_def_id']] verifier['schema'] = store[verifier['schema_id']] schemas_json = json.dumps({verifier['schema_id']: json.loads(verifier['schema'])}) cred_defs_json = json.dumps({verifier['cred_def_id']: json.loads(verifier['cred_def'])}) revoc_ref_defs_json = "{}" revoc_regs_json = "{}" assert await anoncreds.verifier_verify_proof(verifier['proof_req'], verifier['proof'], schemas_json, cred_defs_json, revoc_ref_defs_json, revoc_regs_json) # 13. Close and delete Issuer wallet await wallet.close_wallet(issuer['wallet']) await wallet.delete_wallet(issuer['wallet_config'], issuer['wallet_credentials']) # 14. Close and delete Prover wallet await wallet.close_wallet(prover['wallet']) await wallet.delete_wallet(prover['wallet_config'], prover['wallet_credentials']) logger.info("Anoncreds sample -> completed") if __name__ == '__main__': run_coroutine(demo) time.sleep(1) # FIXME waiting for libindy thread complete
sajuptpm/contrail-controller
src/config/svc-monitor/svc_monitor/rabbit.py
Python
apache-2.0
4,079
0.001226
import gevent import socket from vnc_api.vnc_api import * from cfgm_common.vnc_kombu import VncKombuClient from config_db import * from cfgm_common.dependency_tracker import DependencyTracker from reaction_map import REACTION_MAP import svc_monitor class RabbitConnection(object): _REACTION_MAP = REACTION_MAP def __init__(self, logger, args=None): self._args = args self.logger = logger def _connect_rabbit(self): rabbit_server = self._args.rabbit_server rabbit_port = self._args.rabbit_port rabbit_user = self._args.rabbit_user rabbit_password = self._args.rabbit_password rabbit_vhost = self._args.rabbit_vhost rabbit_ha_mode = self._args.rabbit_ha_mode self._db_resync_done = gevent.event.Event() q_name = 'svc_mon.%s' % (socket.gethostname()) self._vnc_kombu = VncKombuClient(rabbit_server, rabbit_port, rabbit_user, rabbit_password, rabbit_vhost, rabbit_ha_mode, q_name, self._vnc_subscribe_callback, self.logger.log) def _vnc_subscribe_callback(self, oper_info): self._db_resync_done.wait() try: self._vnc_subscribe_actions(oper_info) except Exception: svc_monitor.cgitb_error_log(self) def _vnc_subscri
be_actions(self, oper_info): msg = "Notification Message: %s" % (pformat(oper_info)) self.logger.log_debug(msg) obj_type = oper_info['type'].replace('-', '_') obj_class = DBBaseSM.get_obj_type_map().get(obj_type) if obj_class is None: return if oper_info['oper'] == 'CREATE': obj_dict = oper_info['obj_dict'] obj_id = oper_info['uuid'] obj = obj_class.loca
te(obj_id) dependency_tracker = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) dependency_tracker.evaluate(obj_type, obj) elif oper_info['oper'] == 'UPDATE': obj_id = oper_info['uuid'] obj = obj_class.get(obj_id) old_dt = None if obj is not None: old_dt = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) old_dt.evaluate(obj_type, obj) else: obj = obj_class.locate(obj_id) obj.update() dependency_tracker = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) dependency_tracker.evaluate(obj_type, obj) if old_dt: for resource, ids in old_dt.resources.items(): if resource not in dependency_tracker.resources: dependency_tracker.resources[resource] = ids else: dependency_tracker.resources[resource] = list( set(dependency_tracker.resources[resource]) | set(ids)) elif oper_info['oper'] == 'DELETE': obj_id = oper_info['uuid'] obj = obj_class.get(obj_id) if obj is None: return dependency_tracker = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) dependency_tracker.evaluate(obj_type, obj) obj_class.delete(obj_id) else: # unknown operation self.logger.log_error('Unknown operation %s' % oper_info['oper']) return if obj is None: self.logger.log_error('Error while accessing %s uuid %s' % ( obj_type, obj_id)) return for res_type, res_id_list in dependency_tracker.resources.items(): if not res_id_list: continue cls = DBBaseSM.get_obj_type_map().get(res_type) if cls is None: continue for res_id in res_id_list: res_obj = cls.get(res_id) if res_obj is not None: res_obj.evaluate()
vipmike007/avocado-vt
virttest/iscsi.py
Python
gpl-2.0
29,173
0.001165
""" Basic iscsi support for Linux host with the help of commands iscsiadm and tgtadm. This include the basic operates such as login and get device name by target name. And it can support the real iscsi access and emulated iscsi in localhost then access it. """ import re import os import logging from avocado.core import exceptions from avocado.utils import data_factory from avocado.utils import process from avocado.utils import path from . import utils_selinux from . import utils_net from . import data_dir ISCSI_CONFIG_FILE = "/etc/iscsi/initiatorname.iscsi" def iscsi_get_sessions(): """ Get the iscsi sessions activated """ cmd = "iscsiadm --mode session" output = process.system_output(cmd, ignore_status=True) sessions = [] if "No active sessions" not in output: for session in output.splitlines(): ip_addr = session.split()[2].split(',')[0] target = session.split()[3] sessions.append((ip_addr, target)) return sessions def iscsi_get_nodes(): """ Get the iscsi nodes """ cmd = "iscsiadm --mode node" output = process.system_output(cmd, ignore_status=True) pattern = r"(\d+\.\d+\.\d+\.\d+|\[.+\]):\d+,\d+\s+([\w\.\-:\d]+)" nodes = [] if "No records found" not in output: nodes = re.findall(pattern, output) return nodes def iscsi_login(target_name, portal): """ Login to a target with the target name :param target_name: Name of the target :params portal: Hostname/Ip for iscsi server """ cmd = "iscsiadm --mode node --login --targetname %s" % target_name cmd += " --portal %s" % portal output = process.system_output(cmd) target_login = "" if "successful" in output: target_login = target_name return target_login def iscsi_node_del(target_name=None): """ Delete target node record, if the target name is not set then delete all target node records. :params target_name: Name of the target. """ node_list = iscsi_get_nodes() cmd = '' if target_name: for node_tup in node_list: if target_name in node_tup: cmd = "iscsiadm -m node -o delete -T %s " % target_name cmd += "--portal %s" % node_tup[0] process.system(cmd, ignore_status=True) break if not cmd: logging.error("The target '%s' for delete is not in target node" " record", target_name) else: for node_tup in node_list: cmd = "iscsiadm -m node -o delete -T %s " % node_tup[1] cmd += "--portal %s" % node_tup[0] process.system(cmd, ignore_status=True) def iscsi_logout(target_name=None): """ Logout from a target. If the target name is not set then logout all targets. :params target_name: Name of the target. """ if target_name: cmd = "iscsiadm --mode node --logout -T %s" % target_name else: cmd = "iscsiadm --mode node --logout all" output = process.system_output(cmd) target_logout = "" if "successful" in output: target_logout = target_name return target_logout def iscsi_discover(portal_ip): """ Query from iscsi server for available targets :param portal_ip: Ip for iscsi server """ cmd = "iscsiadm -m discovery -t sendtargets -p %
s" % portal_ip output = process.system_output(cmd, ignore_status=True) session = "" if "Invalid" in output: logging.debug(output) else: session = output return session class _Is
csiComm(object): """ Provide an interface to complete the similar initialization """ def __init__(self, params, root_dir): """ common __init__ function used to initialize iSCSI service :param params: parameters dict for iSCSI :param root_dir: path for image """ self.target = params.get("target") self.export_flag = False self.luns = None self.restart_tgtd = 'yes' == params.get("restart_tgtd", "no") if params.get("portal_ip"): self.portal_ip = params.get("portal_ip") else: self.portal_ip = "127.0.0.1" if params.get("iscsi_thread_id"): self.id = params.get("iscsi_thread_id") else: self.id = data_factory.generate_random_string(4) self.initiator = params.get("initiator") # CHAP AUTHENTICATION self.chap_flag = False self.chap_user = params.get("chap_user") self.chap_passwd = params.get("chap_passwd") if self.chap_user and self.chap_passwd: self.chap_flag = True if params.get("emulated_image"): self.initiator = None emulated_image = params.get("emulated_image") self.emulated_image = os.path.join(root_dir, emulated_image) self.device = "device.%s" % os.path.basename(self.emulated_image) self.emulated_id = "" self.emulated_size = params.get("image_size") self.unit = self.emulated_size[-1].upper() self.emulated_size = self.emulated_size[:-1] # maps K,M,G,T => (count, bs) emulated_size = {'K': (1, 1), 'M': (1, 1024), 'G': (1024, 1024), 'T': (1024, 1048576), } if emulated_size.has_key(self.unit): block_size = emulated_size[self.unit][1] size = int(self.emulated_size) * emulated_size[self.unit][0] self.emulated_expect_size = block_size * size self.create_cmd = ("dd if=/dev/zero of=%s count=%s bs=%sK" % (self.emulated_image, size, block_size)) else: self.device = None def logged_in(self): """ Check if the session is login or not. """ sessions = iscsi_get_sessions() login = False if self.target in map(lambda x: x[1], sessions): login = True return login def portal_visible(self): """ Check if the portal can be found or not. """ return bool(re.findall("%s$" % self.target, iscsi_discover(self.portal_ip), re.M)) def set_initiatorName(self, id, name): """ back up and set up the InitiatorName """ if os.path.isfile("%s" % ISCSI_CONFIG_FILE): logging.debug("Try to update iscsi initiatorname") cmd = "mv %s %s-%s" % (ISCSI_CONFIG_FILE, ISCSI_CONFIG_FILE, id) process.system(cmd) fd = open(ISCSI_CONFIG_FILE, 'w') fd.write("InitiatorName=%s" % name) fd.close() process.system("service iscsid restart") def login(self): """ Login session for both real iscsi device and emulated iscsi. Include env check and setup. """ login_flag = False if self.portal_visible(): login_flag = True elif self.initiator: self.set_initiatorName(id=self.id, name=self.initiator) if self.portal_visible(): login_flag = True elif self.emulated_image: self.export_target() # If both iSCSI server and iSCSI client are on localhost. # It's necessary to set up the InitiatorName. if "127.0.0.1" in self.portal_ip: self.set_initiatorName(id=self.id, name=self.target) if self.portal_visible(): login_flag = True if login_flag: iscsi_login(self.target, self.portal_ip) def get_device_name(self): """ Get device name from the target name. """ cmd = "iscsiadm -m session -P 3" device_name = "" if self.logged_in(): output = process.system_output(cmd) pattern = r"Target:\s+%s.*?disk\s(\w+)\s+\S+\srunning" % self.target device_name = re.findall(pattern, output, re.S)
v-iam/azure-sdk-for-python
azure-mgmt-cognitiveservices/azure/mgmt/cognitiveservices/models/error.py
Python
mit
1,252
0.000799
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # --------
------------------------------------------------------------------ from msrest.serialization import Model from msrest.exceptions import HttpOperationError class Error(Model): """Cognitive Services error object. :param error: The error body. :type error: :class:`ErrorBody <azure.mgmt.cognitiveservices.models.ErrorBody>` """ _attribute_map = { 'error': {'key': 'error', 'type': 'ErrorBody'}, } def __init__(self, error=None): s
elf.error = error


class ErrorException(HttpOperationError):
    """Server responded with exception of type: 'Error'.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):

        super(ErrorException, self).__init__(deserialize, response, 'Error', *args)
dsiddharth/access-keys
keystone/common/sql/migrate_repo/versions/001_add_initial_tables.py
Python
apache-2.0
5,811
0
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sql def upgrade(migrate_engine): # Upgrade operations go here. Don't create your own engine; bind # migrate_engine to your metadata meta = sql.MetaData() meta.bind = migrate_engine # catalog service_table = sql.Table( 'service', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('type', sql.String(255)), sql.Column('extra', sql.Text())) service_table.create(migrate_engine, checkfirst=True) endpoint_table = sql.Table( 'endpoint', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('region', sql.String(255)), sql.Column('service_id', sql.String(64), sql.ForeignKey('service.id'), nullable=False), sql.Column('extra', sql.Text())) endpoint_table.create(migrate_engine, checkfirst=True) # identity role_table = sql.Table( 'role', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('name', sql.String(255), unique=True, nullable=False)) role_table.create(migrate_engine, checkfirst=True) if migrate_engine.name == 'ibm_db_sa': # NOTE(blk-u): SQLAlchemy for PostgreSQL picks the name tenant_name_key # for the unique constraint, but for DB2 doesn't give the UC a name # unless we tell it to and there is no DDL to alter a column to drop # an unnamed unique constraint, so this code creates a named unique # constraint on the name column rather than an unnamed one. # (This is used in migration 16.) tenant_table = sql.Table( 'tenant', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('name', sql.String(64), nullable=False), sql.Column('extra', sql.Text()), sql.UniqueConstraint('name', name='tenant_name_key')) else: tenant_table = sql.Table( 'tenant', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('name', sql.String(64), unique=True, nullable=False), sql.Column('extra', sql.Text())) tenant_table.create(migrate_engine, checkfirst=True) metadata_table = sql.Table( 'metadata', meta, sql.Column('user_id', sql.String(64), primary_key=True), sql.Column('tenant_id', sql.String(64), primary_key=True), sql.Column('data', sql.Text())) metadata_table.create(migrate_engine, checkfirst=True) ec2_credential_table = sql.Table( 'ec2_credential', meta, sql.Column('access', sql.String(64), primary_key=True), sql.Column('secret', sql.String(64)), sql.Column('user_id', sql.String(64)), sql.Column('tenant_id', sql.String(64))) ec2_credential_table.create(migrate_engine, checkfirst=True) if migrate_engine.name == 'ibm_db_sa': # NOTE(blk-u): SQLAlchemy for PostgreSQL picks the name user_name_key # for the unique constraint, but for DB2 doesn't give the UC a name # unless we
tell it to and there is no DDL to alter a column to drop # an unnamed unique constraint, so this code creates a named unique # constraint on the name column rather than an unnamed one. # (This is used in migration 16.) user_table = sql.Table( 'user', meta, sql
.Column('id', sql.String(64), primary_key=True), sql.Column('name', sql.String(64), nullable=False), sql.Column('extra', sql.Text()), sql.UniqueConstraint('name', name='user_name_key')) else: user_table = sql.Table( 'user', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('name', sql.String(64), unique=True, nullable=False), sql.Column('extra', sql.Text())) user_table.create(migrate_engine, checkfirst=True) user_tenant_membership_table = sql.Table( 'user_tenant_membership', meta, sql.Column( 'user_id', sql.String(64), sql.ForeignKey('user.id'), primary_key=True), sql.Column( 'tenant_id', sql.String(64), sql.ForeignKey('tenant.id'), primary_key=True)) user_tenant_membership_table.create(migrate_engine, checkfirst=True) # token token_table = sql.Table( 'token', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('expires', sql.DateTime()), sql.Column('extra', sql.Text())) token_table.create(migrate_engine, checkfirst=True) def downgrade(migrate_engine): # Operations to reverse the above upgrade go here. meta = sql.MetaData() meta.bind = migrate_engine tables = ['user_tenant_membership', 'token', 'user', 'tenant', 'role', 'metadata', 'ec2_credential', 'endpoint', 'service'] for t in tables: table = sql.Table(t, meta, autoload=True) table.drop(migrate_engine, checkfirst=True)
steventimberman/masterDebater
venv/lib/python2.7/site-packages/rest_framework/compat.py
Python
mit
10,280
0.000486
""" The `compat` module provides support for backwards compatibility with older versions of Django/Python, and compatibility wrappers around optional packages. """ # flake8: noqa from __future__ import unicode_literals import inspect import django from django.apps import apps from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import connection, models, transaction from django.template import Context, RequestContext, Template from django.utils import six from django.views.generic import View try: from django.urls import ( NoReverseMatch, RegexURLPattern, RegexURLResolver, ResolverMatch, Resolver404, get_script_prefix, reverse, reverse_lazy, resolve ) except ImportError: from django.core.urlresolvers import ( # Will be removed in Django 2.0 NoReverseMatch, RegexURLPattern, RegexURLResolver, ResolverMatch, Resolver404, get_script_prefix, reverse, reverse_lazy, resolve ) try: import urlparse # Python 2.x except ImportError: import urllib.parse as urlparse def unicode_repr(instance): # Get the repr of an instance, but ensure it is a unicode string # on both python 3 (already the case) and 2 (not the case). if six.PY2: return repr(instance).decode('utf-8') return repr(instance) def unicode_to_repr(value): # Coerce a unicode string to the correct repr return type, depending on # the Python version. We wrap all our `__repr__` implementations with # this and then use unicode throughout internally. if six.PY2: return value.encode('utf-8') return value def unicode_http_header(value): # Coerce HTTP header value to unicode. if isinstance(value, six.binary_type): return value.decode('iso-8859-1') return value def total_seconds(timedelta): # TimeDelta.total_seconds() is only available in Python 2.7 if hasattr(timedelta, 'total_seconds'): return timedelta.total_seconds() else: return (timedelta.days * 86400.0) + float(timedelta.seconds) + (timedelta.microseconds / 1000000.0) def distinct(queryset, base): if settings.DATABASES[queryset.db]["ENGINE"] == "django.db.backends.oracle": # distinct analogue for Oracle users return base.filter(pk__in=set(queryset.values_list('pk', flat=True))) return queryset.distinct() # Obtaining manager instances and names from model options differs after 1.10. def get_names_and_managers(options): if django.VERSION >= (1, 10): # Django 1.10 onwards provides a `.managers` property on the Options. return [ (manager.name, manager) for manager in options.managers ] # For Django 1.8 and 1.9, use the three-tuple information provided # by .concrete_managers and .abstract_managers return [ (manager_info[1], manager_info[2]) for manager_info in (options.concrete_managers + options.abstract_managers) ] # field.rel is deprecated from 1.9 onwards def get_remote_field(field, **kwargs): if 'default' in kwargs: if django.VERSION < (1, 9): return getattr(field, 'rel', kwargs['default']) return getattr(field, 'remote_field', kwargs['default']) if django.VERSION < (1, 9): return field.rel return field.remote_field def _resolve_model(obj): """ Resolve supplied `obj` to a Django model class. `obj` must be a Django model class itself, or a string representation of one. Useful in situations like GH #1225 where Django may not have resolved a string-based reference to a model in another model's foreign key definition. 
String representations should have the format: 'appname.ModelName' """ if isinstance(obj, six.string_types) and len(obj.split('.')) == 2: app_name, model_name = obj.split('.') resolved_model = apps.get_model(app_name, model_name) if resolved_model is None: msg = "Django did not return a model for {0}.{1}" raise ImproperlyConfigured(msg.format(app_name, model_name)) return resolved_model elif inspect.isclass(obj) and issubclass(obj, models.Model): return obj raise ValueError("{0} is not a Django model".format(obj)) def is_authenticated(user): if django.VERSION < (1, 10): return user.is_authenticated() return user.is_authenticated def is_anonymous(user): if django.VERSION < (1, 10): return user.is_anonymous() return user.is_anonymous def get_related_model(field): if django.VERSION < (1, 9): return _resolve_model(field.rel.to) return field.remote_field.model def value_from_object(field, obj): if django.VERSION < (1, 9): return field._get_val_from_obj(obj) return field.value_from_object(obj) # contrib.postgres only supported from 1.8 onwards. try: from django.contrib.postgres import fields as postgres_fields except ImportError: postgres_fields = None # JSONField is only supported from 1.9 onwards try: from django.contrib.postgres.fields import JSONField except ImportError: JSONField = None # coreapi is optional (Note that uritemplate is a dependency of coreapi) try: import coreapi import uritemplate except (ImportError, SyntaxError): # SyntaxError is possible under python 3.2 coreapi = None uritemplate = None # coreschema is optional try: import coreschema except ImportError: coreschema = None # django-filter is optional try: import django_filters except ImportError: django_filters = None # django-crispy-forms is optional try: import crispy_forms except ImportError: crispy_forms = None # requests is optional try: import requests except ImportError: requests = None # Django-guardian is optional. Import only if guardian is in INSTALLED_APPS # Fixes (#1712). We keep the try/except for the test suite. guardian = None try: if 'guardian' in settings.INSTALLED_APPS: import guardian except ImportError: pass # PATCH method is not implemented by Django if 'patch' not in View.http_method_names: View.http_method_names = View.http_method_names + ['patch'] # Markdown is optional try: import markdown if markdown.version <= '2.2': HEADERID_EXT_PATH = 'headerid' LEVEL_PARAM = 'level' elif markdown.version < '2.6': HEADERID_EXT_PATH = 'markdown.extensions.headerid' LEVEL_PARAM = 'level' else: HEADERID_EXT_PATH = 'markdown.extensions.toc' LEVEL_PARAM = 'baselevel' def apply_markdown(text): """ Simple wrapper around :func:`markdown.markdown` to set the base level of '#' style headers to <h2>. 
""" extensions = [HEADERID_EXT_PATH] extension_configs = { HEADERID_EXT_PATH: { LEVEL_PARAM: '2' } } md = markdown.Markdown( extensions=extensions, extension_configs=extension_configs ) return md.convert(text) except ImportError: apply_markdown = None markdown = None try: import pygments from pygments.lexers import get_lexer_by_name from pygments.formatters import HtmlFormatter def pygments_highlight(text, lang, style): lexer = get_lexer_by_name(lang, stripall=False) formatter = HtmlFormatter(nowrap=True, style=style) return pygments.highlight(text, lexer, formatter) def pygments_css(style): formatter = HtmlFormatter(style=style) return formatter.get_style_defs('.highlight') except ImportError: pygments = None def pygments_highlight(text, lang, style): return text def pygments_css(style): return None try: import pytz from pytz.exceptions import InvalidTimeError except ImportError: InvalidTimeError = Exception # `separators` argu
ment to `json.dumps()` differs between 2.x and 3.x # See: http://bugs.python.org/issue22767 if six.PY3: SHORT_SEPARATORS = (',', ':') LONG_SEPARATORS = (', ', ': ') IND
ENT_SEPARATORS = (',', ': ') else: SHORT_SEPARATORS = (
NixaSoftware/CVis
venv/lib/python2.7/site-packages/pandas/tests/indexes/datetimes/test_datetimelike.py
Python
apache-2.0
2,661
0
""" generic tests from the Datetimelike class """ import numpy as np import pandas as pd from pandas.util import testing as tm from pandas import Series, Index, DatetimeIndex, date_range from ..datetimelike import DatetimeLike class TestDatetimeIndex(DatetimeLike): _holder = DatetimeIndex def setup_method(self, method): self.indices = dict(index=tm.makeDateIndex(10), index_dec=date_range('20130110', periods=10, freq='-1D')) self.setup_indices() def create_index(self): return date_range('20130101', periods=5) def test_shift(self): # test shift for datetimeIndex and non datetimeIndex # GH8083 drange = self.create_index() result = drange.shift(1)
expected = DatetimeIndex(['2013-01-02', '2013-01-03', '2013-01-04',
'2013-01-05', '2013-01-06'], freq='D') tm.assert_index_equal(result, expected) result = drange.shift(-1) expected = DatetimeIndex(['2012-12-31', '2013-01-01', '2013-01-02', '2013-01-03', '2013-01-04'], freq='D') tm.assert_index_equal(result, expected) result = drange.shift(3, freq='2D') expected = DatetimeIndex(['2013-01-07', '2013-01-08', '2013-01-09', '2013-01-10', '2013-01-11'], freq='D') tm.assert_index_equal(result, expected) def test_pickle_compat_construction(self): pass def test_intersection(self): first = self.index second = self.index[5:] intersect = first.intersection(second) assert tm.equalContents(intersect, second) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: result = first.intersection(case) assert tm.equalContents(result, second) third = Index(['a', 'b', 'c']) result = first.intersection(third) expected = pd.Index([], dtype=object) tm.assert_index_equal(result, expected) def test_union(self): first = self.index[:5] second = self.index[5:] everything = self.index union = first.union(second) assert tm.equalContents(union, everything) # GH 10149 cases = [klass(second.values) for klass in [np.array, Series, list]] for case in cases: result = first.union(case) assert tm.equalContents(result, everything)
mareuter/lct-python
lct/utils/observing_info.py
Python
mit
1,166
0.008576
# -*- coding: utf-8 -*- #------------------------------------------------------------------------------ # Copyright (c) 2012-2014, Michael Reuter # Distributed under the MI
T License. See LICENSE.txt for more information. #------------------------------------------------------------------------------ from .moon_info import MoonInfo from .observing_site import ObservingSite class ObservingInfo(object): ''' This class is responsible for keeping the observing site information and the moon inform
ation object together. It will be responsible for updating any of the observing site information that then affects the moon information. ''' __shared_state = {"obs_site": ObservingSite(), "moon_info": MoonInfo()} def __init__(self): ''' Constructor ''' self.__dict__ = self.__shared_state def update(self): self.moon_info.compute(self.obs_site.getObserver()) if __name__ == "__main__": oi = ObservingInfo() oi.update() print oi.obs_site import time time.sleep(2) oi2 = ObservingInfo() oi2.update() print oi2.obs_site
kashishm/gauge-python
start.py
Python
mit
2,076
0.000482
import os import platform import sys import threading from concurrent.futures import ThreadPoolExecutor from os import environ, path from threading import Timer import grpc import ptvsd from getgauge import handlers, logger, processor from getgauge.impl_loader import copy_skel_files from getgauge.messages import runner_pb2_grpc from getgauge.static_loader import load_files from getgauge.util import get_step_impl_dirs PLUGIN_JSON = 'python.json' VERSION = 'version' ATTACH_DEBUGGER_EVENT = 'Runner Ready for Debugging' def main(): logger.info("Python: {}".format(platform.python_version())) if sys.argv[1] == "--init": logger.debug("Initilizing gauge project.") copy_skel_files() else: load_implementations() start() def load_implementations(): d = get_step_impl_dirs() logger.debug( "Loading step implemetations from {} dirs.".format(', '.join(d))) for impl_dir in d: if not path.exists(impl_dir): logger.error('can not l
oad implementations from {}. {} does not exist.'.format( impl_dir, impl_dir)) load_files(d) def _handle_detached():
logger.info("No debugger attached. Stopping the execution.") os._exit(1) def start(): if environ.get('DEBUGGING'): ptvsd.enable_attach(address=( '127.0.0.1', int(environ.get('DEBUG_PORT')))) print(ATTACH_DEBUGGER_EVENT) t = Timer(int(environ.get("debugger_wait_time", 30)), _handle_detached) t.start() ptvsd.wait_for_attach() t.cancel() logger.debug('Starting grpc server..') server = grpc.server(ThreadPoolExecutor(max_workers=1)) p = server.add_insecure_port('127.0.0.1:0') handler = handlers.RunnerServiceHandler(server) runner_pb2_grpc.add_RunnerServicer_to_server(handler, server) logger.info('Listening on port:{}'.format(p)) server.start() t = threading.Thread( name="listener", target=handler.wait_for_kill_event) t.start() t.join() os._exit(0) if __name__ == '__main__': main()
antoinecarme/pyaf
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_MovingAverage_Seasonal_WeekOfYear_NoAR.py
Python
bsd-3-clause
163
0.04908
import tests.mod
el_control.test_ozone_custom_models_enabled as testmod testmod.build_model( ['Logit'] , ['MovingAverage'] , ['Seasonal_WeekOfYear'] , ['NoAR'
] );
yuanming-hu/taichi
tests/python/examples/simulation/test_ad_gravity.py
Python
mit
1,137
0
import argparse import taichi as ti FRAMES = 100 def test_ad_gravity(): from taichi.examples.simulation.ad_gravity import init, substep init() for _ in range(FRAMES): for _ in range(50): substep() def video_ad_gravity(result_dir): import numpy as np from taichi.examples.simulation.ad_gravity import init, substep, x video_manager = ti.tools.VideoManager(output_dir=result_dir,
framerate=24, automatic_build=False) gui = ti.GUI('Autodiff gravity', show_gui=False) init() for _ in range(FRAMES): for _ in range(50): substep() gui.circles(x.to_numpy(), radius=3) video_manager.write_frame(gui.get_image()) gui.clear() video_manager.make_video(mp4=True, gif=False) if
__name__ == '__main__': parser = argparse.ArgumentParser(description='Generate ad_gravity video') parser.add_argument('output_directory', help='output directory of generated video') video_ad_gravity(parser.parse_args().output_directory)
kmtoki/qmk_firmware
lib/python/qmk/cli/new/keymap.py
Python
gpl-2.0
1,884
0.004246
"""This script automates the copying of the default keymap into your own keymap. """ import shutil from pathlib import Path import qmk.path from qmk.decorators import automagic_keyboard, automagic_keymap from milc import cli @cli.argument('-kb', '--keyboard', help='Specify keyboard name. Example: 1upkeyboards/1up60hse') @cli.argument('-km', '--keymap', help='Specify the name for the new keymap directory') @cli.subcommand('Creates a new keymap for the keyboard of your choosing') @automagic_keyboard @automagic_keymap def new_keymap(cli): """Creates a new keymap for the keyboard of your choosing. """ # ask for user input if keyboard or keymap was not provided in the command line keyboard = cli.config.new_keymap.keyboard if cli.config.new_keymap.keybo
ard else input("Keyboard Name: ") keymap = cli.config.new_keymap.keymap if cli.config.new_keymap.keymap else input("Keymap Name: ") # generate keymap paths kb_path = Path('keyboards') / keyboard keymap_path = qmk.path.keymap(keyboard) keymap_path_default = keymap_path / 'default' keymap_path_new = keymap_path / keymap # check directories if not kb_path.exists():
cli.log.error('Keyboard %s does not exist!', kb_path) return False if not keymap_path_default.exists(): cli.log.error('Keyboard default %s does not exist!', keymap_path_default) return False if keymap_path_new.exists(): cli.log.error('Keymap %s already exists!', keymap_path_new) return False # create user directory with default keymap files shutil.copytree(keymap_path_default, keymap_path_new, symlinks=True) # end message to user cli.log.info("%s keymap directory created in: %s", keymap, keymap_path_new) cli.log.info("Compile a firmware with your new keymap by typing: \n\n\tqmk compile -kb %s -km %s\n", keyboard, keymap)
ddinsight/dd-streamworks
stream_worker/devmodule/production/networklog/__init__.py
Python
apache-2.0
12,862
0.003424
# -*- coding: utf-8 -*- # # Copyright 2015 AirPlug Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Created on 2012. 7. 19. @author: springchoi """ import sys from datatype import * import worker import re import traceback import time import datetime import collections import MySQLdb try: from worker import log except ImportError: import logging as log Priority = collections.namedtuple('Priority', 'LOW NORMAL HIGH')._make(range(3)) class UniqueList(list): key = lambda x: x def setKey(self, key=lambda x: x): if not callable(key): raise RuntimeError("Key is not callable") self.key = key def addSet(self, item, key=None): if not key: key = self.key elif not callable(key): raise RuntimeError("Key is not callable") if len(filter(lambda x: key(x) == key(item), self)) > 0: return False self.append(item) return True class GeoInfo(collections.namedtuple('_GeoInfo', 'lat, lng, acc, geosrc, from_cell')): def __new__(cls, lat=-9999, lng=-9999, acc=50000, geosrc='unknown', from_cell=False): # add default values return super(GeoInfo, cls).__new__(cls, lat, lng, acc, geosrc, from_cell) class WiFiNode(collections.namedtuple('_WiFiNode', 'state, bssid, ssid, rssi, regdtm, bregap, bmap, optrcom, geoloc, priority')): __registerdApSSIDPattern = {'LGT':('U\+',), 'KT':('olleh_GiGA_WiFi', 'ollehWiFi', 'NESPOT', 'QOOKnSHOW'), 'SKT':('T wifi zone',)} __hotspotApSSIDPattern = ('AndroidHotspot', 'AndroidAP', 'HTC-', 'Galaxy ', 'SKY A') __mobileApSSIDPattern = ('WibroEgg', 'ollehEgg', 'KWI-B', 'SHOW_JAPAN_EGG', 'egg\Z') def __new__(cls, state, bssid, ssid='', regdtm='19000101000000', rssi=-200, bregap=False, bmap=False, optrcom='none', geoloc=None, priority=Priority.NORMAL): # Classify WiFi try: if ssid not in ('', None): ssid = re.sub(r'^\s*"(.*)"\s*$', r'\1', unicode(ssid)) if ssid.find('"') >= 0: log.error("!!! SSID - %s" % ssid) if cls.isHotspot(ssid): priority = Priority.LOW else: optrcom = cls.getWiFiOperator(ssid) bregap = True if optrcom != 'none' else False if not bregap: bmap = cls.isMobile(ssid) try: ssid = MySQLdb.escape_string(unicode(ssid).encode('utf-8')) except Exception, e: # Non-ascii data. 
log.warn("SSID MySQLdb.escape_string Error - %s, %s" % (ssid, e)) if not geoloc: geoloc = GeoInfo() except Exception, e: log.error(e) log.error('BSSID - %s, SSID - %s' % (bssid, ssid)) exc_type, exc_value, exc_traceback = sys.exc_info() log.error(traceback.format_exception(exc_type, exc_value, exc_traceback)) raise e return super(WiFiNode, cls).__new__(cls, state, bssid, ssid, rssi, regdtm, bregap, bmap, optrcom, geoloc, priority) @classmethod def isHotspot(cls, ssid): patt = r'%s' % '|'.join(cls.__hotspotApSSIDPattern) if re.match(patt, ssid, re.IGNORECASE): #log.info("%s - Hotspot SSID, drop this AP" % ssid) return True @classmethod def getWiFiOperator(cls, ssid): for provider in cls.__registerdApSSIDPattern.keys(): patt = r'%s' % '|'.join(cls.__registerdApSSIDPattern[provider]) if re.match(patt, ssid, re.IGNORECASE): #log.info("Registered SSID - %s" % ssid) return provider return 'none' @classmethod def isMobile(cls, ssid): patt = r'%s' % '|'.join(cls.__mobileApSSIDPattern) if re.search(patt, ssid, re.IGNORECASE): #log.info("Mobile AP - %s" % ssid) return True return False class CellNode(collections.namedtuple('_CellNode', 'state, cellid, plmnid, cid, lac, celltype, regdtm, geoloc, priority')): def __new__(cls, state, cellid, celltype=0, regdtm='19000101000000', geoloc=None, priority=Priority.NORMAL): # add default values try: plmnid, cid, lac = cellid.split('_') # guard from invalid data if len(plmnid) > 6 or int(plmnid) == 0: plmnid = '0' if not geoloc: geoloc = GeoInfo() except Exception, e: raise e return super(CellNode, cls).__new__(cls, state, cellid, plmnid, cid, lac, celltype, regdtm, geoloc, priority) def addWiFi(cursor, node): strSql = """INSERT INTO apmain.apinfo (bssid, ssid, regdtm, bregap, bmap, lat, lng, acc, geosrc, optrcom, seq) VALUES('%s','%s','%s','%d','%d','%f','%f','%d','%s','%s','%s') ON DUPLICATED UPDATE lat = IF(VALUES(seq) > seq, VALUES(lat), lat), lng = IF(VALUES(seq) > seq, VALUES(lng), lng), seq = IF(VALUES(seq) > seq, VALUES(seq), seq), acc = IF(VALUES(seq) > seq, VALUES(acc), acc), geosrc=VALUES(geosrc)""" try: strSql = strSql % (node.bssid, node.ssid, node.regdtm, int(node.bregap), int(node.bmap), node.geoloc.lat, node.geoloc.lng, node.geoloc.acc, node.geoloc.geosrc, node.optrcom, node.rssi) except Exception, e: log.error("SQL GEN ERR - %s" % bytes(node.ssid)) strSql = strSql % (node.bssid, '', node.regdtm, int(node.bregap), int(node.bmap), node.geoloc.lat, node.geoloc.lng, node.geoloc.acc, node.geoloc.geosrc, node.optrcom, node.rssi) try: cursor.execute(strSql) log.debug("INSERT - %s" % node.bssid) except Exception, e: # Duplicate entry error if e[0] != 1062: log.error(e) log.error(strSql) return False return True netTypeCode = {'gsm':1, 'cdma':2, 'lte':3} def addCellTower(cursor, node): strSql = """INSERT INTO apmain.cellinfo (fullid, plmnid, cellid, lac, celltype, regdtm, lat, lng, acc, geosrc, seq) VALUES('%s','%s','%s','%s','%d','%s','%s','%f','%f','%s', '1') ON DUPLICATED UPDATE lat=((lat*seq)+VALUES(lat))/(seq+1), lng=((lng*seq)+VALUES(lng))/(seq+1), seq=seq+1, geosrc=VALUES(geosrc)""" try: strSql = strSql % (node.cellid, node.plmnid, node.cid, node.lac, 0, node.regdtm, node.geoloc.lat, node.geoloc.lng, node.geoloc.acc, 'cellLoc' if node.geoloc.from_cell else node.geoloc.geosrc) cursor.execute(strSql) log.debug("INSERT - %s" % node.cellid) except Exception, e: # Duplicate entry error if e[0] != 1062: log.error(e) log.error(strSql) return False return True class ProcessNetworkNode(obj
ect): OW_TASK_SUBSCRIBE_EVENT
S = ['evtPlayerLog', 'evtNetworkLog']
    OW_TASK_PUBLISH_EVENTS = []
    OW_USE_HASHING = False
    OW_HASH_KEY = None
    OW_NUM_WORKER = 8

    def publishEvent(self, event, params):
        # THIS METHOD WILL BE OVERRIDDEN
        # DO NOT EDIT THIS METHOD
        pass

    def __makeCellId(self, plmnid, cid, lac):
        try:
            cellId = map(lambda x: str(x) if str(x).isdigit() else '0', [plmnid, cid, lac])
            if 0 not in map(int, cellId) and len(cellId[0]) < 7:
                return '_'.join(cellId)
        except Exception, e:
            log.error(e)

        return None

    def extractNetworkNode(self, params):
        # net info structur
ffsdmad/af-web
cgi-bin/plugins2/report/fond_search_report1.py
Python
gpl-3.0
547
0.006085
# -*- coding: utf8 -*- SQL = ( ('list_fonds_report1', """ select F.FKOD,F.FNAME, (F.A16+if(F.A22,A22,0)) as
A16 FROM `af3_fond` F WHERE FNAME like ('%%%(qr)s%%') or A1 like ('%%%(qr)s%%') ORDER BY FKOD;"""), ) FOUND_ROWS = True ROOT = "fonds" ROOT_PREFIX = None ROOT_POSTFIX= None XSL_TEMPLATE = "data/af-web.xsl" EVENT = None WHERE = () PARAM = ("qr",) TITLE="Поиск фондов" MESSAGE="Нет результ
атов по вашему запросу, вернитесь назад" ORDER = None
hforge/itools
itools/database/backends/git.py
Python
gpl-3.0
14,062
0.00313
# -*- coding: UTF-8 -*- # Copyright (C) 2007, 2009, 2011-2012 J. David Ibáñez <jdavid.ibp@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # Import from the Standard Library import os from datetime import datetime, timedelta, time from heapq import heappush, heappop from multiprocessing import Process from os.path import abspath, dirname from uuid import uuid4 # Import from pygit2 from pygit2 import TreeBuilder, GIT_FILEMODE_TREE, init_repository # Import from itools from itools.database import Metadata from itools.database.magic_ import magic_from_buffer from itools.database.git import open_worktree from itools.fs import lfs # Import from here from catalog import Catalog, _get_xquery, SearchResults, make_catalog from patchs import PatchsBackend from registry import register_backend TEST_DB_WITHOUT_COMMITS = bool(int(os.environ.get('TEST_DB_WITHOUT_COMMITS') or 0)) TEST_DB_DESACTIVATE_GIT = bool(int(os.environ.get('TEST_DB_DESACTIVATE_GIT') or 0)) class Heap(object): """ This object behaves very much like a sorted dict, but for security only a subset of the dict API is exposed: >>> len(heap) >>> heap[path] = value >>> value = heap.get(path) >>> path, value = heap.popitem() The keys are relative paths as used in Git trees, like 'a/b/c' (and '' for the root). The dictionary is sorted so deeper paths are considered smaller, and so returned first by 'popitem'. The order relation between two paths of equal depth is undefined. This data structure is used by RWDatabase._save_changes to build the tree objects before commit. 
""" def __init__(self): self._dict = {} self._heap = [] def __len__(self): return len(self._dict) def get(self, path): return self._dict.get(path) def __setitem__(self, path, value): if path not in self._dict: n = -path.count('/') if path else 1 heappush(self._heap, (n, path)) self._dict[path] = value def popitem(self): key = heappop(self._heap) path = key[1] return path, self._dict.pop(path) class GitBackend(object): def __init__(self, path, fields, read_only=False): self.nb_transactions = 0 self.last_transaction_dtime = None self.path = abspath(path) + '/' self.fields = fields self.read_only = read_only # Open database self.path_data = '%s/database/' % self.path # Check if is a folder self.path_data = '%s/database/' % self.path if not lfs.is_folder(self.path_data): error = '"{0}" should be a folder, but it is not'.format(self.path_data) raise ValueError(error) # New interface to Git self.worktree = open_worktree(self.path_data) # Initialize the database, but chrooted self.fs = lfs.open(self.path_data) # Static FS database_static_path = '{0}/database_static'.format(path) if not lfs.exists(database_static_path): self.init_backend_static(path) self.static_fs = lfs.open(database_static_path) # Patchs backend self.patchs_backend = PatchsBackend(path, self.fs, read_only) # Catalog self.catalog = self.get_catalog() @classmethod def init_backend(cls, path, fields, init=False, soft=False): # Metadata database init_repository('{0}/database'.format(path), bare=False) # Init backend static cls.init_backend_static(path) # Make catalog make_catalo
g('{0}/catalog'.format(path), fields) @classmethod def init_backend_static(cls, path): # Static database lfs.make_folder('{0}/database_static'.format(path)) lfs.make_folder('{0}/database_static/.history'.format(path)) ####################################################################### # Database API #########
############################################################## def normalize_key(self, path, __root=None): # Performance is critical so assume the path is already relative to # the repository. key = __root.resolve(path) if key and key[0] == '.git': err = "bad '{0}' path, access to the '.git' folder is denied" raise ValueError(err.format(path)) return '/'.join(key) def handler_exists(self, key): fs = self.get_handler_fs_by_key(key) return fs.exists(key) def get_handler_names(self, key): return self.fs.get_names(key) def get_handler_data(self, key): if not key: return None fs = self.get_handler_fs_by_key(key) with fs.open(key) as f: return f.read() def get_handler_mimetype(self, key): data = self.get_handler_data(key) return magic_from_buffer(data) def handler_is_file(self, key): fs = self.get_handler_fs_by_key(key) return fs.is_file(key) def handler_is_folder(self, key): fs = self.get_handler_fs_by_key(key) return fs.is_folder(key) def get_handler_mtime(self, key): fs = self.get_handler_fs_by_key(key) return fs.get_mtime(key) def save_handler(self, key, handler): data = handler.to_str() # Save the file fs = self.get_handler_fs(handler) # Write and truncate (calls to "_save_state" must be done with the # pointer pointing to the beginning) if not fs.exists(key): with fs.make_file(key) as f: f.write(data) f.truncate(f.tell()) else: with fs.open(key, 'w') as f: f.write(data) f.truncate(f.tell()) # Set dirty = None handler.timestamp = self.get_handler_mtime(key) handler.dirty = None def traverse_resources(self): raise NotImplementedError def get_handler_fs(self, handler): if isinstance(handler, Metadata): return self.fs return self.static_fs def get_handler_fs_by_key(self, key): if key.endswith('metadata'): return self.fs return self.static_fs def add_handler_into_static_history(self, key): the_time = datetime.now().strftime('%Y%m%d%H%M%S') new_key = '.history/{0}.{1}.{2}'.format(key, the_time, uuid4()) parent_path = dirname(new_key) if not self.static_fs.exists(parent_path): self.static_fs.make_folder(parent_path) self.static_fs.copy(key, new_key) def do_transaction(self, commit_message, data, added, changed, removed, handlers, docs_to_index, docs_to_unindex): git_author, git_date, git_msg, docs_to_index, docs_to_unindex = data # Statistics self.nb_transactions += 1 # Add static changed & removed files to ~/database_static/.history/ changed_and_removed = list(changed) + list(removed) for key in changed_and_removed: if not key.endswith('metadata'): self.add_handler_into_static_history(key) # Create patch if there's changed if added or changed or removed: self.patchs_backend.create_patch(added, changed, removed, handlers, git_author) else: # it's a catalog transaction, we have to do nothing pass # Added and changed added_and_changed = list(added) + list(changed) for key in added_and_changed: handler = handlers.get(key) pare
ray-project/ray
release/ray_release/tests/test_cluster_manager.py
Python
apache-2.0
24,218
0.000991
import os import time import unittest from typing import Callable from unittest.mock import patch from uuid import uuid4 from freezegun import freeze_time from ray_release.exception import ( ClusterCreationError, ClusterStartupError, ClusterStartupTimeout, ClusterStartupFailed, ClusterEnvBuildError, ClusterEnvBuildTimeout, ClusterComputeCreateError, ClusterEnvCreateError, ) from ray_release.cluster_manager.full import FullClusterManager from ray_release.cluster_manager.minimal import MinimalClusterManager from ray_release.tests.utils import ( UNIT_TEST_PROJECT_ID, UNIT_TEST_CLOUD_ID, APIDict, fail_always, fail_once, MockSDK, ) from ray_release.util import get_anyscale_sdk TEST_CLUSTER_ENV = { "base_image": "anyscale/ray:nightly-py37", "env_vars": {}, "python": { "pip_packages": [], }, "conda_packages": [], "post_build_cmds": [f"echo {uuid4().hex[:8]}"], } TEST_CLUSTER_COMPUTE = { "cloud_id": UNIT_TEST_CLOUD_ID, "region": "us-west-2", "max_workers": 0, "head_node_type": {"name": "head_node", "instance_type": "m5.4xlarge"}, "worker_node_types": [ { "name": "worker_node", "instance_type": "m5.xlarge", "min_workers": 0, "max_workers": 0, "use_spot": False, } ], } def _fail(*args, **kwargs): raise RuntimeError() class _DelayedResponse: def __init__( self, callback: Callable[[], None], finish_after: float, before: APIDict, after: APIDict, ): self.callback = callback self.finish_after = time.monotonic() + finish_after self.before = before self.after = after def __call__(self, *args, **kwargs): self.callback() if time.monotonic() > self.finish_after: return self.after else: return self.before class MinimalSessionManagerTest(unittest.TestCase): cls = MinimalClusterManager def setUp(self) -> None: self.sdk = MockSDK() self.sdk.returns["get_project"] = APIDict( result=APIDict(name="release_unit_tests") ) self.cluster_env = TEST_CLUSTER_ENV self.cluster_compute = TEST_CLUSTER_COMPUTE self.cluster_manager = self.cls( project_id=UNIT_TEST_PROJECT_ID, sdk=self.sdk, test_name=f"unit_test__{self.__class__.__name__}", ) self.sdk.reset() @patch("time.sleep", lambda *a, **kw: None) def testFindCreateClusterComputeExisting(self): # Find existing compute and succeed self.cluster_manager.set_cluster_compute(self.cluster_compute) self.assertTrue(self.cluster_manager.cluster_compute_name) self.assertFalse(self.cluster_manager.cluster_compute_id) self.sdk.returns["search_cluster_computes"] = APIDict( metadata=APIDict( next_paging_token=None, ), results=[ APIDict( name="no_match", id="wrong", ), APIDict(name=self.cluster_manager.cluster_compute_name, id="correct"), ], ) self.cluster_manager.create_cluster_compute() self.assertEqual(self.cluster_manager.cluster_compute_id, "correct") self.assertEqual(self.sdk.call_counter["search_cluster_computes"], 1) self.assertEqual(len(self.sdk.call_counter), 1) @patch("time.sleep", lambda *a, **kw: None) def testFindCreateClusterComputeCreateFailFail(self): # No existing compute, create new, but fail both times self.cluster_manager.set_cluster_compute(self.cluster_compute) self.assertTrue(self.cluster_manager.cluster_compute_name) self.assertFalse(self.cluster_manager.cluster_compute_id) self.sdk.returns["search_cluster_computes"] = APIDict( metadata=APIDict( next_paging_token=None, ), results=[ APIDict( name="no_match", id="wrong", ), ], ) self.sdk.returns["create_cluster_compute"] = fail_always with self.assertRaises(ClusterComputeCreateError): self.cluster_manager.create_cluster_compute() # No cluster ID found or created 
self.assertFalse(self.cluster_manager.cluster_compute_id) # Both APIs were called twice (retry after fail) self.assertEqual(self.sdk.call_counter["search_cluster_computes"], 2) self.assertEqual(self.sdk.call_counter["create_cluster_compute"], 2) self.assertEqual(len(self.sdk.call_counter), 2) @patch("time.sleep", lambda *a, **kw: None) def testFindCreateClusterComputeCreateFailSucceed(self): # No existing compute, create new, fail once, succeed afterwards self.cluster_manager.set_cluster_compute(self.cluster_compute) self.assertTrue(self.cluster_manager.cluster_compute_name) self.assertFalse(self.cluster_manager.cluster_compute_id) self.sdk.returns["search_cluster_computes"] = APIDict( metadata=APIDict( next_paging_token=None, ), results=[ APIDict( name="no_match", id="wrong", ), ], ) self.sdk.returns["create_cluster_compute"] = fail_once( result=APIDict( result=APIDict( id="correct", ) ) ) self.cluster_manager.create_cluster_compute() # Both APIs were called twice (retry after fail) self.assertEqual(self.cluster_manager.cluster_compute_id, "correct") self.assertEqual(self.sdk.call_counter["search_cluster_computes"], 2) self.assertEqual(self.sdk.call_counter["create_cluster_compute"], 2) self.assertEqual(len(self.sdk.call_counter), 2) @patch("time.sleep", lambda *a, **kw: None) def testFindCreateClusterComputeCreateSucceed(self): # No existing compute, create new, and succeed self.cluster_manager.set_cluster_compute(self.cluster_compute) self.assertTrue(self.cluster_manager.cluster_compute_name) self.assertFalse(self.cluster_manager.cluster_compute_id) self.sdk.returns["search_cluster_computes"] = APIDict( metadata=APIDict( next_paging_token=None, ), results=[ APIDict( name="no_match", id="wrong", ), ], ) self.sdk.returns["create_cluster_compute"] = APIDict( result=APIDict( id="correct", ) ) self.cluster_manager.create_cluster_compute() # Both APIs were called twice (retry after fail) self.assertEqual(self.cluster_manager.cluster_compute_id, "correct") self.assertEqual(self.sdk.cal
l_counter["search_cluster_computes"], 1) self.assertEqual(self.sdk.call_counter["create_cluster_compute"], 1) self.assertEqual(len(self.sdk.call_counter), 2) @patch("time.sleep", lambda *a, **kw
: None) def testFindCreateClusterEnvExisting(self): # Find existing env and succeed self.cluster_manager.set_cluster_env(self.cluster_env) self.assertTrue(self.cluster_manager.cluster_env_name) self.assertFalse(self.cluster_manager.cluster_env_id) self.sdk.returns["search_cluster_environments"] = APIDict( metadata=APIDict( next_paging_token=None, ), results=[ APIDict( name="no_match", id="wrong", ), APIDict(name=self.cluster_manager.cluster_env_name, id="correct"), ], ) self.cluster_manager.create_cluster_env() self.assertEqual(self.cluster_manager.cluster_env_id, "correct") self.assertEqual(self.sdk.call_counter["search_cluster
sdispater/pendulum
pendulum/time.py
Python
mit
7,783
0.001156
from datetime import time from datetime import timedelta import pendulum from .constants import SECS_PER_HOUR from .constants import SECS_PER_MIN from .constants import USECS_PER_SEC from .duration import AbsoluteDuration from .duration import Duration from .mixins.default import FormattableMixin class Time(FormattableMixin, time): """ Represents a time instance as hour, minute, second, microsecond. """ # String formatting def __repr__(self): us = "" if self.microsecond: us = f", {self.microsecond}" tzinfo = "" if self.tzinfo: tzinfo = ", tzinfo={}".format(repr(self.tzinfo)) return "{}({}, {}, {}{}{})".format( self.__class__.__name__, self.hour, self.minute, self.second, us, tzinfo ) # Comparisons def closest(self, dt1, dt2): """ Get the closest time from the instance. :type dt1: Time or time :type dt2: Time or time :rtype: Time """ dt1 = self.__class__(dt1.hour, dt1.minute, dt1.second, dt1.microsecond) dt2 = self.__class__(dt2.hour, dt2.minute, dt2.second, dt2.microsecond) if self.diff(dt1).in_seconds() < self.diff(dt2).in_seconds(): return dt1 return dt2 def farthest(self, dt1, dt2): """ Get the farthest time from the instance. :type dt1: Time or time :type dt2: Time or time :rtype: Time """ dt1 = self.__class__(dt1.hour, dt1.minute, dt1.second, dt1.microsecond) dt2 = self.__class__(dt2.hour, dt2.minute, dt2.second, dt2.microsecond) if self.diff(dt1).in_seconds() > self.diff(dt2).in_seconds(): return dt1 return dt2 # ADDITIONS AND SUBSTRACTIONS def add(self, hours=0, min
utes=0, seconds=0, microseconds=0): """ Add duration to the instance. :param hours: The number of hours :type hours: int :param minutes: The number of minutes :type minutes: int :param seconds: The number of seconds :type seconds: int :param microseconds: The number of microseconds :type micros
econds: int :rtype: Time """ from .datetime import DateTime return ( DateTime.EPOCH.at(self.hour, self.minute, self.second, self.microsecond) .add( hours=hours, minutes=minutes, seconds=seconds, microseconds=microseconds ) .time() ) def subtract(self, hours=0, minutes=0, seconds=0, microseconds=0): """ Add duration to the instance. :param hours: The number of hours :type hours: int :param minutes: The number of minutes :type minutes: int :param seconds: The number of seconds :type seconds: int :param microseconds: The number of microseconds :type microseconds: int :rtype: Time """ from .datetime import DateTime return ( DateTime.EPOCH.at(self.hour, self.minute, self.second, self.microsecond) .subtract( hours=hours, minutes=minutes, seconds=seconds, microseconds=microseconds ) .time() ) def add_timedelta(self, delta): """ Add timedelta duration to the instance. :param delta: The timedelta instance :type delta: datetime.timedelta :rtype: Time """ if delta.days: raise TypeError("Cannot add timedelta with days to Time.") return self.add(seconds=delta.seconds, microseconds=delta.microseconds) def subtract_timedelta(self, delta): """ Remove timedelta duration from the instance. :param delta: The timedelta instance :type delta: datetime.timedelta :rtype: Time """ if delta.days: raise TypeError("Cannot subtract timedelta with days to Time.") return self.subtract(seconds=delta.seconds, microseconds=delta.microseconds) def __add__(self, other): if not isinstance(other, timedelta): return NotImplemented return self.add_timedelta(other) def __sub__(self, other): if not isinstance(other, (Time, time, timedelta)): return NotImplemented if isinstance(other, timedelta): return self.subtract_timedelta(other) if isinstance(other, time): if other.tzinfo is not None: raise TypeError("Cannot subtract aware times to or from Time.") other = self.__class__( other.hour, other.minute, other.second, other.microsecond ) return other.diff(self, False) def __rsub__(self, other): if not isinstance(other, (Time, time)): return NotImplemented if isinstance(other, time): if other.tzinfo is not None: raise TypeError("Cannot subtract aware times to or from Time.") other = self.__class__( other.hour, other.minute, other.second, other.microsecond ) return other.__sub__(self) # DIFFERENCES def diff(self, dt=None, abs=True): """ Returns the difference between two Time objects as an Duration. :type dt: Time or None :param abs: Whether to return an absolute interval or not :type abs: bool :rtype: Duration """ if dt is None: dt = pendulum.now().time() else: dt = self.__class__(dt.hour, dt.minute, dt.second, dt.microsecond) us1 = ( self.hour * SECS_PER_HOUR + self.minute * SECS_PER_MIN + self.second ) * USECS_PER_SEC us2 = ( dt.hour * SECS_PER_HOUR + dt.minute * SECS_PER_MIN + dt.second ) * USECS_PER_SEC klass = Duration if abs: klass = AbsoluteDuration return klass(microseconds=us2 - us1) def diff_for_humans(self, other=None, absolute=False, locale=None): """ Get the difference in a human readable format in the current locale. 
:type other: Time or time :param absolute: removes time difference modifiers ago, after, etc :type absolute: bool :param locale: The locale to use for localization :type locale: str :rtype: str """ is_now = other is None if is_now: other = pendulum.now().time() diff = self.diff(other) return pendulum.format_diff(diff, is_now, absolute, locale) # Compatibility methods def replace( self, hour=None, minute=None, second=None, microsecond=None, tzinfo=True ): if tzinfo is True: tzinfo = self.tzinfo hour = hour if hour is not None else self.hour minute = minute if minute is not None else self.minute second = second if second is not None else self.second microsecond = microsecond if microsecond is not None else self.microsecond t = super().replace(hour, minute, second, microsecond, tzinfo=tzinfo) return self.__class__( t.hour, t.minute, t.second, t.microsecond, tzinfo=t.tzinfo ) def __getnewargs__(self): return (self,) def _get_state(self, protocol=3): tz = self.tzinfo return (self.hour, self.minute, self.second, self.microsecond, tz) def __reduce__(self): return self.__reduce_ex__(2) def __reduce_ex__(self, protocol): return self.__class__, self._get_state(protocol) Time.min = Time(0, 0, 0) Time.max = Time(23, 59, 59, 999999) Time.resolution = Duration(microseconds=1)
luotao1/Paddle
python/paddle/distributed/fleet/utils/log_util.py
Python
apache-2.0
1,685
0.00178
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import sys __all__ = [] class LoggerFactory: @staticmethod def build_logger(name=None, level=logging.INFO): assert name is not None, "name for logger should not be None" formatter = logging.Formatter( "%(asctime)s-%(levelname)s: " "[%(filename)s:%(lineno)d:%(funcName)s] %(message)s") _logger = logging.getLogger(name) _logger.setLevel(level) _logger.propagate = False handler = logging.StreamHandler(stream=sys.stderr) handler.setFormatter(formatter) handler.setLevel(level)
_logger.addHandler(handler) return _logger logger = LoggerFactory.build_logger(name="HybridParallel", level=logging.INFO) def layer_to_str(base, *args, **kwargs): name = base + "(" if args: name += ", ".join(str(arg) for arg in args) if kwargs: name += ", " if kwargs: name += ", ".join
("{}={}".format(key, str(value)) for key, value in kwargs.items()) name += ")" return name
markuz/Christine
libchristine/Share.py
Python
gpl-2.0
5,045
0.004559
# -*- coding: utf-8 -*- # # This file is part of the Christine project # # Copyright (c) 2006-2007 Marco Antonio Islas Cruz # # Christine is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Christine is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # # @category libchristine # @package Share # @author Miguel Vazquez Gocobachi <demrit@gnu.org> # @author Marco Antonio Islas Cruz <markuz@islascruz.org> # @copyright 2007-2009 Christine Development Group # @license http://www.gnu.org/licenses/gpl.txt #import gtk.glade # @author Miguel Vazquez Gocobachi <demrit@gnu.org> from libchristine.Validator import * from libchristine.pattern.Singleton import Singleton from libchristine.gui.GtkMisc import glade_xml from libchristine.globalvars import DATADIR, SHARE_PATH from libchristine.Logger import LoggerManager from libchristine.options import options import time import os import gtk import sys import gobject # # Share class manager for images, glade # templates and more files # # @author Miguel Vazquez Gocobachi <demrit@gnu.org> class Share(Singleton): """ Share class manager for images, glade templates and more files """ # # Directory where we have template files # # @var string __PathTemplate = None # # Directory where we have images # # @var string __PathPixmap = None def __init__(self): """ Constructor """ self.setName('Share') self.__logger = LoggerMa
nager().getLogger('Share') self.__PathTemplate = os.path.join(SHARE_PATH, 'gui') self.__PathPixmap = os.path.join(self.__PathTemplate, 'pixmaps') #self.__Pixmaps, used to store a pixmap. if it is here then reuse it #instead of creating another one from the same faile self.__Pixmaps = {} gobject.timeout_add(1000, self.check_pixmap_time_access) def getTemplate(self, file, root = None): """ Gets gla
de template @param string file: file to load @param string root: root widget to return instead the main window """ if file: file = ''.join([file, '.glade']) if isFile(os.path.join(self.__PathTemplate, file)): return glade_xml(os.path.join(self.__PathTemplate, file),root) self.__logger.warning('File %s was not found'%(os.path.join(self.__PathTemplate, file))) return None def getImage(self, name): """ Gets image as path string """ if ((not isNull(file)) or (isStringEmpty(name))): if (isFile(os.path.join(self.__PathPixmap, name+'.png'))): return os.path.join(self.__PathPixmap, name+'.png') elif (isFile(os.path.join(self.__PathPixmap, name+ '.svg'))): return os.path.join(self.__PathPixmap, name+'.svg') return None def getImageFromPix(self, name): """ Gets image from pixbuf """ icon_theme = gtk.icon_theme_get_default() if icon_theme.has_icon(name): pixbuf = icon_theme.load_icon(name, 48, 0) return pixbuf else: return self.load_from_local_dir(name) def load_from_local_dir(self, name): if not name: return files = os.listdir(self.__PathPixmap) filesf = [k for k in files if len(k.split('.')) > 1 \ and k.split('.')[0].startswith(name)] if not filesf: self.__logger.warning('None of this files \n%s\n where found'%repr(name)) return filepath = os.path.join(self.__PathPixmap, filesf[0]) pixdir = self.__Pixmaps.get(filepath,{}) if not pixdir: self.__Pixmaps[filepath] = pixdir pixmap = gtk.gdk.pixbuf_new_from_file(filepath) self.__Pixmaps[filepath]['pixmap'] = pixmap self.__Pixmaps[filepath]['timestamp'] = time.time() return self.__Pixmaps[filepath]['pixmap'] def check_pixmap_time_access(self): ''' Check the last time access to a pixmap, if the diference between the current time and the last access time is more than 600 senconds (10 minutes) then it will erase the pixmap. ''' c ={} ctime = time.time() for key, value in self.__Pixmaps.iteritems(): if ctime - value['timestamp'] < 60: c[key] = value self.__Pixmaps = c.copy() #del c return True
AravindK95/ee106b
project1/src/lab1/src/exp_quat_func.py
Python
mit
7,708
0.018422
#!/usr/bin/env python """Exponential and Quaternion code for Lab 6. Course: EE 106, Fall 2015 Author: Victor Shia, 9/24/15 This Python file is a code skeleton Lab 6 which calculates the rigid body transform given a rotation / translation and computes the twist from rigid body transform. When you think you have the methods implemented correctly, you can test your code by running "python exp_quat_func.py at the command line. This code requires the NumPy and SciPy libraries and kin_func_skeleton which you should have written in lab 3. If you don't already have these installed on your personal computer, you can use the lab machines or the Ubuntu+ROS VM on the course page to complete this portion of the homework. """ import tf import rospy import sys from math import * import numpy as np from tf2_msgs.msg import TFMessage from geometry_msgs.msg import Transform, Vector3 import kin_func_skeleton as kfs def quaternion_to_exp(rot): """ Converts a quaternion vector in 3D to its corresponding omega and theta. This uses the quaternion -> exponential coordinate equation given in Lab 6 Args: rot - a (4,) nd array or 4x1 array: the quaternion vector (\vec{q}, q_o) Returns: omega - (3,) ndarray: the rotation vector theta - a scalar """ #YOUR CODE HERE theta = 2.0 * np.arccos(rot[-1]) if theta == 0: omega = np.array([0.0, 0.0, 0.0]) else: omega = ((1.0/sin(theta/2.0)) * rot[:-1]) return (omega, theta) def create_rbt(omega, theta, p): """ Creates a rigid body transform using omega, theta, and the translation component. g = [R,p; 0,1], where R = exp(omega * theta), p = trans Args: omega - (3,) ndarray : the axis you want to rotate about theta - scalar value trans - (3,) ndarray or 3x1 array: the translation component of the rigid body motion Returns: g - (4,4) ndarray : the rigid body transform """ #YOUR CODE HERE R = kfs.rotation_3d(omega, theta) g = np.array([[R[0][0], R[0][1], R[0][2], p[0]], [R[1][0], R[1][1], R[1][2], p[1]], [R[2][0], R[2][1], R[2][2], p[2]], [0, 0, 0, 1]]) return g def compute_gab(g0a,g0b): """ Creates a rigid body transform g_{ab} the converts between frame A and B given the coordinate frame A,B in relation to the origin Args: g0a - (4,4) ndarray : the rigid body transform from the origin to frame A g0b - (4,4) ndarray : the rigid body transform from the origin to frame B Returns: gab - (4,4) ndarray : the rigid body transform """ #YOUR CODE HERE gab = np.dot(np.linalg.inv(g0a),g0b) return gab def find_omega_theta(R): """ Given a rotation matrix R, finds the omega and theta such that R = exp(omega * theta) Args: R - (3,3) ndarray : the rotational component of the rigid body transform Returns: omega - (3,) ndarray : the axis you want to rotate about theta - scalar value """ #YOUR CODE HERE theta = np.arccos((np.trace(R) - 1)/2) omega = (1/(2*sin(theta)))*np.array([R[2][1] - R[1][2],R[0][2] - R[2][0],R[1][0] - R[0][1]]) return (omega, theta) def find_v(omega, theta, trans): """ Finds the linear velocity term of the twist (v,omega) given omega, theta and translation Args: omega - (3,) ndarray : the axis you want to rotate about theta - scalar value trans - (3,) ndarray of 3x1 list : the translation component of the rigid body transform Returns: v - (3,1) ndarray : the linear velocity term of the twist (v,omega) """ #YOUR CODE HERE A_1 = np.eye(3) - kfs.rotation_3d(omega, theta) #print A_1 A_1 = A_1.dot(kfs.skew_3d(omega)) #print A_1 A_2 = np.outer(omega, omega.T)*theta #print A_2 A = A_1 + A_2 #print A #print np.linalg.inv(A) v = np.dot(np.linalg.inv(A), trans) #print v return 
np.array([v]).T #-----------------------------Testing code-------------------------------------- #-------------(you shouldn't need to modify anything below here)---------------- def array_func_test(func_name, args, ret_desired): ret_value = func_name(*args) if not isinstance(ret_value, np.ndarray): print('[FAIL] ' + func_name.__name__ + '() returned something other than a NumPy ndarray') elif ret_value.shape != ret_desired.shape: print('[FAIL] ' + func_name.__name__ + '() returned an ndarray with incorrect dimensions') elif not np.allclose(ret_value, ret_desired, rtol=1e-3): print('[FAIL] ' + func_name.__name__ + '() returned an incorrect value') else: print('[PASS] ' + func_name.__name__ + '() returned the correct value!') def array_func_test_two_outputs(func_name, args, ret_desireds): ret_values = func_name(*args) for i in range(2): ret_value = ret_values[i] ret_desired = ret_desireds[i] if i == 0 and not isinstance(ret_value, np.ndarray): print('[FAIL] ' + func_name.__name__ + '() returned something other than a NumPy ndarray') elif i == 1 and not isinstance(ret_value, float): print('[FAIL] ' + func_name.__name__ + '() returned something other than a float') elif i == 0 and ret_value.shape != ret_desired.shape: print('[FAIL] ' + func_name.__name__ + '() returned an ndarray with incorrect dimensions') elif not np.allclose(ret_value, ret_desired, rtol=1e-3): print('[FAIL] ' + func_name.__name__ + '() returned an incorrect value') else: print('[PASS] ' + func_name.__name__ + '() returned the argument %d value!' % i) if __name__ == "__main__": print('Testing...') #Test quaternion_to_exp() arg1 = np.array([1.0, 2, 3, 0.1]) func_args = (arg1,) ret_desired = (np.array([1.005, 2.0101, 3.0151]), 2.94125) array_func_test_two_outputs(quaternion_to_exp, func_args, ret_desired) #Test
create_rbt() arg1 = np.array([1.0, 2, 3]) arg2 = 2 arg3 = np.array([0.5,-0.5,1]) func_args = (arg1,arg2,arg3) ret_desired = np.array( [[ 0.4078, -0.6562, 0.6349, 0.5 ], [
0.8384, 0.5445, 0.0242, -0.5 ], [-0.3616, 0.5224, 0.7722, 1. ], [ 0. , 0. , 0. , 1. ]]) array_func_test(create_rbt, func_args, ret_desired) #Test compute_gab(g0a,g0b) g0a = np.array( [[ 0.4078, -0.6562, 0.6349, 0.5 ], [ 0.8384, 0.5445, 0.0242, -0.5 ], [-0.3616, 0.5224, 0.7722, 1. ], [ 0. , 0. , 0. , 1. ]]) g0b = np.array( [[-0.6949, 0.7135, 0.0893, 0.5 ], [-0.192 , -0.3038, 0.9332, -0.5 ], [ 0.693 , 0.6313, 0.3481, 1. ], [ 0. , 0. , 0. , 1. ]]) func_args = (g0a, g0b) ret_desired = np.array([[-0.6949, -0.192 , 0.693 , 0. ], [ 0.7135, -0.3038, 0.6313, 0. ], [ 0.0893, 0.9332, 0.3481, 0. ], [ 0. , 0. , 0. , 1. ]]) array_func_test(compute_gab, func_args, ret_desired) #Test find_omega_theta R = np.array( [[ 0.4078, -0.6562, 0.6349 ], [ 0.8384, 0.5445, 0.0242 ], [-0.3616, 0.5224, 0.7722 ]]) func_args = (R,) ret_desired = (np.array([ 0.2673, 0.5346, 0.8018]), 1.2001156089449496) array_func_test_two_outputs(find_omega_theta, func_args, ret_desired) #Test find_v arg1 = np.array([1.0, 2, 3]) arg2 = 1 arg3 = np.array([0.5,-0.5,1]) func_args = (arg1,arg2,arg3) ret_desired = np.array([[-0.1255], [ 0.0431], [ 0.0726]]) array_func_test(find_v, func_args, ret_desired)
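The quaternion-to-exponential conversion in the file above rests on the identity q = (omega_hat * sin(theta/2), cos(theta/2)) for unit quaternions. A minimal NumPy-only sketch of that relation with a round-trip check; the helper name and the pi/2 test rotation are illustrative and not part of the lab code:

import numpy as np

def quat_to_axis_angle(q):
    # q = (qx, qy, qz, qw), assumed unit norm; returns (omega_hat, theta)
    theta = 2.0 * np.arccos(np.clip(q[3], -1.0, 1.0))
    if np.isclose(theta, 0.0):
        return np.zeros(3), 0.0          # no rotation: the axis is arbitrary
    return q[:3] / np.sin(theta / 2.0), theta

# round-trip check: a rotation of pi/2 about the z-axis
theta = np.pi / 2
q = np.array([0.0, 0.0, np.sin(theta / 2), np.cos(theta / 2)])
omega_hat, theta_back = quat_to_axis_angle(q)
assert np.allclose(omega_hat, [0.0, 0.0, 1.0]) and np.isclose(theta_back, theta)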
reminisce/mxnet
python/mxnet/symbol/numpy_extension/__init__.py
Python
apache-2.0
996
0
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You ma
y obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language
governing permissions and limitations # under the License. """Module for the ops not belonging to the official numpy package.""" from . import _op from . import image from . import _register from ._op import * # pylint: disable=wildcard-import __all__ = _op.__all__
kboyd/scons_r
test/basic.py
Python
bsd-2-clause
596
0.001678
# A SCons tool for R scripts # # Copyright (c) 2014 Kendrick Boyd. This is free software. See LICENSE # for details. """ Basic test of producing output using save. """ import TestSCons test = TestSCons.TestSCons() # Add scons_r tool to test figure. test.file_fixture('../__init__.py', 'site_sco
ns/site_tools/scons_r/__init__.py') test.write(['SConstruct'], """\ import os env = Environment(TOOLS = ['scons_r']) e
nv.R('basic.r') """) test.write(['basic.r'], """\ x=rnorm(100) save(x, file='x.rdata') """) test.run(arguments='.', stderr=None) test.must_exist('x.rdata') test.pass_test()
thoslin/django-markitup
markitup/views.py
Python
bsd-3-clause
527
0.00759
from django.shortcuts import render_to_response from django.template import RequestContext from markitup import settings from markitup.markup import filter_func from
markitup.sanitize import sanitize_html def apply_filter(request): cleaned_data = sanitize_html(request.POST.get('data', ''), strip=True) markup = filter_func(cleaned_data) return render_to_response( 'markitup/preview.html', {'preview': markup}, context_insta
nce=RequestContext(request))
aldian/tensorflow
tensorflow/python/keras/applications/xception.py
Python
apache-2.0
13,084
0.006649
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=invalid-name """Xception V1 model for Keras. On ImageNet, this model gets to a top-1 validation accuracy of 0.790 and a top-5 validation accuracy of 0.945. Reference: - [Xception: Deep Learning with Depthwise Separable Convolutions]( https://arxiv.org/abs/1610.02357) (CVPR 2017) """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.keras import backend from tensorflow.python.keras.applications import imagenet_utils from tensorflow.python.keras.engine import training from tensorflow.python.keras.layers import VersionAwareLayers from tensorflow.python.keras.utils import data_utils from tensorflow.python.keras.utils import layer_utils from tensorflow.python.lib.io import file_io from tensorflow.python.util.tf_export import keras_export TF_WEIGHTS_PATH = ( 'https://storage.googleapis.com/tensorflow/keras-applications/' 'xception/xception_weights_tf_dim_ordering_tf_kernels.h5') TF_WEIGHTS_PATH_NO_TOP = ( 'https://storage.googleapis.com/tensorflow/keras-applications/' 'xception/xception_weights_tf_dim_ordering_tf_kernels_notop.h5') layers = VersionAwareLayers() @keras_export('keras.applications.xception.Xception', 'keras.applications.Xception') def Xception( include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, classifier_activation='softmax'): """Instantiates
the Xception architecture. Reference: - [Xception: Deep Learning with Depthwise Separable Convolutions]( https://arxiv.org/ab
s/1610.02357) (CVPR 2017) Optionally loads weights pre-trained on ImageNet. Note that the data format convention used by the model is the one specified in your Keras config at `~/.keras/keras.json`. Note that the default input image size for this model is 299x299. Caution: Be sure to properly pre-process your inputs to the application. Please see `applications.xception.preprocess_input` for an example. Arguments: include_top: whether to include the fully-connected layer at the top of the network. weights: one of `None` (random initialization), 'imagenet' (pre-training on ImageNet), or the path to the weights file to be loaded. input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) to use as image input for the model. input_shape: optional shape tuple, only to be specified if `include_top` is False (otherwise the input shape has to be `(299, 299, 3)`. It should have exactly 3 inputs channels, and width and height should be no smaller than 71. E.g. `(150, 150, 3)` would be one valid value. pooling: Optional pooling mode for feature extraction when `include_top` is `False`. - `None` means that the output of the model will be the 4D tensor output of the last convolutional block. - `avg` means that global average pooling will be applied to the output of the last convolutional block, and thus the output of the model will be a 2D tensor. - `max` means that global max pooling will be applied. classes: optional number of classes to classify images into, only to be specified if `include_top` is True, and if no `weights` argument is specified. classifier_activation: A `str` or callable. The activation function to use on the "top" layer. Ignored unless `include_top=True`. Set `classifier_activation=None` to return the logits of the "top" layer. Returns: A `keras.Model` instance. Raises: ValueError: in case of invalid argument for `weights`, or invalid input shape. ValueError: if `classifier_activation` is not `softmax` or `None` when using a pretrained top layer. 
""" if not (weights in {'imagenet', None} or file_io.file_exists_v2(weights)): raise ValueError('The `weights` argument should be either ' '`None` (random initialization), `imagenet` ' '(pre-training on ImageNet), ' 'or the path to the weights file to be loaded.') if weights == 'imagenet' and include_top and classes != 1000: raise ValueError('If using `weights` as `"imagenet"` with `include_top`' ' as true, `classes` should be 1000') # Determine proper input shape input_shape = imagenet_utils.obtain_input_shape( input_shape, default_size=299, min_size=71, data_format=backend.image_data_format(), require_flatten=include_top, weights=weights) if input_tensor is None: img_input = layers.Input(shape=input_shape) else: if not backend.is_keras_tensor(input_tensor): img_input = layers.Input(tensor=input_tensor, shape=input_shape) else: img_input = input_tensor channel_axis = 1 if backend.image_data_format() == 'channels_first' else -1 x = layers.Conv2D( 32, (3, 3), strides=(2, 2), use_bias=False, name='block1_conv1')(img_input) x = layers.BatchNormalization(axis=channel_axis, name='block1_conv1_bn')(x) x = layers.Activation('relu', name='block1_conv1_act')(x) x = layers.Conv2D(64, (3, 3), use_bias=False, name='block1_conv2')(x) x = layers.BatchNormalization(axis=channel_axis, name='block1_conv2_bn')(x) x = layers.Activation('relu', name='block1_conv2_act')(x) residual = layers.Conv2D( 128, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) residual = layers.BatchNormalization(axis=channel_axis)(residual) x = layers.SeparableConv2D( 128, (3, 3), padding='same', use_bias=False, name='block2_sepconv1')(x) x = layers.BatchNormalization(axis=channel_axis, name='block2_sepconv1_bn')(x) x = layers.Activation('relu', name='block2_sepconv2_act')(x) x = layers.SeparableConv2D( 128, (3, 3), padding='same', use_bias=False, name='block2_sepconv2')(x) x = layers.BatchNormalization(axis=channel_axis, name='block2_sepconv2_bn')(x) x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block2_pool')(x) x = layers.add([x, residual]) residual = layers.Conv2D( 256, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) residual = layers.BatchNormalization(axis=channel_axis)(residual) x = layers.Activation('relu', name='block3_sepconv1_act')(x) x = layers.SeparableConv2D( 256, (3, 3), padding='same', use_bias=False, name='block3_sepconv1')(x) x = layers.BatchNormalization(axis=channel_axis, name='block3_sepconv1_bn')(x) x = layers.Activation('relu', name='block3_sepconv2_act')(x) x = layers.SeparableConv2D( 256, (3, 3), padding='same', use_bias=False, name='block3_sepconv2')(x) x = layers.BatchNormalization(axis=channel_axis, name='block3_sepconv2_bn')(x) x = layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same', name='block3_pool')(x) x = layers.add([x, residual]) residual = layers.Conv2D( 728, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x) residual = layers.BatchNormalization(axis=channel_axis)(residual) x = layers.Activation('relu', name='block4_sepconv1_act')(x) x = layers.SeparableConv2D( 728, (3, 3), padding='same', use_bias=False, name='block4_sepconv1')(x) x =
mrocklin/blaze
blaze/expr/utils.py
Python
bsd-3-clause
1,782
0.001684
from __future__ import absolute_import, division, print_function class _slice(object): """ A hashable slice object >>> _slice(0, 10, None) 0:10 """ def __init__(self, start, stop, step): self.start = start self.stop = stop self.step = step def __hash__(self): return hash((slice, self.start, self.stop, self.step)) def __str__(self): s = '' if self.start is not None: s = s + str(self.start) s = s + ':' if self.stop is not None: s = s + str(self.stop) if self.step is not None: s = s + ':' + str(self.step) return s def __eq__(self, other): return (type(self), self.start, self.stop, self.step) == \ (type(other), other.start, other.stop, other.step) def as_slice(self): return slice(self.start, self.stop, self.step) __repr__ = __str__ class hashable_list(tuple): def
__str__(self): return str(list(self)) def hashable_index(index): """ Convert slice-thing into something hashable >>> hashable_index(1) 1 >>> isinstance(hash(hashable_index((1, slice(10)))), int) True """ if type(index) is tuple: # can't do isinstance due to hashable_list return tuple(map(hashable_index, index)) elif isinstance(index, list): return hashable_list(index) elif isinstance(index, slice): return _slice
(index.start, index.stop, index.step) return index def replace_slices(index): if isinstance(index, hashable_list): return list(index) elif isinstance(index, _slice): return index.as_slice() elif isinstance(index, tuple): return tuple(map(replace_slices, index)) return index
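The point of these helpers is that expression indices may contain lists and slices, which are not hashable on the Pythons this code targets and so cannot serve as dict keys; they are swapped for hashable stand-ins and recovered later without losing information. A short usage sketch, assuming the definitions above are in scope; the cache dict is only for illustration:

idx = (1, slice(10), [2, 3])          # raw index: unhashable because of the slice and list
key = hashable_index(idx)             # slices -> _slice, lists -> hashable_list
cache = {key: 'memoized result'}      # now usable as a memoization key
assert replace_slices(key) == (1, slice(None, 10, None), [2, 3])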
scionrep/scioncc
src/pyon/ion/process.py
Python
bsd-2-clause
27,034
0.005401
#!/usr/bin/env python __author__ = 'Adam R. Smith, Michael Meisinger, Dave Foster <dfoster@asascience.com>' import threading import traceback import gevent from gevent import greenlet, Timeout from gevent.event import Event, AsyncResult from gevent.queue import Queue from pyon.core import MSG_HEADER_ACTOR from pyon.core.bootstrap import CFG from pyon.core.exception import IonException, ContainerError from pyon.core.exception import Timeout as IonTimeout from pyon.core.thread import PyonThreadManager, PyonThread, ThreadManager, PyonThreadTraceback, PyonHeartbeatError from pyon.datastore.postgresql.pg_util import init_db_stats, get_db_stats, clear_db_stats from pyon.ion.service import BaseService from pyon.util.containers import get_ion_ts, get_ion_ts_millis from pyon.util.log import log STAT_INTERVAL_LENGTH = 60000 # Interval time for process saturation stats collection stats_callback = None class OperationInterruptedException(BaseException): """ Interrupted exception. Used by external items timing out execution in the IonProcessThread's control thread. Derived from BaseException to specifically avoid try/except Exception blocks, such as in Publisher's publish_event. """ pass class IonProcessError(StandardError): pass class IonProcessThread(PyonThread): """ The control part of an ION process. """ def __init__(self, target=None, listeners=None, name=None, service=None, cleanup_method=None, heartbeat_secs=10, **kwargs): """ Constructs the control part of an ION process. Used by the container's IonProcessThreadManager, as part of spawn_process. @param target A callable to run in the PyonThread. If None (typical), will use the target method defined in this class. @param listeners A list of listening endpoints attached to this thread. @param name The name of this ION process. @param service An instance of the BaseService derived class which contains the business logic for the ION process. @param cleanup_method An optional callable to run when the process is stopping. Runs after all other notify_stop calls have run. Should take one param, this instance. @param heartbeat_secs Number of seconds to wait in between heartbeats. 
""" self._startup_listeners = listeners or [] self.listeners = [] self._listener_map = {} self.name = name self.service = service self._cleanup_method = cleanup_method self.thread_manager = ThreadManager(failure_notify_callback=self._child_failed) # bubbles up to main thread manager self._dead_children = [] # save any dead children for forensics self._ctrl_thread = None self._ctrl_queue = Queue() self._ready_control = Event() self._errors = [] self._ctrl_current = None # set to the AR generated by _routing_call when in the context of a call # processing vs idle time (ms) self._start_time = None self._proc_time = 0 # busy time since start self._proc_time_prior = 0 # busy time at the beginning of the prior interval self._proc_time_prior2 = 0 # busy time at the beginning of 2 interval's ago self._proc_interval_num = 0 # interval num of last record # for heartbeats, used to detect stuck processes self._heartbeat_secs = heartbeat_secs # amount of time to wait between heartbeats self._heartbeat_stack = None # stacktrace of last heartbeat self._heartbeat_time = None # timestamp of heart beat last matching the current op self._heartbeat_op = None # last operation (by AR) self._heartbeat_count = 0 # number of times this operation has been seen consecutively self._log_call_exception = CFG.get_safe("container.process.log_exceptions", False) self._log_call_dbstats = CFG.get_safe("container.process.log_dbstats", False) self._warn_call_dbstmt_threshold = CFG.get_safe("container.process.warn_dbstmt_threshold", 0) PyonThread.__init__(self, target=target, **kwargs) def heartbeat(self): """ Returns a 3-tuple indicating everything is ok. Should only be called after the process has been started. Checks the following: - All attached endpoints are alive + listening (this means ready) - The control flow greenlet is alive + listening or processing @return 3-tuple indicating (listeners ok, ctrl thread ok, heartbeat status). Use all on it for a boolean indication of success. """ listeners_ok = True for l in self.listeners: if not (l in self._listener_map and not self._listener_map[l].proc.dead and l.get_ready_event().is_set()): listeners_ok = False ctrl_thread_ok = self._ctrl_thread.running # are we currently processing something? heartbeat_ok = True if self._ctrl_current is not None: st = traceback.extract_stack(self._ctrl_thread.proc.gr_frame) if self._ctrl_current == self._heartbeat_op: if st == self._heartbeat_stack: self._heartbeat_count += 1 # we've seen this before! increment count # we've been in this for the last X ticks, or it's been X seconds, fail this part of the heartbeat if self._heartbeat_count > CFG.get_safe('container.timeout.heartbeat_proc_count_threshold', 30) or \ get_ion_ts_millis() - int(self._heartbeat_time) >= CFG.get_safe('containe
r.timeout.heartbeat_proc_time_threshold', 30) * 1000: heartbeat_ok = False else: # it's made some progress self._heartbeat_count = 1 self._heartbeat_stack = st self._heartbeat_time = get_ion_ts() else: self._heartbeat_op = self._ctrl_current self._heartbeat_count = 1 self._heartbeat_
time = get_ion_ts() self._heartbeat_stack = st else: self._heartbeat_op = None self._heartbeat_count = 0 #log.debug("%s %s %s", listeners_ok, ctrl_thread_ok, heartbeat_ok) return (listeners_ok, ctrl_thread_ok, heartbeat_ok) @property def time_stats(self): """ Returns a 5-tuple of (total time, idle time, processing time, time since prior interval start, busy since prior interval start), all in ms (int). """ now = get_ion_ts_millis() running_time = now - self._start_time idle_time = running_time - self._proc_time cur_interval = now / STAT_INTERVAL_LENGTH now_since_prior = now - (cur_interval - 1) * STAT_INTERVAL_LENGTH if cur_interval == self._proc_interval_num: proc_time_since_prior = self._proc_time-self._proc_time_prior2 elif cur_interval-1 == self._proc_interval_num: proc_time_since_prior = self._proc_time-self._proc_time_prior else: proc_time_since_prior = 0 return (running_time, idle_time, self._proc_time, now_since_prior, proc_time_since_prior) def _child_failed(self, child): """ Callback from gevent as set in the TheadManager, when a child greenlet fails. Kills the ION process main greenlet. This propagates the error up to the process supervisor. """ # remove the child from the list of children (so we can shut down cleanly) for x in self.thread_manager.children: if x.proc == child: self.thread_manager.children.remove(x) break self._dead_children.append(child) # kill this
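The heartbeat logic above amounts to: sample the control thread's stack on every beat and, if the same operation keeps showing an identical stack trace too many times in a row, treat the process as stuck. A standard-library-only sketch of that idea for a plain thread; the class name, threshold, and method names are illustrative, not part of pyon:

import sys
import traceback

STUCK_AFTER_N_BEATS = 30                      # illustrative threshold

class HeartbeatMonitor(object):
    def __init__(self, thread):
        self._thread = thread                 # any object exposing a thread ident
        self._last_stack = None
        self._count = 0

    def beat(self):
        frame = sys._current_frames().get(self._thread.ident)
        if frame is None:
            return True                       # thread already finished; nothing to flag
        stack = traceback.extract_stack(frame)
        if stack == self._last_stack:
            self._count += 1                  # no visible progress since the last beat
        else:
            self._last_stack, self._count = stack, 1
        return self._count <= STUCK_AFTER_N_BEATS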
joedeller/pymine
whereamiV2.py
Python
mit
3,643
0.000549
#! /usr/bin/python # Joe Deller 2014 # Finding out where we are in minecraft # Level : Beginner # Uses : Libraries, variables, functions # Minecraft worlds on the Raspberry Pi are smaller than # other minecraft worlds, but are still pretty big # So one of the first things we need to learn to do # is find out where we are in the world # As the player moves around the world, Minecraft keeps track # of the X (left / right ) , Y (height) ,Z (depth) coordinates of the player # You can see these numbers on the main minecraft game screen # The minecraft library has a method called getTilePos() # It tracks where the player is # This program introduces the "while" keyword, our first # example of a loop, to make sure the program never stops # until there is either an error, or we manually stop (break) # the program using Ctrl-C on the keyboard import mcpi.minecraft as minecraft # This program also uses another library, the time library # as we want the program to sleep for a short time - 1 second # so that we don't fill the screen with too much information # We will come across the time library later when we # make a minecraft digital clock import time # Connect to Minecraft mc = minecraft.Minecraft.create() # We will use the getTilePos() method to tell us where we are # and store that information in a variable # Technically this is a special kind of variable, called an "object" # but for now all we need to worry about is what to call it # Most computer languages are very strict about using capital letters # To the computer, playerPos, Playerpos and PlayerPOS are completely # different things, so once you decide on a name, you need to spell # it the same way every time you want to use it playerPos = mc.player.getTilePos() # playerPos now has our 3d position in the minecraft world # it is made up of three parts, x, y & z # There is another similar function called getPos() # The difference is that getTilePos() returns whole numbers # getPos() returns the exact position, to several decimal places # We will stick with whole numbers for now # playerPos = mc.player.getPos() # We will be using a special kind of loop - an infinite loop # Unless there is an error or we manually stop the program # it will run forever # True and False are normally used to compare one or more items and then # make a choice, but for this program the loop is really saying "is true equal true?" # the answer will always be yes so the loop will never stop # We will be using more while loops in later programs # The main thing we need to worry
about is the spacing # Notice playerPos has four spaces before it # This means that it is "inside" the loop # Python is very fussy about spaces, something we will be seeing again and again # However,comments do not care about spaces while True: myLocation = mc.player.getTilePos() # myLocation is variable that contains three variables inside in # we can get these one at a time, or all three at once # Before we can u
se them with postToChat() we need to change them # from numbers, into characters - called a string # There are several ways of doing this, for now we will use a command # called str , which takes a number and hands back a string # of characters. Although to us there isn't any apparent difference # the way the numbers and characters are stored is very different. x = str(myLocation.x) y = str(myLocation.y) z = str(myLocation.z) # We use the postToChat() method from our hello world example mc.postToChat("You are standing at X: " + x + ", Y: " + y + ", Z: " + z) # Take a breath! time.sleep(1)
torbjoernk/pySDC
examples/advection_2d_explicit/playground.py
Python
bsd-2-clause
2,543
0.009044
from pySDC import CollocationClasses as collclass import numpy as np from ProblemClass import sharpclaw #from examples.sharpclaw_burgers1d.TransferClass import mesh_to_mesh_1d from pySDC.datatype_classes.mesh import mesh, rhs_imex_mesh from pySDC.sweeper_classes.imex_1st_order import imex_1st_order import pySDC.Methods as mp from pySDC import Log from pySDC.Stats import grep_stats, sort_stats # Sharpclaw imports from clawpack import pyclaw from clawpack import riemann from matplotlib import pyplot as plt if __name__ == "__main__": # set global logger (remove this if you do not want the output at all) logger = Log.setup_custom_logger('root') num_procs = 1 # This comes as read-in for the level class lparams = {} lparams['restol'] = 1E-10 sparams = {} sparams['maxiter'] = 20 # setup parameters "in time" t0 = 0 dt = 0.001 Tend = 100*dt # This comes as read-in for the problem class pparams = {} pparams['nvars'] = [(2,50,50)] pparams['nu'] = 0.001 # This comes as read-in for the transfer operations tparams = {} tparams['finter'] = True # Fill description dictionary for easy hierarchy creation description = {} description['problem_class'] = sharpclaw description['problem_params'] = pparams description['dtype_u'] = mesh description['dtype_f'] = rhs_imex_mesh description['collocation_class'] = collclass.CollGaussLobatto description['num_nodes'] = 5 description['sweeper_class'] = imex_1st_order description['level_params'] = lparams #description['transfer_class'] = mesh_to_mesh_1d #description['transfer_params'] = tparams # quickly generate block of steps MS = mp.generate_steps(num_procs,sparams,description) # get initial values on finest level P = MS[0].levels[0].prob uinit = P.u_exact(t0) # call main function to get things done... uend,stats = mp.run_pfasst_serial(MS,u0=uinit,t0=t0,dt=dt,Tend=Tend) # compute exact solution and compare uex = P.u_exact(Tend) # print('error at time %s: %s' %(Tend,np.linalg.norm(uex.values-uend.values,np.inf)/np.linal
g.norm( # uex.values,
np.inf))) fig = plt.figure(figsize=(8,8)) plt.imshow(uend.values[0,:,:]) # plt.plot(P.state.grid.x.centers,uend.values, color='b', label='SDC') # plt.plot(P.state.grid.x.centers,uex.values, color='r', label='Exact') # plt.legend() # plt.xlim([0, 1]) # plt.ylim([-1, 1]) plt.show()
owenmorris/pylucene
test/test_PositionIncrement.py
Python
apache-2.0
11,319
0.002297
# ==================================================================== # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ==================================================================== import sys, lucene, unittest from lucene import JArray from PyLuceneTestCase import PyLuceneTestCase from MultiSpansWrapper import MultiSpansWrapper from java.io import StringReader from org.apache.lucene.analysis import Analyzer from org.apache.lucene.analysis.core import \ LowerCaseTokenizer, WhitespaceTokenizer from org.apache.lucene.analysis.tokenattributes import \ CharTermAttribute, OffsetAttribute, PayloadAttribute, \ PositionIncrementAttribute from org.apache.lucene.document import Document, Field, TextField from org.apache.lucene.index import MultiFields, Term from org.apache.lucene.queryparser.classic import QueryParser from org.apache.lucene.search import MultiPhraseQuery, PhraseQuery from org.apache.lucene.search.payloads import PayloadSpanUtil from org.apache.lucene.search.spans import SpanNearQuery, SpanTermQuery from org.apache.lucene.util import BytesRef, Version from org.apache.pylucene.analysis import \ PythonAnalyzer, PythonFilteringTokenFilter, PythonTokenFilter, \ PythonTokenizer class PositionIncrementTestCase(PyLuceneTestCase): """ Unit tests ported from Java Lucene """ def testSetPosition(self): class _tokenizer(PythonTokenizer): def __init__(_self, reader): super(_tokenizer, _self).__init__(reader) _self.TOKENS = ["1", "2", "3", "4", "5"] _self.INCREMENTS = [1, 2, 1, 0, 1] _self.i = 0 _self.posIncrAtt = _self.addAttribute(PositionIncrementAttribute.class_) _self.termAtt = _self.addAttribute(CharTermAttribute.class_) _self.offsetAtt = _self.addAttribute(OffsetAttribute.class_) def incrementToken(_self): if _self.i == len(_self.TOKENS): return False _self.clearAttributes() _self.termAtt.append(_self.TOKENS[_self.i]) _self.offsetAtt.setOffset(_self.i, _self.i) _self.posIncrAtt.setPositionIncrement(_self.INCREMENTS[_self.i]) _self.i += 1 return True def end(_self): pass def reset(_self): pass def close(_self): pass class _analyzer(PythonAnalyzer): def createComponents(_self, fieldName, reader): return Analyzer.TokenStreamComponents(_tokenizer(reader)) writer = self.getWriter(analyzer=_analyzer()) d = Document() d.add(Field("field", "bogus", TextField.TYPE_STORED)) writer.addDocument(d) writer.commit() writer.close() searcher = self.getSearcher() reader = searcher.getIndexReader() pos = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader), "field", BytesRef("1")) pos.nextDoc() # first token should be at position 0 self.assertEqual(0, pos.nextPosition()) pos = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader), "field", BytesRef("2")) pos.nextDoc() # second token should be at position 2 self.assertEqual(2, pos.nextPosition()) q = PhraseQuery() q.add(Term("field", "1")) q.add(Term("field", "2")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(0, len(hits)) # same as previous, just specify positions explicitely. 
q = PhraseQuery() q.add(Term("field", "1"), 0) q.add(Term("field", "2"), 1) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(0, len(hits)) # specifying correct positions should find the phrase. q = PhraseQuery() q.add(Term("field", "1"), 0) q.add(Term("field", "2"), 2) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(1, len(hits)) q = PhraseQuery() q.add(Term("field", "2")) q.add(Term("field", "3")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(1, len(hits)) q = PhraseQuery() q.add(Term("field", "3")) q.add(Term("field", "4")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(0, len(hits)) # phrase query would find it when correct positions are specified. q = PhraseQuery() q.add(Term("field", "3"), 0) q.add(Term("field", "4"), 0) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(1, len(hits)) # phrase query should fail for non existing searched term # even if there exist another searched terms in the same searched # position. q = PhraseQuery() q.add(Term("field", "3"), 0) q.add(Term("field", "9"), 0) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(0, len(hits)) # multi-phrase query should succed for non existin
g searched term # because there exist another searched terms in the same searched # position. mq = MultiPhraseQuery()
mq.add([Term("field", "3"), Term("field", "9")], 0) hits = searcher.search(mq, None, 1000).scoreDocs self.assertEqual(1, len(hits)) q = PhraseQuery() q.add(Term("field", "2")) q.add(Term("field", "4")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(1, len(hits)) q = PhraseQuery() q.add(Term("field", "3")) q.add(Term("field", "5")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(1, len(hits)) q = PhraseQuery() q.add(Term("field", "4")) q.add(Term("field", "5")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(1, len(hits)) q = PhraseQuery() q.add(Term("field", "2")) q.add(Term("field", "5")) hits = searcher.search(q, None, 1000).scoreDocs self.assertEqual(0, len(hits)) def testPayloadsPos0(self): writer = self.getWriter(analyzer=TestPayloadAnalyzer()) doc = Document() doc.add(Field("content", "a a b c d e a f g h i j a b k k", TextField.TYPE_STORED)) writer.addDocument(doc) reader = writer.getReader() writer.close() tp = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader), "content", BytesRef("a")) count = 0 self.assert_(tp.nextDoc() != tp.NO_MORE_DOCS) # "a" occurs 4 times self.assertEqual(4, tp.freq()) expected = 0 self.assertEqual(expected, tp.nextPosition()) self.assertEqual(1, tp.nextPosition()) self.assertEqual(3, tp.nextPosition()) self.assertEqual(6, tp.nextPosition()) # only one doc has "a" self.assert_(tp.nextDoc() == tp.NO_MORE_DOCS) searcher = self.getSearcher(reader=reader) stq1 = SpanTermQuery(Term("content", "a")) stq2 = SpanTermQuery(Term("content", "k")) sqs = [stq1, stq2] snq = SpanNearQuery(sqs, 30, False) count = 0 sawZero = False pspans = MultiSpansWrapper.wrap(searcher.getTopReaderContext(), snq) while pspans.next(): payloads = pspans.getPayload() sawZero |
quchunguang/test
testpy/parsetab.py
Python
mit
2,575
0.133592
# parsetab.py # This file is automatically generated. Do not edit. _tabversion = '3.2' _lr_method = 'LALR' _lr_signature = '\x91\x95\xa5\xf7\xe0^bz\xc0\xf4\x04\xf9Z\xebA\xba' _lr_action_items = {'NAME':([0,2,5,7,11,12,13,14,],[1,8,8,8,8,8,8,8,]),')':([3,8,9,10,16,17,18,19,20,],[-9,-10,-7,16,-8,-4,-3,-5,-6,]),'(':([0,2,5,7,11,12,13,14,],[5,5,5,5,5,5,5,5,]),'+':([1,3,6,8,9,10,15,16,17,18,19,20,],[-10,-9,12,-10,-7,12,12,-8,-4,-3,-5,-6,]),'*':([1,3,6,8,9,10,15,16,17,18,19,20,],[-10,-9,13,-10,-7,13,13,-8,13,13,-5,-6,]),'-':([0,1,2,3,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,],[2,-10,2,-9,2,11,2,-10,-7,11,2,2,2,2,11,-8,-4,-3,-5,-6,]),'NUMBER':([0,2,5,7,11,12,13,14,],[3,3,3,3,3,3,3,3,]),'/':([1,3,6,8,9,10,15,16,17,18,19,20,],[-10,-9,14,-10,-7,14,14,-8,14,14,-5,-6,]),'=':([1,],[7,]),'$end':([1,3,4,6,8,9,15,16,17,18,19,20,],[-10,-9,0,-2,-10,-7,-1,-8,-4,-3,-5,-6,]),} _lr_action = { } for _k, _v in _lr_action_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_action: _lr_action[_x] = { } _lr_action[_x][_k] = _y del _lr_action_items _lr_goto_items = {'expression':([0,2,5,7,11,12,13,14,],[6,9,10,15,17,18,19,20,]),'statement':([0,],[4,]),} _lr_goto = { } for _k, _v in _lr_goto_items.items(): for _x,_y in zip(_v[0],_v[1]): if not _x in _lr_goto: _lr_goto[_x] = { } _lr_goto[_x][_k] = _y del _lr_goto_items _lr_productions = [ ("S' -> statement","S'",1,None,None,None), ('statement -> NAME = expressi
on','statement',3,'p_statement_assign','D:\\repos\\test\\testpy\\testply.py',58), ('statement -> expression','statement',1,'p_statement_expr','D:\\repos\\test\\testpy\\testply.py',63), ('expression -> expression + expression','expression',3,'p_expression_binop','D:\\repos\\test\\testpy\\testply.py',68), ('expression -> expres
sion - expression','expression',3,'p_expression_binop','D:\\repos\\test\\testpy\\testply.py',69), ('expression -> expression * expression','expression',3,'p_expression_binop','D:\\repos\\test\\testpy\\testply.py',70), ('expression -> expression / expression','expression',3,'p_expression_binop','D:\\repos\\test\\testpy\\testply.py',71), ('expression -> - expression','expression',2,'p_expression_uminus','D:\\repos\\test\\testpy\\testply.py',83), ('expression -> ( expression )','expression',3,'p_expression_group','D:\\repos\\test\\testpy\\testply.py',88), ('expression -> NUMBER','expression',1,'p_expression_number','D:\\repos\\test\\testpy\\testply.py',93), ('expression -> NAME','expression',1,'p_expression_name','D:\\repos\\test\\testpy\\testply.py',98), ]
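Tables like this are emitted by PLY's yacc rather than written by hand: the _lr_productions list mirrors the docstring grammar rules of the generating parser (here testply.py). A sketch of rules with that shape, assuming PLY is installed and a lexer module exposing NAME, NUMBER, and the literal operators exists; the module name calclex and the rule bodies are illustrative, not the actual testply.py source:

import ply.yacc as yacc
from calclex import tokens   # assumed lexer module, not shown here

names = {}

def p_statement_assign(p):
    "statement : NAME '=' expression"
    names[p[1]] = p[3]

def p_statement_expr(p):
    "statement : expression"
    print(p[1])

def p_expression_binop(p):
    """expression : expression '+' expression
                  | expression '-' expression
                  | expression '*' expression
                  | expression '/' expression"""
    ops = {'+': lambda a, b: a + b, '-': lambda a, b: a - b,
           '*': lambda a, b: a * b, '/': lambda a, b: a / b}
    p[0] = ops[p[2]](p[1], p[3])

def p_expression_number(p):
    "expression : NUMBER"
    p[0] = p[1]

# remaining rules (unary minus, grouping, NAME lookup) follow the same pattern
parser = yacc.yacc()   # this call is what writes parsetab.py next to the parser module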