Dataset schema:
  code        stringlengths   2 .. 1.05M
  repo_name   stringlengths   5 .. 104
  path        stringlengths   4 .. 251
  language    stringclasses   1 value
  license     stringclasses   15 values
  size        int32           2 .. 1.05M
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import, division

import unittest

from pusher import Config, Pusher
from pusher.util import GET

try:
    import unittest.mock as mock
except ImportError:
    import mock


class TestPusher(unittest.TestCase):
    def setUp(self):
        self.pusher = Pusher(config=Config.from_url(u'http://key:secret@somehost/apps/4'))

    def test_trigger_success_case(self):
        json_dumped = u'{"message": "hello world"}'
        with mock.patch('json.dumps', return_value=json_dumped) as json_dumps_mock:
            request = self.pusher.trigger.make_request([u'some_channel'], u'some_event', {u'message': u'hello world'})
        self.assertEqual(request.path, u'/apps/4/events')
        self.assertEqual(request.method, u'POST')
        expected_params = {
            u'channels': [u'some_channel'],
            u'data': json_dumped,
            u'name': u'some_event'
        }
        self.assertEqual(request.params, expected_params)
        # assert_called_once(args) is not a real mock assertion and passes
        # silently; assert_called_once_with checks the arguments as intended.
        json_dumps_mock.assert_called_once_with({u'message': u'hello world'})

    def test_trigger_disallow_single_channel(self):
        self.assertRaises(TypeError, lambda: self.pusher.trigger.make_request(u'some_channel', u'some_event', {u'message': u'hello world'}))

    def test_trigger_disallow_invalid_channels(self):
        self.assertRaises(ValueError, lambda: self.pusher.trigger.make_request([u'some_channel!'], u'some_event', {u'message': u'hello world'}))

    def test_channels_info_default_success_case(self):
        request = self.pusher.channels_info.make_request()
        self.assertEqual(request.method, GET)
        self.assertEqual(request.path, u'/apps/4/channels')
        self.assertEqual(request.params, {})

    def test_channels_info_with_prefix_success_case(self):
        request = self.pusher.channels_info.make_request(prefix_filter='test')
        self.assertEqual(request.method, GET)
        self.assertEqual(request.path, u'/apps/4/channels')
        self.assertEqual(request.params, {u'filter_by_prefix': u'test'})

    def test_channels_info_with_attrs_success_case(self):
        request = self.pusher.channels_info.make_request(attributes=[u'attr1', u'attr2'])
        self.assertEqual(request.method, GET)
        self.assertEqual(request.path, u'/apps/4/channels')
        self.assertEqual(request.params, {u'info': u'attr1,attr2'})

    def test_channel_info_success_case(self):
        request = self.pusher.channel_info.make_request(u'some_channel')
        self.assertEqual(request.method, GET)
        self.assertEqual(request.path, u'/apps/4/channels/some_channel')
        self.assertEqual(request.params, {})

    def test_channel_info_with_attrs_success_case(self):
        request = self.pusher.channel_info.make_request(u'some_channel', attributes=[u'attr1', u'attr2'])
        self.assertEqual(request.method, GET)
        self.assertEqual(request.path, u'/apps/4/channels/some_channel')
        self.assertEqual(request.params, {u'info': u'attr1,attr2'})

    def test_user_info_success_case(self):
        request = self.pusher.users_info.make_request(u'presence-channel')
        self.assertEqual(request.method, GET)
        self.assertEqual(request.path, u'/apps/4/channels/presence-channel/users')
        self.assertEqual(request.params, {})


if __name__ == '__main__':
    unittest.main()
repo_name: pusher/pusher-python-rest
path: pusher_tests/test_pusher.py
language: Python
license: mit
size: 3,426
#!/usr/bin/env python3

'''This should be the command-line pendant of ./test.html

By Guillaume Lathoud
glathoud@yahoo.fr
'''

import subprocess

from jsm_build import main
from jsm_const import D8

ret = subprocess.check_output(
    [D8, '-e', 'load("codeparse_test.js");print(codeparse_test());'],
    stderr=subprocess.STDOUT,
    universal_newlines=True
).strip()
assert ret == 'true', 'codeparse_test failed'
print("--- codeparse_test succeeded ---")

main(['jsm_dev/example_development.jsm'])
main(['jsm_dev/expl.js'])
main(['jsm_dev/metaret_standalone.js'])
repo_name: glathoud/js.metaret
path: test.py
language: Python
license: mit
size: 663
import _plotly_utils.basevalidators


class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    def __init__(self, plotly_name="color", parent_name="scatter3d.error_z", **kwargs):
        super(ColorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
repo_name: plotly/python-api
path: packages/python/plotly/plotly/validators/scatter3d/error_z/_color.py
language: Python
license: mit
size: 445
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

try:
    from ._models_py3 import Configuration
    from ._models_py3 import ConfigurationListResult
    from ._models_py3 import Database
    from ._models_py3 import DatabaseListResult
    from ._models_py3 import ErrorAdditionalInfo
    from ._models_py3 import ErrorResponse
    from ._models_py3 import FirewallRule
    from ._models_py3 import FirewallRuleListResult
    from ._models_py3 import LogFile
    from ._models_py3 import LogFileListResult
    from ._models_py3 import NameAvailability
    from ._models_py3 import NameAvailabilityRequest
    from ._models_py3 import Operation
    from ._models_py3 import OperationDisplay
    from ._models_py3 import OperationListResult
    from ._models_py3 import PerformanceTierListResult
    from ._models_py3 import PerformanceTierProperties
    from ._models_py3 import PerformanceTierServiceLevelObjectives
    from ._models_py3 import PrivateEndpointConnection
    from ._models_py3 import PrivateEndpointConnectionListResult
    from ._models_py3 import PrivateEndpointProperty
    from ._models_py3 import PrivateLinkResource
    from ._models_py3 import PrivateLinkResourceListResult
    from ._models_py3 import PrivateLinkResourceProperties
    from ._models_py3 import PrivateLinkServiceConnectionStateProperty
    from ._models_py3 import ProxyResource
    from ._models_py3 import RecoverableServerResource
    from ._models_py3 import Resource
    from ._models_py3 import ResourceIdentity
    from ._models_py3 import Server
    from ._models_py3 import ServerAdministratorResource
    from ._models_py3 import ServerAdministratorResourceListResult
    from ._models_py3 import ServerForCreate
    from ._models_py3 import ServerKey
    from ._models_py3 import ServerKeyListResult
    from ._models_py3 import ServerListResult
    from ._models_py3 import ServerPrivateEndpointConnection
    from ._models_py3 import ServerPrivateEndpointConnectionProperties
    from ._models_py3 import ServerPrivateLinkServiceConnectionStateProperty
    from ._models_py3 import ServerPropertiesForCreate
    from ._models_py3 import ServerPropertiesForDefaultCreate
    from ._models_py3 import ServerPropertiesForGeoRestore
    from ._models_py3 import ServerPropertiesForReplica
    from ._models_py3 import ServerPropertiesForRestore
    from ._models_py3 import ServerSecurityAlertPolicy
    from ._models_py3 import ServerSecurityAlertPolicyListResult
    from ._models_py3 import ServerUpdateParameters
    from ._models_py3 import Sku
    from ._models_py3 import StorageProfile
    from ._models_py3 import TagsObject
    from ._models_py3 import TrackedResource
    from ._models_py3 import VirtualNetworkRule
    from ._models_py3 import VirtualNetworkRuleListResult
except (SyntaxError, ImportError):
    from ._models import Configuration  # type: ignore
    from ._models import ConfigurationListResult  # type: ignore
    from ._models import Database  # type: ignore
    from ._models import DatabaseListResult  # type: ignore
    from ._models import ErrorAdditionalInfo  # type: ignore
    from ._models import ErrorResponse  # type: ignore
    from ._models import FirewallRule  # type: ignore
    from ._models import FirewallRuleListResult  # type: ignore
    from ._models import LogFile  # type: ignore
    from ._models import LogFileListResult  # type: ignore
    from ._models import NameAvailability  # type: ignore
    from ._models import NameAvailabilityRequest  # type: ignore
    from ._models import Operation  # type: ignore
    from ._models import OperationDisplay  # type: ignore
    from ._models import OperationListResult  # type: ignore
    from ._models import PerformanceTierListResult  # type: ignore
    from ._models import PerformanceTierProperties  # type: ignore
    from ._models import PerformanceTierServiceLevelObjectives  # type: ignore
    from ._models import PrivateEndpointConnection  # type: ignore
    from ._models import PrivateEndpointConnectionListResult  # type: ignore
    from ._models import PrivateEndpointProperty  # type: ignore
    from ._models import PrivateLinkResource  # type: ignore
    from ._models import PrivateLinkResourceListResult  # type: ignore
    from ._models import PrivateLinkResourceProperties  # type: ignore
    from ._models import PrivateLinkServiceConnectionStateProperty  # type: ignore
    from ._models import ProxyResource  # type: ignore
    from ._models import RecoverableServerResource  # type: ignore
    from ._models import Resource  # type: ignore
    from ._models import ResourceIdentity  # type: ignore
    from ._models import Server  # type: ignore
    from ._models import ServerAdministratorResource  # type: ignore
    from ._models import ServerAdministratorResourceListResult  # type: ignore
    from ._models import ServerForCreate  # type: ignore
    from ._models import ServerKey  # type: ignore
    from ._models import ServerKeyListResult  # type: ignore
    from ._models import ServerListResult  # type: ignore
    from ._models import ServerPrivateEndpointConnection  # type: ignore
    from ._models import ServerPrivateEndpointConnectionProperties  # type: ignore
    from ._models import ServerPrivateLinkServiceConnectionStateProperty  # type: ignore
    from ._models import ServerPropertiesForCreate  # type: ignore
    from ._models import ServerPropertiesForDefaultCreate  # type: ignore
    from ._models import ServerPropertiesForGeoRestore  # type: ignore
    from ._models import ServerPropertiesForReplica  # type: ignore
    from ._models import ServerPropertiesForRestore  # type: ignore
    from ._models import ServerSecurityAlertPolicy  # type: ignore
    from ._models import ServerSecurityAlertPolicyListResult  # type: ignore
    from ._models import ServerUpdateParameters  # type: ignore
    from ._models import Sku  # type: ignore
    from ._models import StorageProfile  # type: ignore
    from ._models import TagsObject  # type: ignore
    from ._models import TrackedResource  # type: ignore
    from ._models import VirtualNetworkRule  # type: ignore
    from ._models import VirtualNetworkRuleListResult  # type: ignore

from ._postgre_sql_management_client_enums import (
    CreateMode,
    GeoRedundantBackup,
    IdentityType,
    InfrastructureEncryption,
    MinimalTlsVersionEnum,
    OperationOrigin,
    PrivateEndpointProvisioningState,
    PrivateLinkServiceConnectionStateActionsRequire,
    PrivateLinkServiceConnectionStateStatus,
    PublicNetworkAccessEnum,
    SecurityAlertPolicyName,
    ServerKeyType,
    ServerSecurityAlertPolicyState,
    ServerState,
    ServerVersion,
    SkuTier,
    SslEnforcementEnum,
    StorageAutogrow,
    VirtualNetworkRuleState,
)

__all__ = [
    'Configuration',
    'ConfigurationListResult',
    'Database',
    'DatabaseListResult',
    'ErrorAdditionalInfo',
    'ErrorResponse',
    'FirewallRule',
    'FirewallRuleListResult',
    'LogFile',
    'LogFileListResult',
    'NameAvailability',
    'NameAvailabilityRequest',
    'Operation',
    'OperationDisplay',
    'OperationListResult',
    'PerformanceTierListResult',
    'PerformanceTierProperties',
    'PerformanceTierServiceLevelObjectives',
    'PrivateEndpointConnection',
    'PrivateEndpointConnectionListResult',
    'PrivateEndpointProperty',
    'PrivateLinkResource',
    'PrivateLinkResourceListResult',
    'PrivateLinkResourceProperties',
    'PrivateLinkServiceConnectionStateProperty',
    'ProxyResource',
    'RecoverableServerResource',
    'Resource',
    'ResourceIdentity',
    'Server',
    'ServerAdministratorResource',
    'ServerAdministratorResourceListResult',
    'ServerForCreate',
    'ServerKey',
    'ServerKeyListResult',
    'ServerListResult',
    'ServerPrivateEndpointConnection',
    'ServerPrivateEndpointConnectionProperties',
    'ServerPrivateLinkServiceConnectionStateProperty',
    'ServerPropertiesForCreate',
    'ServerPropertiesForDefaultCreate',
    'ServerPropertiesForGeoRestore',
    'ServerPropertiesForReplica',
    'ServerPropertiesForRestore',
    'ServerSecurityAlertPolicy',
    'ServerSecurityAlertPolicyListResult',
    'ServerUpdateParameters',
    'Sku',
    'StorageProfile',
    'TagsObject',
    'TrackedResource',
    'VirtualNetworkRule',
    'VirtualNetworkRuleListResult',
    'CreateMode',
    'GeoRedundantBackup',
    'IdentityType',
    'InfrastructureEncryption',
    'MinimalTlsVersionEnum',
    'OperationOrigin',
    'PrivateEndpointProvisioningState',
    'PrivateLinkServiceConnectionStateActionsRequire',
    'PrivateLinkServiceConnectionStateStatus',
    'PublicNetworkAccessEnum',
    'SecurityAlertPolicyName',
    'ServerKeyType',
    'ServerSecurityAlertPolicyState',
    'ServerState',
    'ServerVersion',
    'SkuTier',
    'SslEnforcementEnum',
    'StorageAutogrow',
    'VirtualNetworkRuleState',
]
repo_name: Azure/azure-sdk-for-python
path: sdk/rdbms/azure-mgmt-rdbms/azure/mgmt/rdbms/postgresql/models/__init__.py
language: Python
license: mit
size: 9,295
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 29 14:04:43 2016

@author: 024536
"""

from flask import render_template, redirect, url_for, abort, flash, request,\
    current_app, make_response, jsonify
from flask_login import login_required, current_user
from flask_sqlalchemy import get_debug_queries
from . import ctaAlgo
from .forms import RunForm, UploadForm, StrategyForm, ManageDataForm, \
    TDB_CodeTableForm, TDB_TickDataForm, \
    TDB_CodeTable, TDB_FutureAB, TDB_TickAB, statDataTable
from .. import db, tickmongo, tdbapi
from ..models import Permission, Role, User, Post, Comment
from ..decorators import admin_required, permission_required
from .ctaAlgoBacktesting import ctaBacktestingRun
import os
import time
from datetime import datetime, timedelta
import pymongo
from threading import Thread


@ctaAlgo.route('/ctaRun', methods=['GET', 'POST'])
@login_required
def ctaRun():
    form = RunForm()
    if form.validate_on_submit():
        d = ctaBacktestingRun()
        return jsonify(d)
    form.name.data = current_user.name
    return render_template('ctaAlgo/ctaShow.html', form=form)


@ctaAlgo.route('/uploadStrategy', methods=['GET', 'POST'])
@login_required
def uploadStrategy():
    form = UploadForm()
    if form.validate_on_submit():
        filename = form.uploadFile.data.filename
        print filename
        strategyPath, strategyList = getStrategyDict(current_user.username)
        if filename in strategyList:
            flash('strategy already exists!')
        else:
            form.uploadFile.data.save(os.path.join(strategyPath, filename))
            flash('strategy has been uploaded!')
    return render_template('ctaAlgo/uploadStrategy.html', form=form)


@ctaAlgo.route('/showStrategy', methods=['GET', 'POST'])
@login_required
def showStrategy():
    strategyPath, tableList = getStrategyDict(current_user.username)
    form = StrategyForm()
    form.strategyName.choices = [(i, i) for i in tableList]
    if form.validate_on_submit():
        d = ctaBacktestingRun(current_user.username, form.strategyName.data)
        return jsonify(d)
    return render_template('ctaAlgo/showStrategy.html', form=form)


@ctaAlgo.route('/manageStrategy', methods=['GET', 'POST'])
@login_required
def manageStrategy():
    strategyPath, tableList = getStrategyDict(current_user.username)
    form = StrategyForm()
    form.strategyName.choices = [(i, i) for i in tableList]
    if form.validate_on_submit():
        d = ctaBacktestingRun(current_user.username, form.strategyName.data)
        return jsonify(d)
    return render_template('ctaAlgo/manageStrategy.html', form=form)


@ctaAlgo.route('/manageData', methods=['GET', 'POST'])
@login_required
def manageData():
    form = ManageDataForm()
    if form.validate_on_submit():
        chCode = str(form.chCode.data)
        dataType = str(form.dataType.data)
        begDate = str(form.begDate.data)
        endDate = str(form.endDate.data)
        if len(chCode.split('.')) == 2 and chCode.split('.')[1] == "CF":
            szPort = "20004"
        else:
            szPort = "20003"
        pSetting = {
            'szIP': "172.22.137.140",
            'szPort': szPort,
            'szUser': "liyonghan",
            'szPassword': "liyo1234",
            'nTimeOutVal': 10,
            'nRetryCount': 10,
            'nRetryGap': 10
        }
        mtdbapi = tdbapi()
        TDB_Open_Dict = mtdbapi.TDB_Open(pSetting)
        mtdbapi.TDB_Close()
        if TDB_Open_Dict['nMarkets'] == 0:
            flash(TDB_Open_Dict['szInfo'])
        else:
            app = current_app._get_current_object()
            thr = Thread(target=load_async_data,
                         args=[app, pSetting, chCode, dataType, begDate, endDate])
            thr.start()
            flash('Loading data.')
        return render_template('ctaAlgo/manageData.html', form=form)
    return render_template('ctaAlgo/manageData.html', form=form)


@ctaAlgo.route('/tickData', methods=['GET', 'POST'])
@login_required
def TDB_TickDataView():
    form = TDB_TickDataForm()
    if form.validate_on_submit():
        chCode = str(form.chCode.data)
        dataType = str(form.dataType.data)
        strDate = str(form.strDate.data)
        if len(chCode.split('.')) == 2 and chCode.split('.')[1] == "CF":
            szPort = "20004"
        else:
            szPort = "20003"
        pSetting = {
            'szIP': "172.22.137.140",
            'szPort': szPort,
            'szUser': "liyonghan",
            'szPassword': "liyo1234",
            'nTimeOutVal': 10,
            'nRetryCount': 10,
            'nRetryGap': 10
        }
        mtdbapi = tdbapi()
        TDB_Open_Dict = mtdbapi.TDB_Open(pSetting)
        if TDB_Open_Dict['nMarkets'] == 0:
            dataTable = TDB_Open_Dict['szInfo']
            # flash(TDB_Open_Dict['szInfo'])
        else:
            # Wind security code (e.g. AG1312.SHF)
            # Start date (trading day); 0 means today, e.g. 20130101
            # End date (trading day); <= 0 means same as nBeginDate
            # Start time: <= 0 means from the beginning; format HHMMSSmmm,
            #   e.g. 94500000 means 09:45:00.000
            # End time: <= 0 means until the end
            # Auto-fill flag (0: no auto-fill, 1: auto-fill)
            req = {"chCode": chCode,
                   "nBeginDate": int(strDate.replace('-', '')),
                   "nEndDate": int(strDate.replace('-', '')),
                   "nBeginTime": 0,
                   "nEndTime": 0}
            if dataType in ['FutureAB', 'Future']:
                req['nAutoComplete'] = 0
                data, flag = mtdbapi.TDB_GetFutureAB(req)
                dataTable = TDB_FutureAB(data)
            else:
                data, flag = mtdbapi.TDB_GetTickAB(req)
                dataTable = TDB_TickAB(data)
            dataTable = dataTable
            mtdbapi.TDB_Close()
        return jsonify(dataTable=dataTable, updateTime="UpdateTime: " + time.asctime())
    return render_template('ctaAlgo/TDB_Req_Templete.html', form=form)


@ctaAlgo.route('/codeTable', methods=['GET', 'POST'])
@login_required
def TDB_CodeTableView():
    form = TDB_CodeTableForm()
    if form.validate_on_submit():
        szMarket = str(form.szMarket.data)
        if szMarket == "CF":
            szPort = "20004"
        else:
            szPort = "20003"
        pSetting = {
            'szIP': "172.22.137.140",
            'szPort': szPort,
            'szUser': "liyonghan",
            'szPassword': "liyo1234",
            'nTimeOutVal': 10,
            'nRetryCount': 10,
            'nRetryGap': 10
        }
        mtdbapi = tdbapi()
        TDB_Open_Dict = mtdbapi.TDB_Open(pSetting)
        if TDB_Open_Dict['nMarkets'] == 0:
            codeTable = TDB_Open_Dict['szInfo']
            # flash(TDB_Open_Dict['szInfo'])
        else:
            codeTable, flagcodeTable = mtdbapi.TDB_GetCodeTable(szMarket)
            codeTable = TDB_CodeTable(codeTable)
            mtdbapi.TDB_Close()
        return jsonify(dataTable=codeTable, updateTime="UpdateTime: " + time.asctime())
    return render_template('ctaAlgo/TDB_Req_Templete.html', form=form)


@ctaAlgo.route('/statData')
@login_required
def statData():
    statDatas = []
    tmpdb = tickmongo.db
    symbols = tmpdb.collection_names()
    for symbol in symbols:
        d = {}
        d['symbol'] = symbol
        d['begTime'] = tmpdb[symbol].find().sort("datetime", 1).limit(1)[0]['datetime'].isoformat()
        d['endTime'] = tmpdb[symbol].find().sort("datetime", -1).limit(1)[0]['datetime'].isoformat()
        statDatas.append(d)
    statDatas = statDataTable(statDatas)
    return render_template('ctaAlgo/statData.html', statDataTables=statDatas)


def getStrategyDict(username):
    # strategyPath = os.path.join(os.path.dirname(os.path.abspath(__file__)), "\\strategy\\")
    strategyPath = 'D:\\Code\\github\\FlaskCTA\\app\\ctaAlgo\\strategy'
    strategyPath = os.path.join(strategyPath, username)
    if os.path.exists(strategyPath):
        strategyList = [i.split('.')[0] for i in os.listdir(strategyPath)
                        if i[0] != "_" and i[-2:] == 'py']
    else:
        os.mkdir(strategyPath)
        with open(os.path.join(strategyPath, "__init__.py"), 'w'):
            pass
        strategyList = []
    return strategyPath, strategyList


def load_async_data(app, pSetting, chCode, dataType, begDate, endDate):
    with app.app_context():
        mtdbapi = tdbapi()
        TDB_Open_Dict = mtdbapi.TDB_Open(pSetting)
        # Wind security code (e.g. AG1312.SHF)
        # Start date (trading day); 0 means today, e.g. 20130101
        # End date (trading day); <= 0 means same as nBeginDate
        # Start time: <= 0 means from the beginning; format HHMMSSmmm,
        #   e.g. 94500000 means 09:45:00.000
        # End time: <= 0 means until the end
        # Auto-fill flag (0: no auto-fill, 1: auto-fill)
        req = {"chCode": chCode,
               "nBeginDate": int(begDate.replace('-', '')),
               "nEndDate": int(endDate.replace('-', '')),
               "nBeginTime": 0,
               "nEndTime": 0}
        begDate = datetime.strptime(begDate, '%Y-%m-%d').date()
        endDate = datetime.strptime(endDate, '%Y-%m-%d').date()
        if dataType in ['FutureAB', 'Future']:
            req['nAutoComplete'] = 0
            TDB_GetDataAB = mtdbapi.TDB_GetFutureAB
        else:
            TDB_GetDataAB = mtdbapi.TDB_GetTickAB
        # dbName = 'VnTrader_Tick_Db'
        # host = 'localhost'
        # port = 27017
        # client = pymongo.MongoClient(host, port, connectTimeoutMS=3600)
        # dbName = 'VnTrader_Tick_Db'
        symbol = chCode.split('.')[0]
        # database = client[dbName]
        database = tickmongo.db
        collection = database[symbol]
        if symbol not in database.collection_names():
            collection.create_index([('datetime', pymongo.ASCENDING)], unique=True)
        for i in range((endDate - begDate).days + 1):
            tmpDate = begDate + timedelta(i)
            req["nBeginDate"] = int(tmpDate.isoformat().replace('-', ''))
            req["nEndDate"] = int(tmpDate.isoformat().replace('-', ''))
            dataList, flag = TDB_GetDataAB(req)
            dataList = tdbapi.cleanDataFutureAB(dataList)
            if len(dataList) == 0:
                continue
            try:
                collection.insert_many(dataList)
            except Exception, e:
                print chCode + " " + tmpDate.isoformat() + " " + str(e)
                for d in dataList:
                    d.pop('_id')
                    flt = {'datetime': d['datetime']}
                    collection.update_one(flt, {'$set': d}, upsert=True)
        mtdbapi.TDB_Close()
repo_name: lyhrobin00007/FlaskCTA
path: app/ctaAlgo/views.py
language: Python
license: mit
size: 10,753
#!/usr/bin/env python3
import os
import time
import psutil
from typing import Optional

import cereal.messaging as messaging
from common.realtime import set_core_affinity, set_realtime_priority
from selfdrive.swaglog import cloudlog

MAX_MODEM_CRASHES = 3
MODEM_PATH = "/sys/devices/soc/2080000.qcom,mss/subsys5"
WATCHED_PROCS = ["zygote", "zygote64", "system_server", "/system/bin/servicemanager", "/system/bin/surfaceflinger"]


def get_modem_crash_count() -> Optional[int]:
  try:
    with open(os.path.join(MODEM_PATH, "crash_count")) as f:
      return int(f.read())
  except Exception:
    cloudlog.exception("Error reading modem crash count")
  return None

def get_modem_state() -> str:
  try:
    with open(os.path.join(MODEM_PATH, "state")) as f:
      return f.read().strip()
  except Exception:
    cloudlog.exception("Error reading modem state")
  return ""


def main():
  set_core_affinity(1)
  set_realtime_priority(1)

  procs = {}
  crash_count = 0
  modem_killed = False
  modem_state = "ONLINE"
  androidLog = messaging.sub_sock('androidLog')

  while True:
    # check critical android services
    if any(p is None or not p.is_running() for p in procs.values()) or not len(procs):
      cur = {p: None for p in WATCHED_PROCS}
      for p in psutil.process_iter(attrs=['cmdline']):
        cmdline = None if not len(p.info['cmdline']) else p.info['cmdline'][0]
        if cmdline in WATCHED_PROCS:
          cur[cmdline] = p

      if len(procs):
        for p in WATCHED_PROCS:
          if cur[p] != procs[p]:
            cloudlog.event("android service pid changed", proc=p, cur=cur[p], prev=procs[p], error=True)
      procs.update(cur)

    # log caught NetworkPolicy exceptions
    msgs = messaging.drain_sock(androidLog)
    for m in msgs:
      try:
        if m.androidLog.tag == "NetworkPolicy" and m.androidLog.message.startswith("problem with advise persist threshold"):
          cloudlog.event("network policy exception caught", androidLog=m.androidLog, error=True)
      except UnicodeDecodeError:
        pass

    if os.path.exists(MODEM_PATH):
      # check modem state
      state = get_modem_state()
      if state != modem_state and not modem_killed:
        cloudlog.event("modem state changed", state=state)
        modem_state = state

      # check modem crashes
      cnt = get_modem_crash_count()
      if cnt is not None:
        if cnt > crash_count:
          cloudlog.event("modem crash", count=cnt)
        crash_count = cnt

      # handle excessive modem crashes
      if crash_count > MAX_MODEM_CRASHES and not modem_killed:
        cloudlog.event("killing modem", error=True)
        with open("/sys/kernel/debug/msm_subsys/modem", "w") as f:
          f.write("put")
        modem_killed = True

    time.sleep(1)


if __name__ == "__main__":
  main()
repo_name: commaai/openpilot
path: selfdrive/hardware/eon/androidd.py
language: Python
license: mit
size: 2,807
import Feeder, SourceList, Settings, Source, MyLogger, feedparser
reload(Feeder); reload(Settings); reload(Source); reload(SourceList)

sets = Settings.Settings()
log = MyLogger.defaultLogger('temp.log', sets)
sourceList = SourceList.SourceList(sets=sets, log=log)
repo_name: lzkelley/Feeder
path: isetup.py
language: Python
license: mit
size: 269
from websocket import create_connection
from processReturnMsg import processReturnMsg
import json


class Connection:
    def __init__(self, name, url):
        self.ws = create_connection(url)
        json_string = {
            "name": name
        }
        self.ws.send(json.dumps(json_string))
        # Receive initial server greetings
        processReturnMsg(self.ws.recv(), "initConn")

    def send(self, message):
        self.ws.send(message)

    def close(self):
        self.ws.close()

    def recv(self):
        return self.ws.recv()
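# --- Added usage sketch (not part of the original file) ---
# The constructor does the whole handshake: it opens the websocket and sends
# a {"name": ...} JSON greeting before consuming the server's initial reply.
# The URL and payload below are hypothetical placeholders.
#
#   conn = Connection("commander", "ws://localhost:8080/ws")
#   conn.send('{"command": "say hello"}')
#   reply = conn.recv()
#   conn.close()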
repo_name: np-overflow/minecraft-commander
path: mcpy_simplified/connection.py
language: Python
license: mit
size: 476
""" Test for network group """ import unittest class TestNetworkGroup(unittest.TestCase): """ Test case for network group """ def test_network_group(self): """Test various types of network groups""" from pybitmessage.protocol import network_group test_ip = '1.2.3.4' self.assertEqual('\x01\x02', network_group(test_ip)) test_ip = '127.0.0.1' self.assertEqual('IPv4', network_group(test_ip)) test_ip = '0102:0304:0506:0708:090A:0B0C:0D0E:0F10' self.assertEqual( '\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C', network_group(test_ip)) test_ip = 'bootstrap8444.bitmessage.org' self.assertEqual( 'bootstrap8444.bitmessage.org', network_group(test_ip)) test_ip = 'quzwelsuziwqgpt2.onion' self.assertEqual( test_ip, network_group(test_ip)) test_ip = None self.assertEqual( None, network_group(test_ip))
repo_name: PeterSurda/PyBitmessage
path: src/tests/test_networkgroup.py
language: Python
license: mit
size: 1,033
""" Django settings for CMS project. Generated by 'django-admin startproject' using Django 1.9. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'f4@=%17sh*xy(%d)p8rn93n3br5asyx-++m1yz$qpwg1#$n11p' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ #'appmodels.apps.AppmodelsConfig', 'Assessment', 'Communique.apps.CommuniqueConfig', 'Course', 'NewsFeed.apps.NewsfeedConfig', 'Profiler', 'Resource', 'Workspace.apps.WorkspaceConfig', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'corsheaders.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'CMS.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'CMS.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' # for CORS (Cross-Origin Resource Sharing) CORS_ORIGIN_ALLOW_ALL = False CORS_ORIGIN_WHITELIST = ( 'google.com', 'localhost:8888', ) CORS_ORIGIN_REGEX_WHITELIST = () CORS_URLS_REGEX = '^.*$' CORS_ALLOW_METHODS = ( 'GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS' ) CORS_ALLOW_HEADERS = ( 'x-requested-with', 'content-type', 'accept', 'origin', 'authorization', 'x-csrftoken', 'X-Login-Ajax-call' ) CORS_EXPOSE_HEADERS = () CORS_PREFLIGHT_MAX_AGE = 86400 CORS_ALLOW_CREDENTIALS = False
repo_name: IEEEDTU/CMS
path: CMS/settings.py
language: Python
license: mit
size: 4,034
import json
from pathlib import Path

from gravity.defaults import (
    DEFAULT_GUNICORN_BIND,
    DEFAULT_GUNICORN_TIMEOUT,
    DEFAULT_GUNICORN_WORKERS,
    DEFAULT_INSTANCE_NAME,
    CELERY_DEFAULT_CONFIG,
)


def test_register_defaults(galaxy_yml, galaxy_root_dir, state_dir, default_config_manager):
    default_config_manager.add([str(galaxy_yml)])
    assert str(galaxy_yml) in default_config_manager.state['config_files']
    state = default_config_manager.state['config_files'][str(galaxy_yml)]
    assert state['config_type'] == 'galaxy'
    assert state['instance_name'] == DEFAULT_INSTANCE_NAME
    assert state['services'] == []
    attributes = state['attribs']
    assert attributes['app_server'] == 'gunicorn'
    assert Path(attributes['log_dir']) == Path(state_dir) / 'log'
    assert Path(attributes['galaxy_root']) == galaxy_root_dir
    gunicorn_attributes = attributes['gunicorn']
    assert gunicorn_attributes['bind'] == DEFAULT_GUNICORN_BIND
    assert gunicorn_attributes['workers'] == DEFAULT_GUNICORN_WORKERS
    assert gunicorn_attributes['timeout'] == DEFAULT_GUNICORN_TIMEOUT
    assert gunicorn_attributes['extra_args'] == ""
    assert attributes['celery'] == CELERY_DEFAULT_CONFIG


def test_register_non_default(galaxy_yml, default_config_manager):
    new_bind = 'localhost:8081'
    concurrency = 4
    galaxy_yml.write(json.dumps({
        'galaxy': None,
        'gravity': {
            'gunicorn': {'bind': new_bind},
            'celery': {'concurrency': concurrency}
        }
    }))
    default_config_manager.add([str(galaxy_yml)])
    state = default_config_manager.state['config_files'][str(galaxy_yml)]
    gunicorn_attributes = state['attribs']['gunicorn']
    assert gunicorn_attributes['bind'] == new_bind
    assert gunicorn_attributes['workers'] == DEFAULT_GUNICORN_WORKERS
    celery_attributes = state['attribs']['celery']
    assert celery_attributes['concurrency'] == concurrency


def test_deregister(galaxy_yml, default_config_manager):
    default_config_manager.add([str(galaxy_yml)])
    assert str(galaxy_yml) in default_config_manager.state['config_files']
    assert default_config_manager.is_registered(str(galaxy_yml))
    default_config_manager.remove([str(galaxy_yml)])
    assert str(galaxy_yml) not in default_config_manager.state['config_files']
    assert not default_config_manager.is_registered(str(galaxy_yml))


def test_rename(galaxy_root_dir, state_dir, default_config_manager):
    galaxy_yml_sample = galaxy_root_dir / "config" / "galaxy.yml.sample"
    default_config_manager.add([str(galaxy_yml_sample)])
    galaxy_yml = galaxy_root_dir / "config" / "galaxy.yml"
    galaxy_yml_sample.copy(galaxy_yml)
    assert default_config_manager.is_registered(str(galaxy_yml_sample.realpath()))
    assert not default_config_manager.is_registered(str(galaxy_yml))
    default_config_manager.rename(str(galaxy_yml_sample.realpath()), str(galaxy_yml))
    assert not default_config_manager.is_registered(str(galaxy_yml_sample.realpath()))
    assert default_config_manager.is_registered(str(galaxy_yml))


def test_auto_register(galaxy_yml, default_config_manager, monkeypatch):
    monkeypatch.setenv("GALAXY_CONFIG_FILE", str(galaxy_yml))
    assert not default_config_manager.is_registered(str(galaxy_yml))
    default_config_manager.auto_register()
    assert default_config_manager.is_registered(str(galaxy_yml))
repo_name: galaxyproject/gravity
path: tests/test_config_manager.py
language: Python
license: mit
size: 3,452
'''
We are given an array asteroids of integers representing asteroids in a row.

For each asteroid, the absolute value represents its size, and the sign
represents its direction (positive meaning right, negative meaning left).
Each asteroid moves at the same speed.

Find out the state of the asteroids after all collisions. If two asteroids
meet, the smaller one will explode. If both are the same size, both will
explode. Two asteroids moving in the same direction will never meet.

Example 1:
Input: asteroids = [5, 10, -5]
Output: [5, 10]
Explanation: The 10 and -5 collide resulting in 10. The 5 and 10 never collide.

Example 2:
Input: asteroids = [8, -8]
Output: []
Explanation: The 8 and -8 collide exploding each other.

Example 3:
Input: asteroids = [10, 2, -5]
Output: [10]
Explanation: The 2 and -5 collide resulting in -5. The 10 and -5 collide
resulting in 10.

Example 4:
Input: asteroids = [-2, -1, 1, 2]
Output: [-2, -1, 1, 2]
Explanation: The -2 and -1 are moving left, while the 1 and 2 are moving
right. Asteroids moving the same direction never meet, so no asteroids will
meet each other.

Note:
The length of asteroids will be at most 10000.
Each asteroid will be a non-zero integer in the range [-1000, 1000].
'''

# There is one tricky part: one big asteroid may collide more than once,
# which is why the outer loop rescans until a full pass changes nothing.
class Solution(object):
    def asteroidCollision(self, asteroids):
        a = asteroids
        while True:
            i, n = 0, len(a)
            while i < len(a) - 1:
                if a[i] * a[i + 1] > 0 or a[i] < 0 < a[i + 1]:
                    i += 1  # will not collide
                elif a[i] > -a[i + 1]:
                    del a[i + 1]  # a[i+1] explodes
                elif a[i] == -a[i + 1]:
                    del a[i:i + 2]  # both explode
                else:
                    del a[i]  # a[i] explodes
            if len(a) == n:
                break  # no change after one scan
        return a
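# --- Added sanity check (not part of the original file) ---
# Re-runs the four examples documented in the docstring above.
if __name__ == '__main__':
    s = Solution()
    assert s.asteroidCollision([5, 10, -5]) == [5, 10]
    assert s.asteroidCollision([8, -8]) == []
    assert s.asteroidCollision([10, 2, -5]) == [10]
    assert s.asteroidCollision([-2, -1, 1, 2]) == [-2, -1, 1, 2]
    print('all documented examples pass')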
repo_name: tikael1011/leetcodejava
path: 735. Asteroid Collision.py
language: Python
license: mit
size: 1,833
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd

sns.set(style="ticks", palette="colorblind", context="paper")

plt.figure(figsize=snakemake.config["plots"]["figsize"])
for f in snakemake.input:
    counts = pd.read_table(f, index_col=[0, 1])
    plt.scatter(counts["exact"], counts["corrected"], s=2, c="k", alpha=0.6,
                edgecolors="face", rasterized=True)
plt.xlabel("exact counts")
plt.ylabel("corrected counts")
plt.xlim((0, 1000))
plt.ylim((0, 1000))
plt.locator_params(nbins=4)
#ax = plt.gca()
#ax.set_yscale('log')
#ax.set_xscale('log')
sns.despine()
plt.savefig(snakemake.output[0], bbox_inches="tight")
repo_name: merfishtools/merfishtools-evaluation
path: scripts/plot-exact-vs-corrected.py
language: Python
license: mit
size: 679
import _plotly_utils.basevalidators


class SideValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    def __init__(
        self, plotly_name="side", parent_name="densitymapbox.colorbar.title", **kwargs
    ):
        super(SideValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "colorbars"),
            values=kwargs.pop("values", ["right", "top", "bottom"]),
            **kwargs
        )
repo_name: plotly/plotly.py
path: packages/python/plotly/plotly/validators/densitymapbox/colorbar/title/_side.py
language: Python
license: mit
size: 500
from LinkedList import LinkedList


def length_of_linkedlist(ll):
    if ll.head is None:
        return 0
    current = ll.head
    count = 0
    while current:
        current = current.next
        count += 1
    return count


linked_list = LinkedList()
linked_list.__generate__(10, 0, 9)
linked_list.__print__()
print(length_of_linkedlist(linked_list))
repo_name: fahadkaleem/DataStructures
path: LinkedList/length_of_linkedlist.py
language: Python
license: mit
size: 354
# -*- coding: utf-8 -*-
import darr

da = darr.DoubleArray()

words = ['くるま', 'く', 'くる', 'りんご', 'オレンジ', 'baseball', 'soccer']
v = 0
for word in words:
    v += 1.
    da.insert(word, v)

print('### common prefix search ###')
ret = da.common_prefix_search('くるまで')
for w in ret:
    print(w)

print('### get values ###')
for word in words:
    v = da.get(word)
    print(word, v)

da.save('dafile')

print('### load dumped double array ###')
da2 = darr.DoubleArray()
da2.load('dafile')
for word in words:
    v = da2.get(word)
    print(word, v)
repo_name: tma15/darr
path: python/sample.py
language: Python
license: mit
size: 580
import sys
import live
from instruments.drums import Drums
from instruments.synth_lead import SynthLead
from instruments.synth_harmony import SynthHarmony
from threading import Thread
import time
import mido

# def start_ableton_thread():
#     t = Thread(target=ableton_thread)
#     t.start()

def ableton_thread():
    live_set = live.Set()
    live_set.scan(scan_clip_names=True, scan_devices=True)
    beat_length = 60 / live_set.tempo
    # drums = Drums(live_set)
    melody = SynthLead(live_set)
    # harmony = SynthHarmony(live_set)

    melody_output = mido.open_output("IAC Driver Bus 1")
    clock_input = mido.open_input("IAC Driver IAC Bus 2")
    tick_count = 0
    for message in clock_input:
        if message.type == "clock":
            tick_count += 1
            if tick_count % 24 == 0:
                melody_output.send(mido.Message("note_on", note=60, velocity=tick_count % 96 + 10))
            elif tick_count % 12 == 0:
                melody_output.send(mido.Message("note_off", note=60))
        elif message.type == "start":
            tick_count = 0

    # while True:
    #     live_set.wait_for_next_beat()
    #     print "Beat"
    #     melody_output.send(mido.Message("note_on", note=60, velocity=64))
    #     time.sleep(beat_length / 2)
    #     melody_output.send(mido.Message("note_off", note=60))

def main():
    ableton_thread()

if __name__ == "__main__":
    main()
repo_name: matangover/beatogether
path: ableton_playground.py
language: Python
license: mit
size: 1,422
#!/usr/bin/env python3

import argparse
import os
import subprocess
import sys


def setup():
    global args, workdir
    programs = ['ruby', 'git', 'make', 'wget', 'curl']
    if args.kvm:
        programs += ['apt-cacher-ng', 'python-vm-builder', 'qemu-kvm', 'qemu-utils']
    elif args.docker and not os.path.isfile('/lib/systemd/system/docker.service'):
        dockers = ['docker.io', 'docker-ce']
        for i in dockers:
            return_code = subprocess.call(['sudo', 'apt-get', 'install', '-qq', i])
            if return_code == 0:
                break
        if return_code != 0:
            print('Cannot find any way to install Docker.', file=sys.stderr)
            sys.exit(1)
    else:
        programs += ['apt-cacher-ng', 'lxc', 'debootstrap']
    subprocess.check_call(['sudo', 'apt-get', 'install', '-qq'] + programs)
    if not os.path.isdir('gitian.sigs'):
        subprocess.check_call(['git', 'clone', 'https://github.com/bitcoin-core/gitian.sigs.git'])
    if not os.path.isdir('bitcoin-detached-sigs'):
        subprocess.check_call(['git', 'clone', 'https://github.com/bitcoin-core/bitcoin-detached-sigs.git'])
    if not os.path.isdir('gitian-builder'):
        subprocess.check_call(['git', 'clone', 'https://github.com/devrandom/gitian-builder.git'])
    if not os.path.isdir('bitcoin'):
        subprocess.check_call(['git', 'clone', 'https://github.com/bitcoin/bitcoin.git'])
    os.chdir('gitian-builder')
    make_image_prog = ['bin/make-base-vm', '--suite', 'bionic', '--arch', 'amd64']
    if args.docker:
        make_image_prog += ['--docker']
    elif not args.kvm:
        make_image_prog += ['--lxc']
    subprocess.check_call(make_image_prog)
    os.chdir(workdir)
    if args.is_bionic and not args.kvm and not args.docker:
        subprocess.check_call(['sudo', 'sed', '-i', 's/lxcbr0/br0/', '/etc/default/lxc-net'])
        print('Reboot is required')
        sys.exit(0)


def build():
    global args, workdir

    os.makedirs('bitcoin-binaries/' + args.version, exist_ok=True)
    print('\nBuilding Dependencies\n')
    os.chdir('gitian-builder')
    os.makedirs('inputs', exist_ok=True)

    subprocess.check_call(['wget', '-O', 'inputs/osslsigncode-2.0.tar.gz', 'https://github.com/mtrojnar/osslsigncode/archive/2.0.tar.gz'])
    subprocess.check_call(["echo '5a60e0a4b3e0b4d655317b2f12a810211c50242138322b16e7e01c6fbb89d92f inputs/osslsigncode-2.0.tar.gz' | sha256sum -c"], shell=True)
    subprocess.check_call(['make', '-C', '../bitcoin/depends', 'download', 'SOURCES_PATH=' + os.getcwd() + '/cache/common'])

    if args.linux:
        print('\nCompiling ' + args.version + ' Linux')
        subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, '--url', 'bitcoin='+args.url, '../bitcoin/contrib/gitian-descriptors/gitian-linux.yml'])
        subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-linux', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-linux.yml'])
        subprocess.check_call('mv build/out/bitcoin-*.tar.gz build/out/src/bitcoin-*.tar.gz ../bitcoin-binaries/'+args.version, shell=True)

    if args.windows:
        print('\nCompiling ' + args.version + ' Windows')
        subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, '--url', 'bitcoin='+args.url, '../bitcoin/contrib/gitian-descriptors/gitian-win.yml'])
        subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-unsigned', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-win.yml'])
        subprocess.check_call('mv build/out/bitcoin-*-win-unsigned.tar.gz inputs/', shell=True)
        subprocess.check_call('mv build/out/bitcoin-*.zip build/out/bitcoin-*.exe build/out/src/bitcoin-*.tar.gz ../bitcoin-binaries/'+args.version, shell=True)

    if args.macos:
        print('\nCompiling ' + args.version + ' MacOS')
        subprocess.check_call(['bin/gbuild', '-j', args.jobs, '-m', args.memory, '--commit', 'bitcoin='+args.commit, '--url', 'bitcoin='+args.url, '../bitcoin/contrib/gitian-descriptors/gitian-osx.yml'])
        subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-unsigned', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-osx.yml'])
        subprocess.check_call('mv build/out/bitcoin-*-osx-unsigned.tar.gz inputs/', shell=True)
        subprocess.check_call('mv build/out/bitcoin-*.tar.gz build/out/bitcoin-*.dmg build/out/src/bitcoin-*.tar.gz ../bitcoin-binaries/'+args.version, shell=True)

    os.chdir(workdir)

    if args.commit_files:
        print('\nCommitting '+args.version+' Unsigned Sigs\n')
        os.chdir('gitian.sigs')
        subprocess.check_call(['git', 'add', args.version+'-linux/'+args.signer])
        subprocess.check_call(['git', 'add', args.version+'-win-unsigned/'+args.signer])
        subprocess.check_call(['git', 'add', args.version+'-osx-unsigned/'+args.signer])
        subprocess.check_call(['git', 'commit', '-m', 'Add '+args.version+' unsigned sigs for '+args.signer])
        os.chdir(workdir)


def sign():
    global args, workdir
    os.chdir('gitian-builder')

    if args.windows:
        print('\nSigning ' + args.version + ' Windows')
        subprocess.check_call('cp inputs/bitcoin-' + args.version + '-win-unsigned.tar.gz inputs/bitcoin-win-unsigned.tar.gz', shell=True)
        subprocess.check_call(['bin/gbuild', '--skip-image', '--upgrade', '--commit', 'signature='+args.commit, '../bitcoin/contrib/gitian-descriptors/gitian-win-signer.yml'])
        subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-win-signed', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-win-signer.yml'])
        subprocess.check_call('mv build/out/bitcoin-*win64-setup.exe ../bitcoin-binaries/'+args.version, shell=True)

    if args.macos:
        print('\nSigning ' + args.version + ' MacOS')
        subprocess.check_call('cp inputs/bitcoin-' + args.version + '-osx-unsigned.tar.gz inputs/bitcoin-osx-unsigned.tar.gz', shell=True)
        subprocess.check_call(['bin/gbuild', '--skip-image', '--upgrade', '--commit', 'signature='+args.commit, '../bitcoin/contrib/gitian-descriptors/gitian-osx-signer.yml'])
        subprocess.check_call(['bin/gsign', '-p', args.sign_prog, '--signer', args.signer, '--release', args.version+'-osx-signed', '--destination', '../gitian.sigs/', '../bitcoin/contrib/gitian-descriptors/gitian-osx-signer.yml'])
        subprocess.check_call('mv build/out/bitcoin-osx-signed.dmg ../bitcoin-binaries/'+args.version+'/bitcoin-'+args.version+'-osx.dmg', shell=True)

    os.chdir(workdir)

    if args.commit_files:
        print('\nCommitting '+args.version+' Signed Sigs\n')
        os.chdir('gitian.sigs')
        subprocess.check_call(['git', 'add', args.version+'-win-signed/'+args.signer])
        subprocess.check_call(['git', 'add', args.version+'-osx-signed/'+args.signer])
        subprocess.check_call(['git', 'commit', '-a', '-m', 'Add '+args.version+' signed binary sigs for '+args.signer])
        os.chdir(workdir)


def verify():
    global args, workdir
    rc = 0
    os.chdir('gitian-builder')

    print('\nVerifying v'+args.version+' Linux\n')
    if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-linux', '../bitcoin/contrib/gitian-descriptors/gitian-linux.yml']):
        print('Verifying v'+args.version+' Linux FAILED\n')
        rc = 1

    print('\nVerifying v'+args.version+' Windows\n')
    if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-win-unsigned', '../bitcoin/contrib/gitian-descriptors/gitian-win.yml']):
        print('Verifying v'+args.version+' Windows FAILED\n')
        rc = 1

    print('\nVerifying v'+args.version+' MacOS\n')
    if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-osx-unsigned', '../bitcoin/contrib/gitian-descriptors/gitian-osx.yml']):
        print('Verifying v'+args.version+' MacOS FAILED\n')
        rc = 1

    print('\nVerifying v'+args.version+' Signed Windows\n')
    if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-win-signed', '../bitcoin/contrib/gitian-descriptors/gitian-win-signer.yml']):
        print('Verifying v'+args.version+' Signed Windows FAILED\n')
        rc = 1

    print('\nVerifying v'+args.version+' Signed MacOS\n')
    if subprocess.call(['bin/gverify', '-v', '-d', '../gitian.sigs/', '-r', args.version+'-osx-signed', '../bitcoin/contrib/gitian-descriptors/gitian-osx-signer.yml']):
        print('Verifying v'+args.version+' Signed MacOS FAILED\n')
        rc = 1

    os.chdir(workdir)
    return rc


def main():
    global args, workdir

    parser = argparse.ArgumentParser(description='Script for running full Gitian builds.')
    parser.add_argument('-c', '--commit', action='store_true', dest='commit', help='Indicate that the version argument is for a commit or branch')
    parser.add_argument('-p', '--pull', action='store_true', dest='pull', help='Indicate that the version argument is the number of a github repository pull request')
    parser.add_argument('-u', '--url', dest='url', default='https://github.com/bitcoin/bitcoin', help='Specify the URL of the repository. Default is %(default)s')
    parser.add_argument('-v', '--verify', action='store_true', dest='verify', help='Verify the Gitian build')
    parser.add_argument('-b', '--build', action='store_true', dest='build', help='Do a Gitian build')
    parser.add_argument('-s', '--sign', action='store_true', dest='sign', help='Make signed binaries for Windows and MacOS')
    parser.add_argument('-B', '--buildsign', action='store_true', dest='buildsign', help='Build both signed and unsigned binaries')
    parser.add_argument('-o', '--os', dest='os', default='lwm', help='Specify which Operating Systems the build is for. Default is %(default)s. l for Linux, w for Windows, m for MacOS')
    parser.add_argument('-j', '--jobs', dest='jobs', default='2', help='Number of processes to use. Default %(default)s')
    parser.add_argument('-m', '--memory', dest='memory', default='2000', help='Memory to allocate in MiB. Default %(default)s')
    parser.add_argument('-k', '--kvm', action='store_true', dest='kvm', help='Use KVM instead of LXC')
    parser.add_argument('-d', '--docker', action='store_true', dest='docker', help='Use Docker instead of LXC')
    parser.add_argument('-S', '--setup', action='store_true', dest='setup', help='Set up the Gitian building environment. Only works on Debian-based systems (Ubuntu, Debian)')
    parser.add_argument('-D', '--detach-sign', action='store_true', dest='detach_sign', help='Create the assert file for detached signing. Will not commit anything.')
    parser.add_argument('-n', '--no-commit', action='store_false', dest='commit_files', help='Do not commit anything to git')
    parser.add_argument('signer', nargs='?', help='GPG signer to sign each build assert file')
    parser.add_argument('version', nargs='?', help='Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified')

    args = parser.parse_args()
    workdir = os.getcwd()

    args.is_bionic = b'bionic' in subprocess.check_output(['lsb_release', '-cs'])

    if args.kvm and args.docker:
        raise Exception('Error: cannot have both kvm and docker')

    # Ensure no more than one environment variable for gitian-builder (USE_LXC, USE_VBOX, USE_DOCKER) is set as they
    # can interfere (e.g., USE_LXC being set shadows USE_DOCKER; for details see gitian-builder/libexec/make-clean-vm).
    os.environ['USE_LXC'] = ''
    os.environ['USE_VBOX'] = ''
    os.environ['USE_DOCKER'] = ''
    if args.docker:
        os.environ['USE_DOCKER'] = '1'
    elif not args.kvm:
        os.environ['USE_LXC'] = '1'
        if 'GITIAN_HOST_IP' not in os.environ.keys():
            os.environ['GITIAN_HOST_IP'] = '10.0.3.1'
        if 'LXC_GUEST_IP' not in os.environ.keys():
            os.environ['LXC_GUEST_IP'] = '10.0.3.5'

    if args.setup:
        setup()

    if args.buildsign:
        args.build = True
        args.sign = True

    if not args.build and not args.sign and not args.verify:
        sys.exit(0)

    args.linux = 'l' in args.os
    args.windows = 'w' in args.os
    args.macos = 'm' in args.os

    # Disable for MacOS if no SDK found
    if args.macos and not os.path.isfile('gitian-builder/inputs/MacOSX10.11.sdk.tar.gz'):
        print('Cannot build for MacOS, SDK does not exist. Will build for other OSes')
        args.macos = False

    args.sign_prog = 'true' if args.detach_sign else 'gpg --detach-sign'

    script_name = os.path.basename(sys.argv[0])
    if not args.signer:
        print(script_name+': Missing signer')
        print('Try '+script_name+' --help for more information')
        sys.exit(1)
    if not args.version:
        print(script_name+': Missing version')
        print('Try '+script_name+' --help for more information')
        sys.exit(1)

    # Add leading 'v' for tags
    if args.commit and args.pull:
        raise Exception('Cannot have both commit and pull')
    args.commit = ('' if args.commit else 'v') + args.version

    os.chdir('bitcoin')
    if args.pull:
        subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge'])
        os.chdir('../gitian-builder/inputs/bitcoin')
        subprocess.check_call(['git', 'fetch', args.url, 'refs/pull/'+args.version+'/merge'])
        args.commit = subprocess.check_output(['git', 'show', '-s', '--format=%H', 'FETCH_HEAD'], universal_newlines=True, encoding='utf8').strip()
        args.version = 'pull-' + args.version
    print(args.commit)
    subprocess.check_call(['git', 'fetch'])
    subprocess.check_call(['git', 'checkout', args.commit])
    os.chdir(workdir)

    os.chdir('gitian-builder')
    subprocess.check_call(['git', 'pull'])
    os.chdir(workdir)

    if args.build:
        build()

    if args.sign:
        sign()

    if args.verify:
        os.chdir('gitian.sigs')
        subprocess.check_call(['git', 'pull'])
        os.chdir(workdir)
        sys.exit(verify())


if __name__ == '__main__':
    main()
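# --- Added usage note (not part of the original file) ---
# Example invocations derived from the argparse definitions above; the GPG
# signer name and version are placeholders:
#
#   ./gitian-build.py --setup --docker
#   ./gitian-build.py --docker -b -j 4 -m 4000 my-gpg-key 0.20.0
#   ./gitian-build.py --docker -v my-gpg-key 0.20.0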
repo_name: tjps/bitcoin
path: contrib/gitian-build.py
language: Python
license: mit
size: 14,454
""" .. module:: kmeans :synopsis: python wrapper for a basic c implementation of the k-means algorithm. .. moduleauthor:: Joe Cross <joe.mcross@gmail.com> """ import os import ctypes import random import sysconfig from ctypes import Structure, c_uint8, c_uint32, c_uint64, byref __all__ = ['kmeans'] # ==================================================== # Hook up c module HERE = os.path.dirname(os.path.realpath(__file__)) '''http://www.python.org/dev/peps/pep-3149/''' SUFFIX = sysconfig.get_config_var('SO') if not SUFFIX: # pragma: no cover SOABI = sysconfig.get_config_var('SOABI') SUFFIX = ".{}.so".format(SOABI) SO_PATH = os.path.join(HERE, 'lib' + SUFFIX) LIB = ctypes.CDLL(SO_PATH) # ==================================================== class Point(Structure): _fields_ = [ ('r', c_uint8), ('g', c_uint8), ('b', c_uint8), ('center', c_uint8), ('count', c_uint32) ] class Center(Structure): _fields_ = [ ('r', c_uint64), ('g', c_uint64), ('b', c_uint64), ('count', c_uint32) ] def _kmeans(points, k, centers, tolerance, max_iterations): if centers: if k != len(centers): raise ValueError( "Provided {} centers but k is {}".format(len(centers), k)) else: centers = random.sample(points, k) results = pcenters = (Center * k)() for i, center in enumerate(centers): (r, g, b), count = center pcenters[i] = Center(r=r, g=g, b=b, count=count) pcenters = byref(pcenters) # Generate points n = len(points) ppoints = (Point * n)() for i, point in enumerate(points): (r, g, b), count = point ppoints[i] = Point(r=r, g=g, b=b, center=-1, count=count) ppoints = byref(ppoints) # Compute centers LIB.kmeans(ppoints, n, pcenters, k, tolerance, max_iterations) # Translate return [[result.r, result.g, result.b] for result in results] def kmeans(points, k, centers=None, tolerance=1, max_iterations=0): """Return a list of *k* centers (means). Initial centers are optional. :param points: (values, weight) tuples to find centers of. value is a list of integer values. :type points: list :param k: number of centers to calculate :type k: int :param centers: initial centers, leave blank to randomly select :type centers: list :param tolerance: maximum delta to consider the centers stable :type tolerance: int :param max_iterations: maximum assign/update iterations. 0 to loop until tolerance is met. :type max_iterations: int :rtype: list """ return _kmeans(points=points, k=k, centers=centers, tolerance=tolerance, max_iterations=max_iterations)
repo_name: numberoverzero/kmeans
path: kmeans/__init__.py
language: Python
license: mit
size: 2,812
# The MIT License
#
# Copyright (c) 2010 Jeffrey Jenkins
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
from ommongo.py3compat import *

from functools import wraps
from pymongo import ASCENDING, DESCENDING
from copy import copy, deepcopy

from ommongo.exceptions import BadValueException
from ommongo.query_expression import QueryExpression, BadQueryException, flatten
from ommongo.util import resolve_name


class UpdateExpression(object):
    def __init__(self, query):
        self.query = query
        self.session = query.session
        self.update_data = {}
        self.__upsert = False
        self.__multi = False
        self.__safe = False

    def upsert(self):
        ''' If a document matching the query doesn't exist, create one '''
        self.__upsert = True
        return self

    def multi(self):
        ''' Update multiple documents.  The Mongo default is to only update
            the first matching document '''
        self.__multi = True
        return self

    def safe(self, safe=True):
        """ Mark the query as a "safe" query with pymongo.

            :param safe: Defaults to True.  Force "safe" on or off
        """
        self.__safe = safe
        return self

    def set(self, *args, **kwargs):
        ''' Usage is either:

            set(qfield, value): Atomically set ``qfield`` to ``value``

            OR

            set(key1=value1, key2=value2): Atomically set the named arguments
            on the current object to the values given.  This form cannot
            update a sub-document
        '''
        if len(args) == 2:
            qfield, value = args
            return self._atomic_op('$set', qfield, value)
        elif len(kwargs) != 0:
            ret = self
            for key, value in kwargs.items():
                ret = ret._atomic_op('$set', key, value)
            return ret
        else:
            raise UpdateException('Invalid arguments for set.  Requires either two positional arguments or at least one keyword argument')

    def unset(self, qfield):
        ''' Atomically delete the field ``qfield``

            .. note:: Requires server version **1.3.0+**.
        '''
        # TODO: assert server version is >1.3.0
        return self._atomic_generic_op('$unset', qfield, True)

    def inc(self, *args, **kwargs):
        ''' Atomically increment ``qfield`` by ``value`` '''
        pairs = []
        if len(args) == 1:
            pairs.append((args[0], 1))
        elif len(args) == 2:
            pairs.append(args)
        elif len(kwargs) != 0:
            pairs.extend([(k, v) for k, v in kwargs.items()])
        else:
            raise UpdateException('Invalid arguments for inc.  Requires one or two positional arguments or at least one keyword argument')

        ret = self
        for qfield, value in pairs:
            ret = ret._atomic_op('$inc', qfield, value)
        return ret

    def append(self, qfield, value):
        ''' Atomically append ``value`` to ``qfield``.  The operation will
            fail if the field is not a list field '''
        return self._atomic_list_op('$push', qfield, value)

    def extend(self, qfield, *value):
        ''' Atomically append each value in ``value`` to the field ``qfield`` '''
        return self._atomic_list_op_multivalue('$pushAll', qfield, *value)

    def remove(self, qfield, value):
        ''' Atomically remove ``value`` from ``qfield`` '''
        if isinstance(value, QueryExpression):
            return self._atomic_expression_op('$pull', qfield, value)
        return self._atomic_list_op('$pull', qfield, value)

    def remove_all(self, qfield, *value):
        ''' Atomically remove each value in ``value`` from ``qfield`` '''
        return self._atomic_list_op_multivalue('$pullAll', qfield, *value)

    def add_to_set(self, qfield, value):
        ''' Atomically add ``value`` to ``qfield``.  The field represented by
            ``qfield`` must be a set

            .. note:: Requires server version **1.3.0+**.
        '''
        # TODO: check version > 1.3.3
        return self._atomic_list_op('$addToSet', qfield, value)

    def pop_last(self, qfield):
        ''' Atomically pop the last item in ``qfield.``

            .. note:: Requires version **1.1+** '''
        return self._atomic_generic_op('$pop', qfield, 1)

    def pop_first(self, qfield):
        ''' Atomically pop the first item in ``qfield.``

            .. note:: Requires version **1.1+** '''
        return self._atomic_generic_op('$pop', qfield, -1)

    def _atomic_list_op_multivalue(self, op, qfield, *value):
        qfield = resolve_name(self.query.type, qfield)
        if op not in qfield.valid_modifiers:
            raise InvalidModifierException(qfield, op)
        wrapped = []
        for v in value:
            wrapped.append(qfield.get_type().item_type.wrap(v))
        if op not in self.update_data:
            self.update_data[op] = {}
        # store the wrapped values so list items are serialized consistently
        self.update_data[op][qfield.get_absolute_name()] = wrapped
        return self

    def _atomic_list_op(self, op, qfield, value):
        qfield = resolve_name(self.query.type, qfield)
        if op not in qfield.valid_modifiers:
            raise InvalidModifierException(qfield, op)
        if op not in self.update_data:
            self.update_data[op] = {}
        self.update_data[op][qfield.get_absolute_name()] = qfield.child_type().wrap(value)
        return self

    def _atomic_expression_op(self, op, qfield, value):
        qfield = resolve_name(self.query.type, qfield)
        if op not in qfield.valid_modifiers:
            raise InvalidModifierException(qfield, op)
        if op not in self.update_data:
            self.update_data[op] = {}
        self.update_data[op][qfield.get_absolute_name()] = flatten(value.obj)
        return self

    def _atomic_op(self, op, qfield, value):
        qfield = resolve_name(self.query.type, qfield)
        if op not in qfield.valid_modifiers:
            raise InvalidModifierException(qfield, op)
        if op not in self.update_data:
            self.update_data[op] = {}
        self.update_data[op][qfield.get_absolute_name()] = qfield.wrap(value)
        return self

    def _atomic_generic_op(self, op, qfield, value):
        qfield = resolve_name(self.query.type, qfield)
        if op not in qfield.valid_modifiers:
            raise InvalidModifierException(qfield, op)
        if op not in self.update_data:
            self.update_data[op] = {}
        self.update_data[op][qfield.get_absolute_name()] = value
        return self

    def _get_upsert(self):
        return self.__upsert

    def _get_multi(self):
        return self.__multi

    def execute(self):
        ''' Execute the update expression on the database '''
        self.session.execute_update(self, safe=self.__safe)


class FindAndModifyExpression(UpdateExpression):
    def __init__(self, query, new, remove):
        self.__new = new
        self.__remove = remove
        UpdateExpression.__init__(self, query)

    def _get_remove(self):
        return self.__remove

    def _get_new(self):
        return self.__new

    def execute(self):
        ''' Execute the find and modify expression on the database '''
        return self.session.execute_find_and_modify(self)


class UpdateException(Exception):
    ''' Base class for exceptions related to updates '''
    pass


class InvalidModifierException(UpdateException):
    ''' Exception raised if a modifier was used which isn't valid for a field '''
    def __init__(self, field, op):
        UpdateException.__init__(self, 'Invalid modifier for %s field: %s' % (field.__class__.__name__, op))


class ConflictingModifierException(UpdateException):
    ''' Exception raised if conflicting modifiers are being used in the
        update expression '''
    pass
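# A minimal usage sketch (hedged): the fluent API above chains on ``self`` and
# accumulates a MongoDB update document in ``update_data``.  ``Person``,
# ``age`` and ``logins`` are hypothetical names, and the one-line chain in the
# comment assumes an ommongo session; the executable part mirrors
# ``_atomic_op`` with plain dicts so it runs stand-alone.
#
#   session.query(Person).filter(Person.name == 'Jeff').set(Person.age, 41).inc(Person.logins).execute()
if __name__ == '__main__':
    update_data = {}
    for op, field, value in (('$set', 'age', 41), ('$inc', 'logins', 1)):
        update_data.setdefault(op, {})[field] = value
    # this is the shape of the document handed to pymongo's update()
    assert update_data == {'$set': {'age': 41}, '$inc': {'logins': 1}}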
bapakode/OmMongo
ommongo/update_expression.py
Python
mit
8,848
import errno import os import socket import sys import time import warnings import eventlet from eventlet.hubs import trampoline, notify_opened, IOClosed from eventlet.support import get_errno, six __all__ = [ 'GreenSocket', '_GLOBAL_DEFAULT_TIMEOUT', 'set_nonblocking', 'SOCKET_BLOCKING', 'SOCKET_CLOSED', 'CONNECT_ERR', 'CONNECT_SUCCESS', 'shutdown_safe', 'SSL', ] BUFFER_SIZE = 4096 CONNECT_ERR = set((errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK)) CONNECT_SUCCESS = set((0, errno.EISCONN)) if sys.platform[:3] == "win": CONNECT_ERR.add(errno.WSAEINVAL) # Bug 67 if six.PY2: _python2_fileobject = socket._fileobject _original_socket = eventlet.patcher.original('socket').socket def socket_connect(descriptor, address): """ Attempts to connect to the address, returns the descriptor if it succeeds, returns None if it needs to trampoline, and raises any exceptions. """ err = descriptor.connect_ex(address) if err in CONNECT_ERR: return None if err not in CONNECT_SUCCESS: raise socket.error(err, errno.errorcode[err]) return descriptor def socket_checkerr(descriptor): err = descriptor.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) if err not in CONNECT_SUCCESS: raise socket.error(err, errno.errorcode[err]) def socket_accept(descriptor): """ Attempts to accept() on the descriptor, returns a client,address tuple if it succeeds; returns None if it needs to trampoline, and raises any exceptions. """ try: return descriptor.accept() except socket.error as e: if get_errno(e) == errno.EWOULDBLOCK: return None raise if sys.platform[:3] == "win": # winsock sometimes throws ENOTCONN SOCKET_BLOCKING = set((errno.EAGAIN, errno.EWOULDBLOCK,)) SOCKET_CLOSED = set((errno.ECONNRESET, errno.ENOTCONN, errno.ESHUTDOWN)) else: # oddly, on linux/darwin, an unconnected socket is expected to block, # so we treat ENOTCONN the same as EWOULDBLOCK SOCKET_BLOCKING = set((errno.EAGAIN, errno.EWOULDBLOCK, errno.ENOTCONN)) SOCKET_CLOSED = set((errno.ECONNRESET, errno.ESHUTDOWN, errno.EPIPE)) def set_nonblocking(fd): """ Sets the descriptor to be nonblocking. Works on many file-like objects as well as sockets. Only sockets can be nonblocking on Windows, however. """ try: setblocking = fd.setblocking except AttributeError: # fd has no setblocking() method. It could be that this version of # Python predates socket.setblocking(). In that case, we can still set # the flag "by hand" on the underlying OS fileno using the fcntl # module. try: import fcntl except ImportError: # Whoops, Windows has no fcntl module. This might not be a socket # at all, but rather a file-like object with no setblocking() # method. In particular, on Windows, pipes don't support # non-blocking I/O and therefore don't have that method. Which # means fcntl wouldn't help even if we could load it. raise NotImplementedError("set_nonblocking() on a file object " "with no setblocking() method " "(Windows pipes don't support non-blocking I/O)") # We managed to import fcntl. fileno = fd.fileno() orig_flags = fcntl.fcntl(fileno, fcntl.F_GETFL) new_flags = orig_flags | os.O_NONBLOCK if new_flags != orig_flags: fcntl.fcntl(fileno, fcntl.F_SETFL, new_flags) else: # socket supports setblocking() setblocking(0) try: from socket import _GLOBAL_DEFAULT_TIMEOUT except ImportError: _GLOBAL_DEFAULT_TIMEOUT = object() class GreenSocket(object): """ Green version of socket.socket class, that is intended to be 100% API-compatible. It also recognizes the keyword parameter, 'set_nonblocking=True'. 
Pass False to indicate that socket is already in non-blocking mode to save syscalls. """ # This placeholder is to prevent __getattr__ from creating an infinite call loop fd = None def __init__(self, family_or_realsock=socket.AF_INET, *args, **kwargs): should_set_nonblocking = kwargs.pop('set_nonblocking', True) if isinstance(family_or_realsock, six.integer_types): fd = _original_socket(family_or_realsock, *args, **kwargs) # Notify the hub that this is a newly-opened socket. notify_opened(fd.fileno()) else: fd = family_or_realsock # import timeout from other socket, if it was there try: self._timeout = fd.gettimeout() or socket.getdefaulttimeout() except AttributeError: self._timeout = socket.getdefaulttimeout() if should_set_nonblocking: set_nonblocking(fd) self.fd = fd # when client calls setblocking(0) or settimeout(0) the socket must # act non-blocking self.act_non_blocking = False # Copy some attributes from underlying real socket. # This is the easiest way that i found to fix # https://bitbucket.org/eventlet/eventlet/issue/136 # Only `getsockopt` is required to fix that issue, others # are just premature optimization to save __getattr__ call. self.bind = fd.bind self.close = fd.close self.fileno = fd.fileno self.getsockname = fd.getsockname self.getsockopt = fd.getsockopt self.listen = fd.listen self.setsockopt = fd.setsockopt self.shutdown = fd.shutdown self._closed = False @property def _sock(self): return self if six.PY3: def _get_io_refs(self): return self.fd._io_refs def _set_io_refs(self, value): self.fd._io_refs = value _io_refs = property(_get_io_refs, _set_io_refs) # Forward unknown attributes to fd, cache the value for future use. # I do not see any simple attribute which could be changed # so caching everything in self is fine. # If we find such attributes - only attributes having __get__ might be cached. # For now - I do not want to complicate it. def __getattr__(self, name): if self.fd is None: raise AttributeError(name) attr = getattr(self.fd, name) setattr(self, name, attr) return attr def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None): """ We need to trampoline via the event hub. We catch any signal back from the hub indicating that the operation we were waiting on was associated with a filehandle that's since been invalidated. """ if self._closed: # If we did any logging, alerting to a second trampoline attempt on a closed # socket here would be useful. raise IOClosed() try: return trampoline(fd, read=read, write=write, timeout=timeout, timeout_exc=timeout_exc, mark_as_closed=self._mark_as_closed) except IOClosed: # This socket's been obsoleted. De-fang it. 
self._mark_as_closed() raise def accept(self): if self.act_non_blocking: return self.fd.accept() fd = self.fd while True: res = socket_accept(fd) if res is not None: client, addr = res set_nonblocking(client) return type(self)(client), addr self._trampoline(fd, read=True, timeout=self.gettimeout(), timeout_exc=socket.timeout("timed out")) def _mark_as_closed(self): """ Mark this socket as being closed """ self._closed = True def __del__(self): # This is in case self.close is not assigned yet (currently the constructor does it) close = getattr(self, 'close', None) if close is not None: close() def connect(self, address): if self.act_non_blocking: return self.fd.connect(address) fd = self.fd if self.gettimeout() is None: while not socket_connect(fd, address): try: self._trampoline(fd, write=True) except IOClosed: raise socket.error(errno.EBADFD) socket_checkerr(fd) else: end = time.time() + self.gettimeout() while True: if socket_connect(fd, address): return if time.time() >= end: raise socket.timeout("timed out") try: self._trampoline(fd, write=True, timeout=end - time.time(), timeout_exc=socket.timeout("timed out")) except IOClosed: # ... we need some workable errno here. raise socket.error(errno.EBADFD) socket_checkerr(fd) def connect_ex(self, address): if self.act_non_blocking: return self.fd.connect_ex(address) fd = self.fd if self.gettimeout() is None: while not socket_connect(fd, address): try: self._trampoline(fd, write=True) socket_checkerr(fd) except socket.error as ex: return get_errno(ex) except IOClosed: return errno.EBADFD else: end = time.time() + self.gettimeout() while True: try: if socket_connect(fd, address): return 0 if time.time() >= end: raise socket.timeout(errno.EAGAIN) self._trampoline(fd, write=True, timeout=end - time.time(), timeout_exc=socket.timeout(errno.EAGAIN)) socket_checkerr(fd) except socket.error as ex: return get_errno(ex) except IOClosed: return errno.EBADFD def dup(self, *args, **kw): sock = self.fd.dup(*args, **kw) newsock = type(self)(sock, set_nonblocking=False) newsock.settimeout(self.gettimeout()) return newsock if six.PY3: def makefile(self, *args, **kwargs): return _original_socket.makefile(self, *args, **kwargs) else: def makefile(self, *args, **kwargs): dupped = self.dup() res = _python2_fileobject(dupped, *args, **kwargs) if hasattr(dupped, "_drop"): dupped._drop() return res def makeGreenFile(self, *args, **kw): warnings.warn("makeGreenFile has been deprecated, please use " "makefile instead", DeprecationWarning, stacklevel=2) return self.makefile(*args, **kw) def _read_trampoline(self): self._trampoline( self.fd, read=True, timeout=self.gettimeout(), timeout_exc=socket.timeout("timed out")) def _recv_loop(self, recv_meth, *args): fd = self.fd if self.act_non_blocking: return recv_meth(*args) while True: try: # recv: bufsize=0? # recv_into: buffer is empty? # This is needed because behind the scenes we use sockets in # nonblocking mode and builtin recv* methods. Attempting to read # 0 bytes from a nonblocking socket using a builtin recv* method # does not raise a timeout exception. Since we're simulating # a blocking socket here we need to produce a timeout exception # if needed, hence the call to trampoline. if not args[0]: self._read_trampoline() return recv_meth(*args) except socket.error as e: if get_errno(e) in SOCKET_BLOCKING: pass elif get_errno(e) in SOCKET_CLOSED: return b'' else: raise try: self._read_trampoline() except IOClosed as e: # Perhaps we should return '' instead? 
raise EOFError() def recv(self, bufsize, flags=0): return self._recv_loop(self.fd.recv, bufsize, flags) def recvfrom(self, bufsize, flags=0): return self._recv_loop(self.fd.recvfrom, bufsize, flags) def recv_into(self, buffer, nbytes=0, flags=0): return self._recv_loop(self.fd.recv_into, buffer, nbytes, flags) def recvfrom_into(self, buffer, nbytes=0, flags=0): return self._recv_loop(self.fd.recvfrom_into, buffer, nbytes, flags) def _send_loop(self, send_method, data, *args): if self.act_non_blocking: return send_method(data, *args) while 1: try: return send_method(data, *args) except socket.error as e: eno = get_errno(e) if eno == errno.ENOTCONN or eno not in SOCKET_BLOCKING: raise try: self._trampoline(self.fd, write=True, timeout=self.gettimeout(), timeout_exc=socket.timeout("timed out")) except IOClosed: raise socket.error(errno.ECONNRESET, 'Connection closed by another thread') def send(self, data, flags=0): return self._send_loop(self.fd.send, data, flags) def sendto(self, data, *args): return self._send_loop(self.fd.sendto, data, *args) def sendall(self, data, flags=0): tail = self.send(data, flags) len_data = len(data) while tail < len_data: tail += self.send(data[tail:], flags) def setblocking(self, flag): if flag: self.act_non_blocking = False self._timeout = None else: self.act_non_blocking = True self._timeout = 0.0 def settimeout(self, howlong): if howlong is None or howlong == _GLOBAL_DEFAULT_TIMEOUT: self.setblocking(True) return try: f = howlong.__float__ except AttributeError: raise TypeError('a float is required') howlong = f() if howlong < 0.0: raise ValueError('Timeout value out of range') if howlong == 0.0: self.act_non_blocking = True self._timeout = 0.0 else: self.act_non_blocking = False self._timeout = howlong def gettimeout(self): return self._timeout if "__pypy__" in sys.builtin_module_names: def _reuse(self): getattr(self.fd, '_sock', self.fd)._reuse() def _drop(self): getattr(self.fd, '_sock', self.fd)._drop() def _operation_on_closed_file(*args, **kwargs): raise ValueError("I/O operation on closed file") greenpipe_doc = """ GreenPipe is a cooperative replacement for file class. It will cooperate on pipes. It will block on regular file. Differences from file class: - mode is r/w property. Should be r/o - encoding property not implemented - write/writelines will not raise TypeError exception when non-string data is written; it will write str(data) instead - Universal new lines are not supported and newlines property not implemented - file argument can be descriptor, file name or file object. """ # import SSL module here so we can refer to greenio.SSL.exceptionclass try: from OpenSSL import SSL except ImportError: # pyOpenSSL not installed, define exceptions anyway for convenience class SSL(object): class WantWriteError(Exception): pass class WantReadError(Exception): pass class ZeroReturnError(Exception): pass class SysCallError(Exception): pass def shutdown_safe(sock): """ Shuts down the socket. This is a convenience method for code that wants to gracefully handle regular sockets, SSL.Connection sockets from PyOpenSSL and ssl.SSLSocket objects from Python 2.6 interchangeably. Both types of ssl socket require a shutdown() before close, but they have different arity on their shutdown method. Regular sockets don't need a shutdown before close, but it doesn't hurt.
""" try: try: # socket, ssl.SSLSocket return sock.shutdown(socket.SHUT_RDWR) except TypeError: # SSL.Connection return sock.shutdown() except socket.error as e: # we don't care if the socket is already closed; # this will often be the case in an http server context if get_errno(e) not in (errno.ENOTCONN, errno.EBADF): raise
collinstocks/eventlet
eventlet/greenio/base.py
Python
mit
17,181
from django.contrib import admin

from .models import *


class CaseAdmin(admin.ModelAdmin):
    list_display = ('name', 'firm', 'owner', 'active', 'created',)
    search_fields = ['owner__first_name', 'owner__email', 'firm__name', 'name']


admin.site.register(Move)
admin.site.register(Case, CaseAdmin)
Maachi/Gestion
cases/admin.py
Python
mit
301
def foo():
    pass


class TestProcessor(unittest.TestCase):
    def test_does_something(self):
        pass

    def test_something_else(self):
        pass
bebraw/speccer
tests/testcases/expected_hoisting.py
Python
mit
159
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'gui/species_prompt.ui' # # Created by: PyQt5 UI code generator 5.5.1 # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore, QtGui, QtWidgets class Ui_SpeciesPrompt(object): def setupUi(self, SpeciesPrompt): SpeciesPrompt.setObjectName("SpeciesPrompt") SpeciesPrompt.resize(325, 132) SpeciesPrompt.setFocusPolicy(QtCore.Qt.StrongFocus) self.centralwidget = QtWidgets.QWidget(SpeciesPrompt) self.centralwidget.setObjectName("centralwidget") self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget) self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 321, 122)) self.verticalLayoutWidget.setObjectName("verticalLayoutWidget") self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget) self.verticalLayout.setObjectName("verticalLayout") self.gridLayout = QtWidgets.QGridLayout() self.gridLayout.setObjectName("gridLayout") self.label = QtWidgets.QLabel(self.verticalLayoutWidget) self.label.setObjectName("label") self.gridLayout.addWidget(self.label, 3, 0, 1, 1) self.horizontalLayout = QtWidgets.QHBoxLayout() self.horizontalLayout.setObjectName("horizontalLayout") self.species_label = QtWidgets.QLabel(self.verticalLayoutWidget) self.species_label.setObjectName("species_label") self.horizontalLayout.addWidget(self.species_label) self.gridLayout.addLayout(self.horizontalLayout, 2, 0, 1, 1) self.dataset_name = QtWidgets.QLineEdit(self.verticalLayoutWidget) self.dataset_name.setObjectName("dataset_name") self.gridLayout.addWidget(self.dataset_name, 3, 1, 1, 1, QtCore.Qt.AlignHCenter) self.species_selection = QtWidgets.QComboBox(self.verticalLayoutWidget) sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.species_selection.sizePolicy().hasHeightForWidth()) self.species_selection.setSizePolicy(sizePolicy) self.species_selection.setObjectName("species_selection") self.gridLayout.addWidget(self.species_selection, 2, 1, 1, 1, QtCore.Qt.AlignHCenter) self.verticalLayout.addLayout(self.gridLayout) self.horizontalLayout_9 = QtWidgets.QHBoxLayout() self.horizontalLayout_9.setObjectName("horizontalLayout_9") spacerItem = QtWidgets.QSpacerItem(40, 10, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum) self.horizontalLayout_9.addItem(spacerItem) self.apply_button = QtWidgets.QPushButton(self.verticalLayoutWidget) self.apply_button.setDefault(True) self.apply_button.setObjectName("apply_button") self.horizontalLayout_9.addWidget(self.apply_button) self.verticalLayout.addLayout(self.horizontalLayout_9) SpeciesPrompt.setCentralWidget(self.centralwidget) self.retranslateUi(SpeciesPrompt) QtCore.QMetaObject.connectSlotsByName(SpeciesPrompt) def retranslateUi(self, SpeciesPrompt): _translate = QtCore.QCoreApplication.translate SpeciesPrompt.setWindowTitle(_translate("SpeciesPrompt", "Species Prompt")) self.label.setText(_translate("SpeciesPrompt", "Dataset Name:")) self.species_label.setText(_translate("SpeciesPrompt", "Ion Species:")) self.apply_button.setText(_translate("SpeciesPrompt", "Apply"))
DanielWinklehner/py_particle_processor
py_particle_processor_qt/gui/species_prompt.py
Python
mit
3,621
#!/usr/bin/env python
import unittest

from src.lib import wsplit


class TestUrlWord(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_url_split(self):
        url = "http://example.com/foo345-/43/23-1/bar"
        words = ["foo", "bar"]
        clean_url = wsplit.url_split(url)
        self.assertEqual(words[0], clean_url[0])
        self.assertEqual(words[1], clean_url[1])


if __name__ == '__main__':
    unittest.main()
mad01/hermit
tests/test_url_words.py
Python
mit
484
''' Created on 04.04.2011 @author: michi ''' from sqlalchemy import Table, select, Column from sqlalchemy.sql.expression import _UnaryExpression,Alias from sqlalchemy.sql.operators import asc_op,desc_op import sqlalchemy.schema from sqlalchemy.sql import func class FromCalculator(object): def __init__(self, fromConstruct): self._fromConstruct = fromConstruct def getFromObj(self, columnList=[], where=None, orderBy=None): return self._fromConstruct def getFromConstruct(self): return self._fromConstruct def setFromConstruct(self, fromConstruct): self._fromConstruct = fromConstruct class FromCalculatorMinimal(FromCalculator): def __init__(self, mainTable): self._mainTable = mainTable self._joins = {} def addJoin(self,tableOrAlias,onClause=None,dependency=None,name=None): if name is None: name = tableOrAlias.description self._joins[name] = {'table':tableOrAlias, 'onClause':onClause, 'dependency':dependency} @property def joins(self): return self._joins @property def mainTable(self): return self._mainTable def _searchExpressionTables(self, exp, tables): if isinstance(exp, Column): colName = str(exp) tables.append(colName.split(".")[0]) elif hasattr(exp, 'get_children'): for child in exp.get_children(): if isinstance(child, Column): colName = str(child) tables.append(colName.split(".")[0]) else: self._searchExpressionTables(child, tables) def getCompleteFromObj(self): completeColumnList = [] for name in self._joins: for col in self._joins[name]['table'].c: completeColumnList.append(col) return self.getFromObj(completeColumnList, None, None) def getFromObj(self, columnList=[], where=None, orderBy=None): neededTableNames = [] for c in columnList: tableName = unicode(c).split('.')[0] if tableName not in neededTableNames: neededTableNames.append(tableName) whereTables = [] self._searchExpressionTables(where, whereTables) for tableName in whereTables: if tableName not in neededTableNames: neededTableNames.append(tableName) orderByTables = [] if orderBy is not None: for ob in orderBy: self._searchExpressionTables(ob, orderByTables) for tableName in orderByTables: if tableName not in neededTableNames: neededTableNames.append(tableName) fromObj = self._mainTable addedJoins = [] for tableName in neededTableNames: if tableName != self._mainTable: fromObj = self._addJoin(fromObj, tableName, addedJoins) return fromObj def _addJoin(self, fromObj, tableName, addedJoins): if tableName in addedJoins: return fromObj if self._joins.has_key(tableName): if self._joins[tableName]['dependency'] is not None: fromObj = self._addJoin(fromObj, self._joins[tableName]['dependency'], addedJoins) if self._joins[tableName]['onClause'] is None: fromObj = fromObj.join(self._joins[tableName]['table']) addedJoins.append(tableName) else: fromObj = fromObj.join(self._joins[tableName]['table'], onclause=self._joins[tableName]['onClause']) addedJoins.append(tableName) return fromObj class QueryBuilder(object): def __init__(self, fromCalculator=None, fromObj=None, where=None, orderBy=None, currentColumnlist=[], possibleColumns=None, dirtyListener=None): self.__fromObj = fromObj self.__dirty = True self.__currentColumnList = currentColumnlist self.__possibleColumns = possibleColumns self.__where = where self._dirtyListener = dirtyListener if orderBy is not None: self.orderBy = orderBy else: self.__orderBy = orderBy self.__query = None if fromCalculator is None: if fromObj is None: raise TypeError("Either assign fromObj or FromCalculator") self.__fromCalculator = FromCalculator(fromObj) else: self.__fromCalculator = fromCalculator def 
setDirtyListener(self, clbl): self._dirtyListener = clbl def setFromObj(self, fromObj): self.__fromCalculator.setFromConstruct(fromObj) self.__fromObj = fromObj def getFromObj(self): # return self.__fromObj return self.__fromCalculator.getFromObj(self.currentColumnList, self.where,self.orderBy) fromObj = property(getFromObj,setFromObj,None,"Set from Part of query") def getWhere(self): return self.__where def setWhere(self,whereCondition): self.__where = whereCondition self._setDirty() def delWhere(self): self.__where = None self._setDirty() where = property(getWhere,setWhere,delWhere,"Set where part of tplQuery") def getOrderBy(self): return self.__orderBy def setOrderBy(self, orderBy): if isinstance(orderBy,tuple): self.__orderBy = orderBy else: self.__orderBy = (orderBy,) self._setDirty() def delOrderBy(self): self.__orderBy = None self._setDirty() orderBy = property(getOrderBy, setOrderBy, delOrderBy, "Set order_by part of tplQuery") def getPossibleColumns(self): if self.__possibleColumns is None: self.__possibleColumns = self.getCompleteColumnList() return self.__possibleColumns def setPossibleColumns(self,columns): self.__possibleColumns = columns possibleColumns = property(getPossibleColumns,setPossibleColumns,None,"") def getCompleteColumnList(self): completeColumnList = [] for fromCond in select(from_obj=self.__fromObj).locate_all_froms(): if isinstance(fromCond, Table): for column in fromCond.columns: completeColumnList.append(column) elif isinstance(fromCond,Alias): if isinstance(fromCond.original,Table): for column in fromCond.c: completeColumnList.append(column) return completeColumnList completeColumnList = property(getCompleteColumnList,None,None,'') def getCurrentColumnList(self): if not len(self.__currentColumnList): return self.possibleColumns return self.__currentColumnList def setCurrentColumnList(self, cList): self.__currentColumnList = cList self._setDirty() def delCurrentColumnList(self): self.__currentColumnList = [] self._setDirty() currentColumnList = property(getCurrentColumnList,setCurrentColumnList, delCurrentColumnList,"Set current columnlist") def _setDirty(self): self.__dirty = True if self._dirtyListener is not None: self._dirtyListener() @property def dirty(self): return self.__dirty def getQuery(self): if self.__dirty: query = select(from_obj=self.getFromObj()).with_only_columns(self.currentColumnList) if self.__where is not None: query = query.where(self.__where) if self.__orderBy is not None: query = query.order_by(*self.__orderBy) self.__query = query self.__dirty = False return self.__query
mtils/ems
ems/model/alchemy/querybuilder.py
Python
mit
8,236
#!/usr/bin/python
import struct, array, fcntl

class struxx:
    _fields = None
    _format = None
    _buffer = None

    def __init__(self):
        self.reset()

    def __len__(self):
        """binary representation length, for fields, use __dict__ or something"""
        return struct.calcsize(self._format)

    def __iter__(self):
        return [getattr(self, field) for field in self._fields.split(";")].__iter__()

    def reset(self):
        for field in self._fields.split(";"):
            setattr(self, field, 0)
        self._buffer = array.array('B', [0]*len(self))

    def pack(self):
        self._buffer = array.array('B', struct.pack(self._format, *self))

    def unpack(self):
        rv = struct.unpack(self._format, self._buffer)
        for i in range(len(rv)):
            setattr(self, self._fields.split(";")[i], rv[i])

    def ioctl(self, fd, ioctlno):
        self.pack()
        rv = fcntl.ioctl(fd, ioctlno, self._buffer, True)
        self.unpack()
        return rv

class uint(struxx):
    _fields = "uint"
    _format = "I"

    def get_version(self, fd): return self.ioctl(fd, HIDIOCGVERSION)
    def get_flags(self, fd): return self.ioctl(fd, HIDIOCGFLAG)
    def set_flags(self, fd): return self.ioctl(fd, HIDIOCSFLAG)

class hiddev_devinfo(struxx):
    _fields = "bustype;busnum;devnum;ifnum;vendor;product;version;num_applications"
    _format = "IIIIhhhI"

    def get(self, fd): return self.ioctl(fd, HIDIOCGDEVINFO)

class hiddev_string_descriptor(struxx):
    _fields = "index;value"
    _format = "i256c"

    def reset(self):
        self.index = 0
        self.value = '\0'*256

    def pack(self):
        tmp = struct.pack("i", self.index) + self.value[:256].ljust(256, '\0')
        self._buffer = array.array('B', tmp)

    def unpack(self):
        # struct.unpack returns a 1-tuple; keep just the integer index
        self.index = struct.unpack("i", self._buffer[:4])[0]
        self.value = self._buffer[4:].tostring()

    def get_string(self, fd, idx):
        self.index = idx
        return self.ioctl(fd, HIDIOCGSTRING)

class hiddev_report_info(struxx):
    _fields = "report_type;report_id;num_fields"
    _format = "III"

    def get_info(self, fd): return self.ioctl(fd, HIDIOCGREPORTINFO)

class hiddev_field_info(struxx):
    _fields = "report_type;report_id;field_index;maxusage;flags;physical;logical;application;logical_minimum;logical_maximum;physical_minimum;physical_maximum;unit_exponent;unit"
    _format = "I"*8+"i"*4+"II"

    def get_info(self, fd): return self.ioctl(fd, HIDIOCGFIELDINFO)

class hiddev_usage_ref(struxx):
    _fields = "report_type;report_id;field_index;usage_index;usage_code;value"
    _format = "I"*5+"i"

class hiddev_collection_info(struxx):
    _fields = "index;type;usage;level"
    _format = "I"*4

    def get_info(self, fd, index):
        self.index = index
        return self.ioctl(fd, HIDIOCGCOLLECTIONINFO)

class hiddev_event(struxx):
    _fields = "hid;value"
    _format = "Hi"

IOCPARM_MASK = 0x7f
IOC_NONE = 0x20000000
IOC_WRITE = 0x40000000
IOC_READ = 0x80000000

def FIX(x): return struct.unpack("i", struct.pack("I", x))[0]
def _IO(x,y): return FIX(IOC_NONE|(ord(x)<<8)|y)
def _IOR(x,y,t): return FIX(IOC_READ|((t&IOCPARM_MASK)<<16)|(ord(x)<<8)|y)
def _IOW(x,y,t): return FIX(IOC_WRITE|((t&IOCPARM_MASK)<<16)|(ord(x)<<8)|y)
def _IOWR(x,y,t): return FIX(IOC_READ|IOC_WRITE|((t&IOCPARM_MASK)<<16)|(ord(x)<<8)|y)

HIDIOCGVERSION        =_IOR('H', 0x01, struct.calcsize("I"))
HIDIOCAPPLICATION     =_IO('H', 0x02)
HIDIOCGDEVINFO        =_IOR('H', 0x03, len(hiddev_devinfo()))
HIDIOCGSTRING         =_IOR('H', 0x04, len(hiddev_string_descriptor()))
HIDIOCINITREPORT      =_IO('H', 0x05)
def HIDIOCGNAME(buflen): return _IOR('H', 0x06, buflen)
HIDIOCGREPORT         =_IOW('H', 0x07, len(hiddev_report_info()))
HIDIOCSREPORT         =_IOW('H', 0x08, len(hiddev_report_info()))
HIDIOCGREPORTINFO     =_IOWR('H', 0x09, len(hiddev_report_info()))
HIDIOCGFIELDINFO      =_IOWR('H', 0x0A, len(hiddev_field_info()))
HIDIOCGUSAGE          =_IOWR('H', 0x0B, len(hiddev_usage_ref()))
HIDIOCSUSAGE          =_IOW('H', 0x0C, len(hiddev_usage_ref()))
HIDIOCGUCODE          =_IOWR('H', 0x0D, len(hiddev_usage_ref()))
HIDIOCGFLAG           =_IOR('H', 0x0E, struct.calcsize("I"))
HIDIOCSFLAG           =_IOW('H', 0x0F, struct.calcsize("I"))
HIDIOCGCOLLECTIONINDEX =_IOW('H', 0x10, len(hiddev_usage_ref()))
HIDIOCGCOLLECTIONINFO =_IOWR('H', 0x11, len(hiddev_collection_info()))
def HIDIOCGPHYS(buflen): return _IOR('H', 0x12, buflen)

HID_REPORT_TYPE_INPUT   =1
HID_REPORT_TYPE_OUTPUT  =2
HID_REPORT_TYPE_FEATURE =3
HID_REPORT_TYPE_MIN     =1
HID_REPORT_TYPE_MAX     =3

HID_REPORT_ID_UNKNOWN =0xffffffff
HID_REPORT_ID_FIRST   =0x00000100
HID_REPORT_ID_NEXT    =0x00000200
HID_REPORT_ID_MASK    =0x000000ff
HID_REPORT_ID_MAX     =0x000000ff

def enum_reports(fd):
    for report_type in (HID_REPORT_TYPE_INPUT, HID_REPORT_TYPE_OUTPUT, HID_REPORT_TYPE_FEATURE):
        for i in range(HID_REPORT_ID_MAX+1):
            try:
                ri = hiddev_report_info()
                ri.report_type = report_type
                ri.report_id = i
                #print "trying", ri.__dict__
                ri.get_info(fd)
                print "%s(%s): %s fields" % ({1: 'input', 2:'output', 3:'feature'}.get(ri.report_type), ri.report_id, ri.num_fields)
                for field in range(ri.num_fields):
                    fi = hiddev_field_info()
                    fi.report_type = ri.report_type
                    fi.report_id = ri.report_id
                    fi.field_index = field
                    fi.get_info(fd)
                    print ", ".join(["%s:%s" % (key, fi.__dict__[key]) for key in fi.__dict__ if key not in ("report_type", "report_id", "_buffer") and fi.__dict__[key]])
                #print report_info.__dict__
                print
            except IOError:
                pass

if __name__=="__main__":
    # name = ""
    # for name in globals():
    #     if name.startswith("HID"):
    #         if type(globals()[name]) == int:
    #             print name, "\t%x" % globals()[name]

    f = open("/dev/usb/hiddev0", "r")

    tmp = uint()
    tmp.get_version(f)
    print "version 0x%x" % tmp.uint
    tmp.get_flags(f)
    print "flags 0x%x" % tmp.uint
    tmp.uint = 3
    tmp.set_flags(f)
    tmp.get_flags(f)
    print "flags 0x%x" % tmp.uint

    devinfo = hiddev_devinfo()
    devinfo.get(f)
    print "devinfo", devinfo.__dict__

    enum_reports(f)

def get_device_name(f):
    a = array.array('B', [0]*1024)
    fcntl.ioctl(f, HIDIOCGNAME(1024), a, True)
    print a

def get_some_strings(f):
    for i in range(-10000, 10000):
        try:
            string = hiddev_string_descriptor()
            string.get_string(f, i)
            print "string %s: %s" % (string.index, repr(string.value))
        except IOError:
            pass

def show_all_collections(f):
    for i in range(256):
        try:
            collection_info = hiddev_collection_info()
            collection_info.get_info(f, i)
            print "coll %s" % i, collection_info.__dict__
            print """    index: %(index)s
    type: %(type)s
    level: %(level)s
    usage: 0x%(usage)x""" % collection_info.__dict__
        except IOError:
            pass
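# A self-contained sanity check of the ioctl request-number packing above; it
# exercises only the FIX/_IOR bit arithmetic and touches no device.  The
# expected value is the usual Linux encoding of HIDIOCGVERSION, assuming a
# platform where struct.calcsize("I") == 4.
def _selftest_ioctl_packing():
    assert struct.calcsize("I") == 4
    # read request, 4-byte payload, type 'H', number 0x01
    assert _IOR('H', 0x01, struct.calcsize("I")) & 0xffffffff == 0x80044801

_selftest_ioctl_packing()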
ActiveState/code
recipes/Python/576834_Interrogating_linux_devusbhiddev0/recipe-576834.py
Python
mit
6,781
# Copyright (c) 2011-2013, Alexander Kulakov # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from __future__ import with_statement import six import sys import logging from collections import namedtuple from tempfile import SpooledTemporaryFile from zope.interface import implementer from gevent import socket from gevent.event import Event from .interfaces import IConnection from .const import ( FCGI_VERSION, FCGI_STDIN, FCGI_STDOUT, FCGI_STDERR, FCGI_DATA, FCGI_NULL_REQUEST_ID, FCGI_RECORD_HEADER_LEN, FCGI_RECORD_TYPES, FCGI_MAX_CONTENT_LEN, ) from .utils import pack_header, unpack_header if sys.version_info > (3,): buffer = memoryview __all__ = ( 'PartialRead', 'BufferedReader', 'Record', 'Connection', 'InputStream', 'StdoutStream', 'StderrStream', ) logger = logging.getLogger(__name__) class PartialRead(Exception): """ Raised by buffered_reader when it fails to read requested length of data """ def __init__(self, requested_size, partial_data): super(PartialRead, self).__init__( 'Expected {0} but received {1} bytes only'.format( requested_size, len(partial_data))) self.requested_size = requested_size self.partial_data = partial_data class BufferedReader(object): """ Allows to receive data in large chunks """ def __init__(self, read_callable, buffer_size): self._reader = self._reader_generator(read_callable, buffer_size) next(self._reader) # advance generator to first yield statement def read_bytes(self, max_len): return self._reader.send(max_len) @staticmethod def _reader_generator(read, buf_size): buf = b'' blen = 0 chunks = [] size = (yield) while True: if blen >= size: data, buf = buf[:size], buf[size:] blen -= size else: while blen < size: chunks.append(buf) buf = read( (size - blen + buf_size - 1) // buf_size * buf_size) if not buf: raise PartialRead(size, b''.join(chunks)) blen += len(buf) blen -= size if blen: chunks.append(buf[:-blen]) buf = buf[-blen:] else: chunks.append(buf) buf = b'' data = b''.join(chunks) chunks = [] size = (yield data) class Record(namedtuple('Record', ('type', 'content', 'request_id'))): def __str__(self): return '<Record {0}, req id {1}, {2} bytes>'.format( FCGI_RECORD_TYPES.get(self.type, self.type), self.request_id, len(self.content)) @implementer(IConnection) class Connection(object): def __init__(self, sock, buffer_size=4096): self._sock = sock self.buffered_reader = BufferedReader(sock.recv, buffer_size) def write_record(self, record): send = self._sock.send content_len = len(record.content) if content_len > FCGI_MAX_CONTENT_LEN: raise ValueError('Record 
content length exceeds {0}'.format( FCGI_MAX_CONTENT_LEN)) header = pack_header( FCGI_VERSION, record.type, record.request_id, content_len, 0) for buf, length in ( (header, FCGI_RECORD_HEADER_LEN), (record.content, content_len), ): if isinstance(buf, six.text_type): buf = buf.encode("ISO-8859-1") sent = 0 while sent < length: sent += send(buffer(buf[sent:])) def read_record(self): read_bytes = self.buffered_reader.read_bytes try: header = read_bytes(FCGI_RECORD_HEADER_LEN) except PartialRead as x: if x.partial_data: logger.exception('Partial header received: {0}'.format(x)) raise # Remote side closed connection after sending all records logger.debug('Connection closed by peer') return None except StopIteration: # Connection closed unexpectedly logger.debug('Connection closed by peer') return None version, record_type, request_id, content_len, padding = ( unpack_header(header)) if content_len: content = read_bytes(content_len) else: content = '' if padding: # pragma: no cover read_bytes(padding) if isinstance(content, six.text_type): content = content.encode("ISO-8859-1") return Record(record_type, content, request_id) def __iter__(self): return iter(self.read_record, None) def close(self): if self._sock: self._sock.close() self._sock = None def done_writing(self): self._sock.shutdown(socket.SHUT_WR) class InputStream(object): """ FCGI_STDIN or FCGI_DATA stream. Uses temporary file to store received data once max_mem bytes have been received. """ def __init__(self, max_mem=1024): self._file = SpooledTemporaryFile(max_mem) self._eof_received = Event() def __del__(self): self._file.close() def feed(self, data): if self._eof_received.is_set(): raise IOError('Feeding file beyond EOF mark') if not data: # EOF mark self._file.seek(0) self._eof_received.set() else: if isinstance(data, six.text_type): data = data.encode("ISO-8859-1") self._file.write(data) def __iter__(self): self._eof_received.wait() return iter(self._file) def read(self, size=-1): self._eof_received.wait() return self._file.read(size) def readline(self, size=-1): self._eof_received.wait() return self._file.readline(size) def readlines(self, sizehint=0): self._eof_received.wait() return self._file.readlines(sizehint) @property def eof_received(self): return self._eof_received.is_set() class OutputStream(object): """ FCGI_STDOUT or FCGI_STDERR stream. 
""" def __init__(self, conn, request_id): self.conn = conn self.request_id = request_id self.closed = False def write(self, data): if self.closed: raise IOError('Writing to closed stream {0}'.format(self)) if not data: return write_record = self.conn.write_record record_type = self.record_type request_id = self.request_id size = len(data) if size <= FCGI_MAX_CONTENT_LEN: record = Record(record_type, data, request_id) write_record(record) else: data = buffer(data) sent = 0 while sent < size: record = Record(record_type, data[sent:sent + FCGI_MAX_CONTENT_LEN], request_id) write_record(record) sent += FCGI_MAX_CONTENT_LEN def writelines(self, lines): if self.closed: raise IOError('Writing to closed stream {0}'.format(self)) write_record = self.conn.write_record record_type = self.record_type request_id = self.request_id buf = [] remainder = FCGI_MAX_CONTENT_LEN for line in lines: if not line: # skip empty lines continue line_len = len(line) if isinstance(line, six.text_type): line = line.encode("ISO-8859-1") if line_len >= remainder: buf.append(line[:remainder]) record = Record(record_type, b''.join(buf), request_id) write_record(record) buf = [line[remainder:]] remainder = FCGI_MAX_CONTENT_LEN else: buf.append(line) remainder -= line_len if buf: record = Record(record_type, b''.join(buf), request_id) write_record(record) def flush(self): pass def close(self): if not self.closed: self.closed = True self.conn.write_record( Record(self.record_type, b'', self.request_id)) class StdoutStream(OutputStream): record_type = FCGI_STDOUT def writelines(self, lines): # WSGI server must not buffer application iterable if isinstance(lines, (list, tuple)): # ...unless we have all output readily available OutputStream.writelines(self, lines) else: if self.closed: raise IOError('Writing to closed stream {0}'.format(self)) write_record = self.conn.write_record record_type = self.record_type request_id = self.request_id for line in lines: if line: record = Record(record_type, line, request_id) write_record(record) class StderrStream(OutputStream): record_type = FCGI_STDERR
momyc/gevent-fastcgi
gevent_fastcgi/base.py
Python
mit
10,374
import pprint import sublime import sublime_plugin def unexpanduser(path): from os.path import expanduser return path.replace(expanduser('~'), '~') try: import os, sys # stupid python module system sys.path.append(os.path.dirname(os.path.realpath(__file__))) from .editorconfig import get_properties, EditorConfigError except: # Python 2 from editorconfig import get_properties, EditorConfigError LINE_ENDINGS = { 'lf': 'unix', 'crlf': 'windows', 'cr': 'cr' } CHARSETS = { 'latin1': 'Western (ISO 8859-1)', 'utf-8': 'utf-8', 'utf-8-bom': 'utf-8 with bom', 'utf-16be': 'utf-16 be', 'utf-16le': 'utf-16 le' } def log(msg): print('EditorConfig: %s' % msg) def debug(msg): if sublime.load_settings('EditorConfig.sublime-settings').get('debug', False): log(msg) class EditorConfig(sublime_plugin.EventListener): MARKER = 'editorconfig' def on_load(self, view): if not view.settings().has(self.MARKER): self.init(view, 'load') def on_activated(self, view): if not view.settings().has(self.MARKER): self.init(view, 'activated') def on_pre_save(self, view): self.init(view, 'pre_save') def on_post_save(self, view): if not view.settings().has(self.MARKER): self.init(view, 'post_save') def init(self, view, event): path = view.file_name() if not path: return try: config = get_properties(path) except EditorConfigError: print('Error occurred while getting EditorConfig properties') else: if config: if event == 'activated' or event == 'load': debug('File Path \n%s' % unexpanduser(path)) debug('Applied Settings \n%s' % pprint.pformat(config)) if event == 'pre_save': self.apply_pre_save(view, config) else: self.apply_config(view, config) def apply_pre_save(self, view, config): settings = view.settings() spaces = settings.get('translate_tabs_to_spaces') charset = config.get('charset') end_of_line = config.get('end_of_line') indent_style = config.get('indent_style') insert_final_newline = config.get('insert_final_newline') if charset in CHARSETS: view.set_encoding(CHARSETS[charset]) if end_of_line in LINE_ENDINGS: view.set_line_endings(LINE_ENDINGS[end_of_line]) if indent_style == 'space' and spaces == False: view.run_command('expand_tabs', {'set_translate_tabs': True}) elif indent_style == 'tab' and spaces == True: view.run_command('unexpand_tabs', {'set_translate_tabs': True}) if insert_final_newline == 'false': view.run_command('remove_final_newlines') def apply_config(self, view, config): settings = view.settings() indent_style = config.get('indent_style') indent_size = config.get('indent_size') trim_trailing_whitespace = config.get('trim_trailing_whitespace') insert_final_newline = config.get('insert_final_newline') if indent_style == 'space': settings.set('translate_tabs_to_spaces', True) elif indent_style == 'tab': settings.set('translate_tabs_to_spaces', False) if indent_size: try: settings.set('tab_size', int(indent_size)) except ValueError: pass if trim_trailing_whitespace == 'true': settings.set('trim_trailing_white_space_on_save', True) elif trim_trailing_whitespace == 'false': settings.set('trim_trailing_white_space_on_save', False) if insert_final_newline == 'true': settings.set('ensure_newline_at_eof_on_save', True) elif insert_final_newline == 'false': settings.set('ensure_newline_at_eof_on_save', False) view.settings().set(self.MARKER, True) class RemoveFinalNewlinesCommand(sublime_plugin.TextCommand): def run(self, edit): region = self.view.find('\n*\Z', 0) self.view.erase(edit, region)
sindresorhus/editorconfig-sublime
EditorConfig.py
Python
mit
3,648
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('cfp', '0047_auto_20150412_0647'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='conference',
            name='topics',
        ),
        migrations.RemoveField(
            model_name='savedsearch',
            name='topic',
        ),
    ]
kyleconroy/speakers
cfp/migrations/0048_auto_20150412_0740.py
Python
mit
459
''' Created on 7 juin 2016 @author: saldenisov ''' from PyQt5.Qt import QMainWindow from PyQt5.QtGui import QCloseEvent from utility import MainObserver from utility import Meta from views import Ui_MainWindow from _functools import partial class MainView(QMainWindow, MainObserver, metaclass=Meta): """ """ def __init__(self, in_controller, in_model, parent=None): """ """ super(QMainWindow, self).__init__(parent) self.controller = in_controller self.model = in_model self.ui = Ui_MainWindow() self.ui.setupUi(self) self.model.add_observer(self) self.ui.actionHelp.triggered.connect(self.controller.help_clicked) self.ui.actionAuthor.triggered.connect(self.controller.author_clicked) self.ui.actionOpen.triggered.connect(self.controller.open_clicked) self.ui.actionQuit.triggered.connect(partial(self.controller.quit_clicked, QCloseEvent())) self.ui.button_empty.clicked.connect(self.controller.empty_clicked) self.ui.button_open.clicked.connect(self.controller.open_clicked) def loading(self): self.ui.button_open.click() def closeEvent(self, event): self.controller.quit_clicked(event)
Saldenisov/QY_itegrating_sphere
views/windows_views/main_view.py
Python
mit
1,248
"""Routines for I/O.""" import fcntl import os if False: from typing import IO # noqa: F401 def set_nonblock(fd): # type: (int) -> None """Set the given file descriptor to non-blocking mode.""" fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) class PushbackReader(object): """Wrapper for streams with push back operations.""" def __init__(self, raw): # type: (IO[str]) -> None """Initialize the reader.""" self._raw = raw self._buf = '' def close(self): # type: () -> None """Close the stream.""" self._raw.close() def fileno(self): # type: () -> int """Return the file descriptor.""" return self._raw.fileno() def read(self): # type: () -> str """Read data from the stream.""" s = self._buf + self._raw.read() self._buf = '' return s def unread(self, s): # type: (str) -> None """Push back a string. Push back the given string to the internal buffer, which will be used for the next ``read()`` or ``read0()``. """ self._buf = s + self._buf def read0(self): # type: () -> str """Read the pushed-back string. Read a string pushed-back by a previous ``unread()``. No call to the underlying raw stream's ``read()`` occurs. """ s = self._buf self._buf = '' return s
tueda/python-form
form/ioutil.py
Python
mit
1,515
from ai import tah_pocitace


def vyhodnot(herni_pole):
    "Return a one-character string describing the state of the game"
    if 'xxx' in herni_pole:
        return 'x'
    if 'ooo' in herni_pole:
        return 'o'
    if '-' not in herni_pole:
        return '!'
    return '-'


def tah(herni_pole, cislo_policka, symbol):
    "Return the board with the given symbol placed at the given position"
    return zamen(herni_pole, cislo_policka, symbol)


def zamen(retezec, pozice, znak):
    """Replace the character at the given position

    Return a string that has the given character at the given position;
    otherwise it is identical to the input string
    """
    return retezec[:pozice] + znak + retezec[pozice + 1:]


def tah_hrace(herni_pole):
    symbol_hrace = 'o'
    while True:
        try:
            pozice = int(input('Which position do you want to play?'))
        except ValueError:
            print('That is not a number!')
        else:
            pozice = pozice - 1
            if pozice < 0 or pozice >= len(herni_pole):
                print('You cannot play there, that is outside the board!')
            elif herni_pole[pozice] != '-':
                print('You cannot play there, that position is taken.')
            else:
                return tah(herni_pole, pozice, symbol_hrace)


def piskvorky1d():
    "One-dimensional tic-tac-toe"
    herni_pole = '-' * 20
    while vyhodnot(herni_pole) == '-':
        print(herni_pole)
        herni_pole = tah_hrace(herni_pole)
        if vyhodnot(herni_pole) != '-':
            break
        print(herni_pole)
        herni_pole = tah_pocitace(herni_pole)
    print(herni_pole)
    if vyhodnot(herni_pole) == '!':
        print('Draw!')
    else:
        print('The winner is {}'.format(vyhodnot(herni_pole)))


piskvorky1d()
Ajuska/pyladies
06/ukoly/piskvorky.py
Python
mit
1,883
def info_file_parser(filename, verbose=False):
    """Parse a whitespace-separated "key value" info file into a dict.

    Lines starting with '#' and lines with fewer than two tokens are skipped;
    a ':' in the key is stripped.  ``verbose`` is accepted for compatibility
    and currently unused.
    """
    results = {}
    with open(filename, 'r') as infile:
        for line in infile:
            if line[0] == '#':
                continue
            tokens = line.split()
            if len(tokens) < 2:
                continue
            if tokens[0][0] == '#':
                continue
            infotype = tokens[0].replace(':', '')
            results[infotype] = tokens[1]
    return results
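# A tiny end-to-end check using a temporary file; the key/value pairs below
# are made up for illustration.
if __name__ == '__main__':
    import os
    import tempfile
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as f:
        f.write('# comment line\npatient: 42\nlaterality L\n')
    try:
        assert info_file_parser(path) == {'patient': '42', 'laterality': 'L'}
    finally:
        os.remove(path)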
carlomt/dicom_tools
dicom_tools/info_file_parser.py
Python
mit
421
#coding: utf-8 from django.test import TestCase from mock import patch from ..models import MangoPayBankAccount from .factories import (MangoPayIBANBankAccountFactory, MangoPayUSBankAccountFactory, MangoPayOTHERBankAccountFactory) from .client import MockMangoPayApi from ..constants import (BA_BIC_IBAN, BA_US, BA_OTHER) class MangoPayBankAccountTests(TestCase): def setUp(self): self.bank_account_iban = MangoPayIBANBankAccountFactory() self.bank_account_us = MangoPayUSBankAccountFactory() self.bank_account_other = MangoPayOTHERBankAccountFactory() @patch("mangopay.models.get_mangopay_api_client") def test_create_iban_bank_account(self, mock_client): id_ = 33322 mock_client.return_value = MockMangoPayApi(bank_account_id=id_) self.assertIsNone(self.bank_account_iban.mangopay_id) self.bank_account_iban.create() MangoPayBankAccount.objects.get(id=self.bank_account_iban.id, mangopay_id=id_) self.assertEqual(self.bank_account_iban.account_type, BA_BIC_IBAN) self.assertIsNone(self.bank_account_iban.account_number) self.assertIsNotNone(self.bank_account_iban.iban) @patch("mangopay.models.get_mangopay_api_client") def test_create_us_bank_account(self, mock_client): id_ = 42333 mock_client.return_value = MockMangoPayApi(bank_account_id=id_) self.assertIsNone(self.bank_account_us.mangopay_id) self.bank_account_us.create() MangoPayBankAccount.objects.get(id=self.bank_account_us.id, mangopay_id=id_) self.assertEqual(self.bank_account_us.account_type, BA_US) self.assertIsNone(self.bank_account_us.iban) self.assertIsNotNone(self.bank_account_us.aba) self.assertIsNotNone(self.bank_account_us.deposit_account_type) self.assertIsNotNone(self.bank_account_us.account_number) @patch("mangopay.models.get_mangopay_api_client") def test_create_other_bank_account(self, mock_client): id_ = 22333 mock_client.return_value = MockMangoPayApi(bank_account_id=id_) self.assertIsNone(self.bank_account_other.mangopay_id) self.bank_account_other.create() MangoPayBankAccount.objects.get(id=self.bank_account_other.id, mangopay_id=id_) self.assertEqual(self.bank_account_other.account_type, BA_OTHER) self.assertIsNone(self.bank_account_other.iban) self.assertIsNotNone(self.bank_account_other.account_number) @patch("mangopay.models.get_mangopay_api_client") def test_create_with_unicode_characters(self, mock_client): id_ = 33322 mock_client.return_value = MockMangoPayApi(bank_account_id=id_) self.assertIsNone(self.bank_account_iban.mangopay_id) self.bank_account_iban.address = u"Sveavägen 41" self.bank_account_iban.save() self.bank_account_iban.create() MangoPayBankAccount.objects.get(id=self.bank_account_iban.id, mangopay_id=id_) self.assertEqual(self.bank_account_iban.account_type, BA_BIC_IBAN) self.assertIsNone(self.bank_account_iban.account_number) self.assertIsNotNone(self.bank_account_iban.iban)
FundedByMe/django-mangopay
mangopay/tests/bank_account.py
Python
mit
3,532
'''
August 15 2014
James Houghton <james.p.houghton@gmail.com>

Major edits June 22 2015
'''

from pysd.translators.SMILE2Py import SMILEParser
from lxml import etree
from pysd import builder


def translate_xmile(xmile_file):
    """ Translate an xmile model file into a python class.
    Functionality is currently limited. """
    py_model_file = build_python(xmile_file)
    return py_model_file


def build_python(xmile_file):
    """ Load the xml file and pass the relevant elements to the builder class """
    smile_parser = SMILEParser()

    xml_parser = etree.XMLParser(encoding="utf-8", recover=True)
    root = etree.parse(xmile_file, parser=xml_parser).getroot()
    NS = root.nsmap.values()[0]

    filename = '.'.join(xmile_file.split('.')[:-1]) + '.py'
    builder.new_model(filename)

    # add aux and flow nodes
    flaux_xpath = '//ns:model/ns:variables/ns:aux|//ns:model/ns:variables/ns:flow'
    for element in root.xpath(flaux_xpath, namespaces={'ns': NS}):
        identifier = smile_parser.parse(element.attrib['name'], context='defn')
        pyeqn = smile_parser.parse(element.xpath('ns:eqn', namespaces={'ns': NS})[0].text)
        builder.add_flaux(filename, identifier, pyeqn)

    # add nodes for the derivatives of stocks
    stock_xpath = '//ns:model/ns:variables/ns:stock'
    for element in root.xpath(stock_xpath, namespaces={'ns': NS}):
        identifier = smile_parser.parse(element.attrib['name'], context='defn')
        inflows = [smile_parser.parse(e.text)
                   for e in element.xpath('ns:inflow', namespaces={'ns': NS})]
        outflows = [smile_parser.parse(e.text)
                    for e in element.xpath('ns:outflow', namespaces={'ns': NS})]
        pyeqn = ' + '.join(inflows) if inflows else ''
        pyeqn += ' - ' + ' - '.join(outflows) if outflows else ''
        initial_value = smile_parser.parse(element.xpath('ns:eqn', namespaces={'ns': NS})[0].text)
        builder.add_stock(filename, identifier, pyeqn, initial_value)

    # Get timeseries information from the XMILE file
    tstart = smile_parser.parse(root.xpath('//ns:sim_specs/ns:start', namespaces={'ns': NS})[0].text)
    builder.add_flaux(filename, 'initial_time', tstart)

    tstop = smile_parser.parse(root.xpath('//ns:sim_specs/ns:stop', namespaces={'ns': NS})[0].text)
    builder.add_flaux(filename, 'final_time', tstop)

    dt = smile_parser.parse(root.xpath('//ns:sim_specs/ns:dt', namespaces={'ns': NS})[0].text)
    builder.add_flaux(filename, 'time_step', dt)

    return filename
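# A compact, stand-alone illustration of the namespaced-xpath pattern used in
# build_python(), run against an inline snippet.  Only lxml is required; the
# snippet and the namespace URI are illustrative, not a complete XMILE model.
if __name__ == '__main__':
    snippet = ('<xmile xmlns="http://docs.oasis-open.org/xmile/ns/XMILE/v1.0">'
               '<model><variables><aux name="Teacup Temperature"><eqn>180</eqn>'
               '</aux></variables></model></xmile>')
    demo_root = etree.fromstring(snippet)
    demo_ns = demo_root.nsmap.values()[0]
    aux = demo_root.xpath('//ns:model/ns:variables/ns:aux', namespaces={'ns': demo_ns})[0]
    assert aux.xpath('ns:eqn', namespaces={'ns': demo_ns})[0].text == '180'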
bpowers/pysd
pysd/translators/XMILE2Py.py
Python
mit
2,492
x = 0

def incr_x():
    # does not work: the assignment makes x local, so this raises UnboundLocalError
    x = x + 1

def incr_x2():
    global x
    x = x + 1 # does work: the global statement rebinds the module-level x
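# A short runnable check of the two behaviours above: incr_x2 rebinds the
# module-level x, while calling incr_x raises UnboundLocalError.
if __name__ == '__main__':
    incr_x2()
    assert x == 1
    try:
        incr_x()
    except UnboundLocalError:
        pass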
schmit/intro-python-course
lectures/code/functions_global.py
Python
mit
108
# ----------------------------------------------------------------------------- # LTL -> NBA # Copyright (C) Carsten Fritz, Björn Teegen 2002-2003 # Sponsored by Deutsche Forschungsgemeinschaft (DFG) # Distributed under the terms of the GNU Lesser General Public License # See the files README.txt and LICENSE.txt for more information # ----------------------------------------------------------------------------- LexerError = 'Lexer error' class Lexer: def __init__(self): self.WHITESPACES = '\t\n\f\r ' self.UPPER_CASE = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' self.LOWER_CASE = 'abcdefghijklmnopqrstuvwxyz' self.DIGITS = '0123456789' self.ID_FIRST = self.UPPER_CASE + self.LOWER_CASE self.ID_REST = self.ID_FIRST + self.DIGITS + '_' self.NUM_FIRST = self.DIGITS self.NUM_REST = self.NUM_FIRST self.STR_FIRST = '' self.COMMENT_STARTERS = [] self.KEYWORDS = [] self.TERMINALS = [] def getTokenClass(self, token): if not token: return None elif token in self.KEYWORDS: return 'keyword' elif token in self.TERMINALS: return token elif token[0] in self.ID_FIRST: return 'id' elif token[0] in self.NUM_FIRST: return 'num' elif token[0] in self.STR_FIRST: return 'str' else: raise LexerError, 'Illegal token: ' + repr(token) def skipComment(self, commentStarter): pass def skipWhitespacesAndComments(self): pass def readStr(self): starter = self.s[0] token = starter self.s = self.s[1:] while self.s and (self.s[0] != starter): token += self.s[0] self.s = self.s[1:] token += self.s[0] self.s = self.s[1:] return token def readRest(self, rest): token = self.s[0] self.s = self.s[1:] while self.s and self.s[0] in rest: token = token + self.s[0] self.s = self.s[1:] return token def readToken(self): self.skipWhitespacesAndComments() if not self.s: return None for terminal in self.TERMINALS: if self.s[:len(terminal)] == terminal: token = terminal self.s = self.s[len(terminal):] return token if self.s[0] in self.ID_FIRST: return self.readRest(self.ID_REST) elif self.s[0] in self.NUM_FIRST: return self.readRest(self.NUM_REST) elif self.s[0] in self.STR_FIRST: return self.readStr() raise LexerError, 'Illegal character: ' + repr(self.s[0]) class StringLexer(Lexer): def __init__(self, s): Lexer.__init__(self) self.s = s def skipWhitespacesAndComments(self): ready = 0 while not ready: while self.s and self.s[0] in self.WHITESPACES: self.s = self.s[1:] ready = 1 for commentStarter in self.COMMENT_STARTERS: if self.s[:len(commentStarter)] == commentStarter: ready = 0 self.s = self.s[len(commentStarter):] self.skipComment(commentStarter) break class FileLexer(Lexer): def __init__(self, f): Lexer.__init__(self) self.f = f self.s = f.readline() def skipWhitespacesAndComments(self): while self.s: ready = 0 while not ready: while self.s and self.s[0] in self.WHITESPACES: self.s = self.s[1:] ready = 1 for commentStarter in self.COMMENT_STARTERS: if self.s[:len(commentStarter)] == commentStarter: ready = 0 self.s = self.s[len(commentStarter):] self.skipComment(commentStarter) break if self.s: break self.s = self.f.readline()
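# A minimal driver for StringLexer (Python 2, matching the module above).
# The terminal set is illustrative and not tied to any particular grammar.
if __name__ == '__main__':
    lexer = StringLexer('G (req -> F ack)')
    lexer.TERMINALS = ['(', ')', '->']
    tokens = []
    token = lexer.readToken()
    while token is not None:
        tokens.append((lexer.getTokenClass(token), token))
        token = lexer.readToken()
    assert [t for (c, t) in tokens] == ['G', '(', 'req', '->', 'F', 'ack', ')']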
arafato/ltl2fsm
tools/ltlnba/lexlib.py
Python
mit
4,174
from django.db import models
from django.utils import timezone


class MyRelatedModel(models.Model):
    name = models.CharField(max_length=255)
    key = models.IntegerField()

    def __str__(self):
        return str(self.key)


class MyModel(models.Model):
    char_field = models.CharField(max_length=255, default='')
    text_field = models.TextField(default='')
    bool_field = models.BooleanField(default=False)
    int_field = models.IntegerField(default=0)
    datetime_field = models.DateTimeField(default=timezone.now)
    date_field = models.DateField(default=timezone.now)
    related_obj = models.ForeignKey(MyRelatedModel, null=True, related_name='+')
    many_related_objs = models.ManyToManyField(MyRelatedModel)
onebit0fme/django-loadjson
loadjson/tests/models.py
Python
mit
734
# coding: utf-8

# In[1]:

import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
get_ipython().magic(u'matplotlib inline')


# In[34]:

from sklearn.datasets import load_digits
data = load_digits()
X_tot,Y_tot = load_digits().data,load_digits().target


# In[8]:

len(X_tot)


# In[9]:

split = 500
X = X_tot[:split]
Y = Y_tot[:split]
X_test = X_tot[split:]
Y_test = Y_tot[split:]


# In[10]:

from scipy.spatial.distance import pdist,squareform


# In[11]:

# stochastic neighbor assignments
def stoch_neighbor_assignments(X):
    P = squareform(np.exp(-(pdist(X)**2)))
    P -= np.diag(P)
    return np.nan_to_num(P/P.sum(1)) # columns sum to 1


# In[62]:

P = squareform(np.exp(-(pdist(X)**2)))
P -= np.diag(P)
sum(P.sum(1)==0)


# In[ ]:


# In[12]:

P = stoch_neighbor_assignments(X)
np.sum(P != 0)


# In[13]:

np.max(P),np.min(P)


# In[14]:

np.sum(np.nan_to_num(P)!=0)


# In[15]:

plt.imshow(P,interpolation='none',cmap='Blues')
plt.colorbar()


# In[16]:

# probability that point 10 will be classified correctly
sum(P.T[10,Y==Y[10]])


# In[17]:

def correct_classification_prob(P,Y):
    p = np.array([sum(P.T[i,Y==Y[i]]) for i in range(len(P))])
    return p


# In[18]:

def correct_classification_prob_vec(P,Y):
    Y_ = np.vstack([Y==y for y in Y])
    return P[Y_]


# In[19]:

def exp_class_accuracy_vectorized(P,Y):
    Y_ = np.vstack([Y==y for y in Y])
    return P[Y_].sum()/len(Y)


# In[ ]:


# In[20]:

get_ipython().magic(u'timeit sum(correct_classification_prob(P,Y))')


# In[21]:

sum(correct_classification_prob(P,Y))


# In[22]:

get_ipython().magic(u'timeit correct_classification_prob_vec(P,Y)')


# In[23]:

get_ipython().magic(u'timeit exp_class_accuracy_vectorized(P,Y)')


# In[24]:

sum(correct_classification_prob_vec(P,Y))


# In[ ]:


# In[25]:

sum(correct_classification_prob_vec(P,Y))/len(Y)


# In[26]:

get_ipython().magic(u'timeit np.vstack([Y==y for y in Y])')


# In[27]:

# expected number of points correctly classified
p = correct_classification_prob(P,Y)
correct_class_expectation = sum(p) / len(p)
correct_class_expectation


# In[28]:

plt.hist(p,bins=50);


# In[29]:

sum(p<0.5)


# In[35]:

outlier_images = data.images[p<0.5]


# In[36]:

for image in outlier_images[:5]:
    plt.figure()
    plt.imshow(-image,cmap='gray',interpolation='none')


# In[37]:

np.argmin(p)


# In[38]:

plt.imshow(-data.images[np.argmin(p)],cmap='gray',interpolation='none')


# In[39]:

sum(p==1)


# In[40]:

from sklearn.decomposition import PCA
pca = PCA(n_components=2)
X_ = pca.fit_transform(X)
X_.shape


# In[41]:

P_ = stoch_neighbor_assignments(X_)
p_ = correct_classification_prob(P_,Y)
correct_class_expectation_ = sum(p_) / len(p_)
correct_class_expectation_


# In[42]:

plt.hist(p_,bins=50);


# In[43]:

# objective: find a transformation f(X) that
# maximizes correct_classification_prob(f(X),Y)


# In[44]:

A = np.random.randn(2,X.shape[1])


# In[45]:

A.dot(X.T).shape


# In[46]:

np.sum(X.dot(A.T) - A.dot(X.T).T)


# In[47]:

X.dot(A.T),A.dot(X.T).T


# In[48]:

X_ = X.dot(A.T)
X_.shape


# In[49]:

(A.T).shape


# In[50]:

X.dot(A.T)


# In[51]:

def objective(A,X,Y):
    assert(X.shape[1]==A.shape[0])
    X_ = X.dot(A)
    P_ = stoch_neighbor_assignments(X_)
    #p_ = correct_classification_prob(P_,Y)
    #correct_class_expectation_ = sum(p_) / len(p_)
    #return correct_class_expectation_
    return exp_class_accuracy_vectorized(P_,Y)


# In[52]:

A = npr.randn(64,2)


# In[53]:

objective(A,X,Y)


# In[54]:

plt.scatter(X.dot(A)[:,0],X.dot(A)[:,1],c=Y)


# In[55]:

get_ipython().magic(u'timeit objective(A,X,Y)')


# In[56]:

get_ipython().magic(u'prun objective(A,X,Y)')


# In[276]:

# for large inputs, use ball-trees instead of computing the full P_ij matrix


# In[57]:

# construct a function we can pass to scipy optimize
def objective_vec(A):
    A_ = np.reshape(A,(X.shape[1],2))
    return objective(A_,X,Y)


# In[58]:

A = npr.randn(X.shape[1]*2)
A_ = np.reshape(A,(X.shape[1],2))
A_.shape


# In[59]:

objective_vec(npr.randn(64*2))


# In[ ]:


# In[303]:

from scipy.optimize import minimize,basinhopping


# In[296]:

A_init = pca.components_.T
A_init.shape


# In[298]:

objective(A_init,X,Y)


# In[299]:

A_init_vec = np.reshape(A_init,np.prod(A_init.shape))


# In[301]:

obj_min = lambda A:-objective_vec(A)


# In[306]:

obj_min(A_init_vec)


# In[307]:

res = minimize(obj_min,A_init_vec,options={'maxiter':2,'disp':True})


# In[310]:

def gradient(func,x0,h=0.001):
    x0 = np.array(x0)#,dtype=float)
    y = func(x0)
    deriv = np.zeros(len(x0))
    for i in range(len(x0)):
        x = np.array(x0)
        x[i] += h
        deriv[i] = (func(x) - y)/h
    return deriv


# In[311]:

get_ipython().magic(u'timeit obj_min(A_init_vec)')


# In[312]:

len(A_init_vec)


# In[313]:

gradient(obj_min,A_init_vec)


# In[ ]:

def obj_grad(A,X,Y):
    # left unfinished in the notebook
    raise NotImplementedError


# In[ ]:

def gradient(A,X,Y):
    # unfinished analytic NCA gradient: the accumulation inside the loop
    # was never written down in the notebook
    X_ = X.dot(A)
    P_ = stoch_neighbor_assignments(X_)
    s = 0
    for i in range(len(X_)):
        pass # TODO: accumulate the gradient contribution of point i
    return s


# In[302]:

res = basinhopping(obj_min,A_init_vec,disp=True)


# In[308]:

from scipy.optimize import minimize, rosen, rosen_der


# In[309]:

x0 = [1.3, 0.7, 0.8, 1.9, 1.2]
res = minimize(rosen, x0, method='Nelder-Mead')
res.x


# In[63]:

from autograd import grad
import autograd.numpy as np


# In[64]:

grad(objective_vec)(npr.randn(64*2))


# In[ ]:
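# The cells above stop at a single call to grad(objective_vec). A minimal
# sketch of actually running the optimization follows; the step size and
# iteration count are illustrative assumptions, and a finite-difference
# gradient is used because stoch_neighbor_assignments goes through SciPy's
# pdist, which autograd cannot trace.

def fd_gradient(func, x0, h=1e-3):
    # forward-difference approximation of the gradient of func at x0
    x0 = np.array(x0)
    y = func(x0)
    g = np.zeros(len(x0))
    for i in range(len(x0)):
        x = np.array(x0)
        x[i] += h
        g[i] = (func(x) - y) / h
    return g

def nca_gradient_ascent(A_vec, n_steps=20, lr=0.5):
    # fixed-step gradient ascent on objective_vec; each step costs
    # len(A_vec) + 1 objective evaluations
    for _ in range(n_steps):
        A_vec = A_vec + lr * fd_gradient(objective_vec, A_vec)
    return A_vec

# A_opt = nca_gradient_ascent(npr.randn(64*2))
# objective_vec(A_opt) # should beat the random-projection baseline above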
maxentile/msm-learn
projects/metric-learning/Neighborhood components analysis.py
Python
mit
5,397
"""Remove the first and last char from a string.""" def remove_char(s): """Remove the first and last char from a string.""" return s[1:-1]
pasaunders/code-katas
src/remove_chars.py
Python
mit
149
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# Internal error.
INTERNALERROR = 'InternalError'

# Correction error.
INTERNALERROR_CORRECTERROR = 'InternalError.CorrectError'

# Recognition error.
INTERNALERROR_OCRERROR = 'InternalError.OcrError'

# Internal server error: initialization failed.
INTERNALERROR_OCRSERVERINTERNERROR = 'InternalError.OcrServerInternError'

# Other error.
INTERNALERROR_OTHERERROR = 'InternalError.OtherError'

# Server overloaded; please contact customer service.
INTERNALERROR_OVERLOADERROR = 'InternalError.OverLoadError'

# Image recognition error.
INTERNALERROR_RECOGNIZEERROR = 'InternalError.RecognizeError'

# Unable to connect to the image download server.
INTERNALERROR_SERVERCONNECTDOWNLOADERROR = 'InternalError.ServerConnectDownloadError'

# Image splitting error.
INTERNALERROR_SPLITERROR = 'InternalError.SplitError'

# Parameter is empty.
INVALIDPARAMETER_EMPTYPARAMETERERROR = 'InvalidParameter.EmptyParameterError'

# The supplied parameter is invalid.
INVALIDPARAMETER_INPUTERROR = 'InvalidParameter.InputError'

# The task does not exist.
INVALIDPARAMETER_TASKNOTFOUND = 'InvalidParameter.TaskNotFound'

# The Appid is invalid.
INVALIDPARAMETERVALUE_APPIDINVALIDERROR = 'InvalidParameterValue.AppidInvalidError'

# Image decoding failed; please verify the input.
INVALIDPARAMETERVALUE_DECODEIMAGEERROR = 'InvalidParameterValue.DecodeImageError'

# Failed to download the image from the url.
INVALIDPARAMETERVALUE_DOWNLOADIMAGEFAILERROR = 'InvalidParameterValue.DownloadImageFailError'

# Image data is empty.
INVALIDPARAMETERVALUE_EMPTYIMAGEERROR = 'InvalidParameterValue.EmptyImageError'

# Image download failed.
INVALIDPARAMETERVALUE_IMAGEDOWNLOADFAILERROR = 'InvalidParameterValue.ImageDownloadFailError'

# The image exceeds the download limit.
INVALIDPARAMETERVALUE_IMAGESIZEEXCEEDERROR = 'InvalidParameterValue.ImageSizeExceedError'

# The image is too large.
INVALIDPARAMETERVALUE_IMAGETOOBIGERROR = 'InvalidParameterValue.ImageTooBigError'

# Input error; please verify the InputType parameter.
INVALIDPARAMETERVALUE_INPUTTYPEVALUEERROR = 'InvalidParameterValue.InputTypeValueError'

# The SessionId is invalid.
INVALIDPARAMETERVALUE_SESSIONERROR = 'InvalidParameterValue.SessionError'

# The image request URL is malformed.
INVALIDPARAMETERVALUE_URLFROMATIVADLIDERROR = 'InvalidParameterValue.UrlFromatIvadlidError'

# Rate limit exceeded.
LIMITEXCEEDED_FREQLIMITFORBIDDENACCESSERROR = 'LimitExceeded.FreqLimitForbiddenAccessError'

# Unable to find the user; please confirm the service has been activated in
# the console and that the correct ECCAPPID is used.
RESOURCENOTFOUND_CANNOTFINDUSER = 'ResourceNotFound.CannotFindUser'

# Invalid service name.
RESOURCENOTFOUND_SERVERNAMENOTEXISTINLICENSEERROR = 'ResourceNotFound.ServerNameNotExistInLicenseError'

# The service has not been activated or the account is in arrears.
RESOURCEUNAVAILABLE_AUTHORIZEERROR = 'ResourceUnavailable.AuthorizeError'

# The license is invalid.
UNAUTHORIZEDOPERATION_LICENSEINVALIDFORBIDDENACCESSERROR = 'UnauthorizedOperation.LicenseInvalidForbiddenAccessError'

# The service is not authorized in the license.
UNAUTHORIZEDOPERATION_SERVERNAMEUNAUTHORIZEDINERROR = 'UnauthorizedOperation.ServerNameUnauthorizedInError'
tzpBingo/github-trending
codespace/python/tencentcloud/ecc/v20181213/errorcodes.py
Python
mit
3,671
import cmath
from math import pi, ceil

import numpy as np
from numpy import sin, cos
from scipy.interpolate import interp1d

"""
References:

[Majkrzak2003] C. F. Majkrzak, N. F. Berk: Physica B 336 (2003) 27-38
    Phase sensitive reflectometry and the unambiguous determination of
    scattering length density profiles
"""


def interpolate(x, fx):
    return interp1d(x, fx, bounds_error=False, fill_value=0)


def refr_idx(q, sld):
    """
    Calculates the refractive index with given SLD [\AA^{-2}] and
    wavevector transfer q [\AA^{-1}]. The units can be arbitrarily chosen,
    but they must satisfy that sld/q**2 has unit [1]. The arguments should
    not be scaled by any constants. For example
        q = 0.01
        sld = 1e-6

    The refractive index is complex if q < q_c (being the critical edge)
    and it is completely real if q >= q_c.
    """
    return cmath.sqrt(1 - 16 * pi * sld / (q ** 2))


def reflection_matrix(q, sld, thickness, as_matrix=False):
    """
    Calculates a reflection matrix used for calculating the reflectivity of
    a slab of material (sld, thickness) for the wave vector transfer q.

    See C.F. Majkrzak, N. F. Berk: Physical Review B Vol. 52 Nr 15, 1995:
    Exact determination of the phase in neutron reflectometry, Equation (1)

    If as_matrix is True, a matrix 2x2 will be returned, if not, then the
    matrix indices are returned as a, b, c, d
    """
    n = refr_idx(q, sld)
    theta = 0.5 * q * n * thickness
    a, b, c, d = cos(theta), 1 / n * sin(theta), -n * sin(theta), cos(theta)
    if as_matrix:
        return np.array([[a, b], [c, d]])
    return a, b, c, d


class SLDProfile(object):
    def __init__(self):
        pass

    def as_matrix(self, q):
        """
        Returns the matrix coefficients in the abeles formalism.
        Returns w, x, y, z corresponding to the matrix [[w, x], [y, z]]
        """
        return 0, 0, 0, 0


class ConstantSLDProfile(SLDProfile):
    def __init__(self, sld, thickness, sigma=0):
        self._sld = float(sld)
        self._d = float(thickness)
        self._r = float(sigma)
        if self._r > 0:
            raise NotImplementedError("Roughness not implemented yet")

    def as_matrix(self, q):
        return reflection_matrix(q, self._sld, self._d)


class ConcatSLDProfile(SLDProfile):
    """
    The first element in sld_profiles is closest to the substrate
    """
    def __init__(self, sld_profiles, reverse=False):
        self._slds = sld_profiles
        self._reverse = reverse

    def as_matrix(self, q):
        m = len(self._slds) * [None]
        for i in range(0, len(self._slds)):
            a, b, c, d = self._slds[i].as_matrix(q)
            m[i] = np.array([[a, b], [c, d]])
        if self._reverse:
            m = list(reversed(m))
        m = np.linalg.multi_dot(m)
        return m[0][0], m[0][1], m[1][0], m[1][1]


class FunctionSLDProfile(SLDProfile):
    def __init__(self, function, support, dx=0.1):
        self._f = function
        self._supp = support
        self._dx = dx
        self._xspace = np.linspace(support[0], support[1],
                                   ceil((support[1] - support[0]) * 1 / dx))
        self._feval = [self._f(x) for x in self._xspace]
        self._m = [ConstantSLDProfile(fx, dx) for fx in self._feval]
        self._concat = ConcatSLDProfile(self._m, reverse=False)

    def as_matrix(self, q):
        return self._concat.as_matrix(q)


class SlabsSLDProfile(SLDProfile):
    def __init__(self, z, rho):
        self._z = z
        self._rho = rho

    @classmethod
    def from_sample(cls, sample, dz=0.1, dA=1e-4, probe=None):
        from refl1d.probe import NeutronProbe
        from refl1d.profile import Microslabs

        if probe is None:
            # The values T and L do not matter for 'just' building the SLD profile
            probe = NeutronProbe(T=[1.0], L=[1.0])

        slabs = Microslabs(1, dz)
        sample.render(probe, slabs)
        slabs.finalize(True, dA)
        # ignore the imaginary part, this should be zero anyway
        z, rho, irho = slabs.smooth_profile(dz)
        if any(irho >= 1e-2):
            raise RuntimeWarning("Sample contains absorptive SLD (imag >= 1e-2). "
                                 "Reconstruction techniques do not support this.")
        # refl1d likes to use SLD * 1e6
        return cls(z, rho * 1e-6)

    @classmethod
    def from_slabs(cls, thickness, sld, roughness, precision=1):
        # You should rather use the from_sample method, since this is easier
        # to understand. This method here is just a kind of 'fallback'
        # if you don't wanna have the overhead of building the Stacks in refl1d
        # just to put the data in here..

        # WARNING: from_slabs and from_sample do not create the same slab profile
        # they are shifted profiles (by I'd guess 3*roughness[0])
        from refl1d.profile import build_profile

        w = thickness
        sld = sld
        # Means, the first layer is the substrate and we only have to include
        # the roughness effect. To do so, select a proper thickness (> 0) such
        # that the convolution with the gaussian kernel is sufficiently approximated
        if w[0] == 0:
            # refl1d uses 3 sigma usually
            # why 3?
            # that's 3 sigma and the gaussian smoothing is nearly zero out there
            # thus the 'substrate' layer is big enough to be approximated by this
            # ofc bigger sigma values (>= 5) are better, but they need more
            # computation
            w[0] = 3 * roughness[0]

        z = np.linspace(0, sum(w) + roughness[-1] * 5,
                        int(precision * sum(w)) + 1)
        offsets = np.cumsum(w)
        rho = build_profile(z, offsets, roughness, sld)

        return cls(z, rho)

    def thickness(self):
        return max(self._z) - min(self._z)

    def plot_profile(self, offset=0, reverse=False):
        import pylab
        rho = self._rho
        if reverse:
            rho = list(reversed(self._rho))
        pylab.plot(self._z + offset, rho)

    def as_matrix(self, q):
        from functools import reduce
        # len(dz) = len(self._z) - 1
        dz = np.diff(self._z)
        m = len(dz) * [None]
        for idx in range(0, len(dz)):
            m[idx] = reflection_matrix(q, self._rho[idx], dz[idx],
                                       as_matrix=True)
        # There is still some potential here
        # What's happening here:
        #   m1 * m2 * m3 * m4 * m5 ... in a sequential manner
        # maybe it's faster if you do something like
        #   (m1 * m2) * (m3 * m4) * ...
        # and redo the grouping in the next step. this should then be O(log n)
        # compared to the seq. multiplication which is O(n)....
        # BUT: this has to be done in C code, not in a python implementation :/
        m = reduce(np.dot, m)
        return m[0][0], m[0][1], m[1][0], m[1][1]


class Reflectivity(object):
    def __init__(self, sld_profile, fronting, backing):
        assert isinstance(sld_profile, SLDProfile)
        self._sld = sld_profile
        self._f, self._b = fronting, backing

        # The input should be of the magnitude 1e-6 ... 1e-5
        if any(abs(np.array([fronting, backing])) >= 1e-1):
            raise RuntimeWarning("Given fronting/backing SLD values are too high")

    def reflection(self, q_space, as_function=True):
        r = np.ones(len(q_space), dtype=complex)

        for idx, q in enumerate(q_space):
            if abs(q) < 1e-10:
                continue
            # See [Majkrzak2003] equation (17)
            f, h = refr_idx(q, self._f), refr_idx(q, self._b)
            A, B, C, D = self._sld.as_matrix(q)
            r[idx] = (f * h * B + C + 1j * (f * D - h * A)) / \
                     (f * h * B - C + 1j * (f * D + h * A))

        if as_function:
            return self.to_function(r, q_space, square=False)
        else:
            return r

    @staticmethod
    def to_function(r, q_space, square=False):
        real = interpolate(q_space, r.real)
        imag = interpolate(q_space, r.imag)
        if square:
            return lambda q: real(q)**2 + imag(q)**2
        else:
            return lambda q: real(q) + 1j * imag(q)

    def reflectivity(self, q_space):
        r = self.reflection(q_space)
        return lambda q: abs(r(q)) ** 2

    def plot(self, q_space):
        import pylab
        R = self.reflectivity(q_space)
        pylab.plot(q_space, R(q_space))
        return R
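# A minimal usage sketch (the SLD, thickness and q values below are
# illustrative assumptions, not numbers from the cited references):
if __name__ == "__main__":
    # 100 AA-thick film on a Si-like backing, with air as the fronting medium
    film = ConstantSLDProfile(sld=4.5e-6, thickness=100.0)
    refl = Reflectivity(film, fronting=0.0, backing=2.07e-6)
    q = np.linspace(0.005, 0.2, 200)
    R = refl.reflectivity(q)  # callable returning |r(q)|^2
    print(R(0.05))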
reflectometry/direfl
direfl/api/sld_profile.py
Python
mit
8,500
# coding: utf-8 from sqlalchemy import BINARY, Column, Float, Index, Integer, String, VARBINARY from sqlalchemy import String, Unicode, ForeignKey from sqlalchemy.orm import relationship, backref from dbdatetime import dbdatetime from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() metadata = Base.metadata class SystemActionLog(Base): __tablename__ = 'system_actionlog' __table_args__ = ( Index('key_action', 'actorHash', 'action', 'epoch'), ) id = Column(Integer, primary_key=True) actorHash = Column(BINARY(12), nullable=False) actorIdentity = Column(Unicode(255), nullable=False) action = Column(Unicode(32), nullable=False) score = Column(Float(asdecimal=True), nullable=False) epoch = Column(Integer, nullable=False, index=True) class SystemDestructionLog(Base): __tablename__ = 'system_destructionlog' id = Column(Integer, primary_key=True) objectClass = Column(Unicode(128), nullable=False) rootLogID = Column(Integer) objectPHID = Column(String) objectMonogram = Column(Unicode(64)) epoch = Column(Integer, nullable=False, index=True)
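# A minimal query sketch (the SQLite URL is a placeholder assumption; point
# it at a real Phabricator database in practice):
if __name__ == "__main__":
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///phabricator.db")
    session = sessionmaker(bind=engine)()
    recent = (session.query(SystemActionLog)
              .order_by(SystemActionLog.epoch.desc())
              .limit(10))
    for row in recent:
        print(row.action, row.epoch)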
veblush/PyPhabricatorDb
pyphabricatordb/system.py
Python
mit
1,153
import collections import numbers import re import warnings from rope.base import ast, codeanalyze, exceptions from rope.base.utils import pycompat try: basestring except NameError: basestring = (str, bytes) def get_patched_ast(source, sorted_children=False): """Adds ``region`` and ``sorted_children`` fields to nodes Adds ``sorted_children`` field only if `sorted_children` is True. """ return patch_ast(ast.parse(source), source, sorted_children) def patch_ast(node, source, sorted_children=False): """Patches the given node After calling, each node in `node` will have a new field named `region` that is a tuple containing the start and end offsets of the code that generated it. If `sorted_children` is true, a `sorted_children` field will be created for each node, too. It is a list containing child nodes as well as whitespaces and comments that occur between them. """ if hasattr(node, 'region'): return node walker = _PatchingASTWalker(source, children=sorted_children) ast.call_for_nodes(node, walker) return node def node_region(patched_ast_node): """Get the region of a patched ast node""" return patched_ast_node.region def write_ast(patched_ast_node): """Extract source form a patched AST node with `sorted_children` field If the node is patched with sorted_children turned off you can use `node_region` function for obtaining code using module source code. """ result = [] for child in patched_ast_node.sorted_children: if isinstance(child, ast.AST): result.append(write_ast(child)) else: result.append(child) return ''.join(result) class MismatchedTokenError(exceptions.RopeError): pass class _PatchingASTWalker(object): def __init__(self, source, children=False): self.source = _Source(source) self.children = children self.lines = codeanalyze.SourceLinesAdapter(source) self.children_stack = [] Number = object() String = object() semicolon_or_as_in_except = object() exec_open_paren_or_space = object() exec_close_paren_or_space = object() exec_in_or_comma = object() def __call__(self, node): method = getattr(self, '_' + node.__class__.__name__, None) if method is not None: return method(node) # ???: Unknown node; what should we do here? warnings.warn('Unknown node type <%s>; please report!' % node.__class__.__name__, RuntimeWarning) node.region = (self.source.offset, self.source.offset) if self.children: node.sorted_children = ast.get_children(node) def _handle(self, node, base_children, eat_parens=False, eat_spaces=False): if hasattr(node, 'region'): # ???: The same node was seen twice; what should we do? warnings.warn( 'Node <%s> has been already patched; please report!' 
% node.__class__.__name__, RuntimeWarning) return base_children = collections.deque(base_children) self.children_stack.append(base_children) children = collections.deque() formats = [] suspected_start = self.source.offset start = suspected_start first_token = True while base_children: child = base_children.popleft() if child is None: continue offset = self.source.offset if isinstance(child, ast.AST): ast.call_for_nodes(child, self) token_start = child.region[0] else: if child is self.String: region = self.source.consume_string( end=self._find_next_statement_start()) elif child is self.Number: region = self.source.consume_number() elif child == '!=': # INFO: This has been added to handle deprecated ``<>`` region = self.source.consume_not_equal() elif child == self.semicolon_or_as_in_except: # INFO: This has been added to handle deprecated # semicolon in except region = self.source.consume_except_as_or_semicolon() elif child == self.exec_open_paren_or_space: # These three cases handle the differences between # the deprecated exec statement and the exec # function. region = self.source.consume_exec_open_paren_or_space() elif child == self.exec_in_or_comma: region = self.source.consume_exec_in_or_comma() elif child == self.exec_close_paren_or_space: region = self.source.consume_exec_close_paren_or_space() else: if hasattr(ast, 'JoinedStr') and isinstance(node, (ast.JoinedStr, ast.FormattedValue)): region = self.source.consume_joined_string(child) else: region = self.source.consume(child) child = self.source[region[0]:region[1]] token_start = region[0] if not first_token: formats.append(self.source[offset:token_start]) if self.children: children.append(self.source[offset:token_start]) else: first_token = False start = token_start if self.children: children.append(child) start = self._handle_parens(children, start, formats) if eat_parens: start = self._eat_surrounding_parens( children, suspected_start, start) if eat_spaces: if self.children: children.appendleft(self.source[0:start]) end_spaces = self.source[self.source.offset:] self.source.consume(end_spaces) if self.children: children.append(end_spaces) start = 0 if self.children: node.sorted_children = children node.region = (start, self.source.offset) self.children_stack.pop() def _handle_parens(self, children, start, formats): """Changes `children` and returns new start""" opens, closes = self._count_needed_parens(formats) old_end = self.source.offset new_end = None for i in range(closes): new_end = self.source.consume(')')[1] if new_end is not None: if self.children: children.append(self.source[old_end:new_end]) new_start = start for i in range(opens): new_start = self.source.rfind_token('(', 0, new_start) if new_start != start: if self.children: children.appendleft(self.source[new_start:start]) start = new_start return start def _eat_surrounding_parens(self, children, suspected_start, start): index = self.source.rfind_token('(', suspected_start, start) if index is not None: old_start = start old_offset = self.source.offset start = index if self.children: children.appendleft(self.source[start + 1:old_start]) children.appendleft('(') token_start, token_end = self.source.consume(')') if self.children: children.append(self.source[old_offset:token_start]) children.append(')') return start def _count_needed_parens(self, children): start = 0 opens = 0 for child in children: if not isinstance(child, basestring): continue if child == '' or child[0] in '\'"': continue index = 0 while index < len(child): if child[index] == ')': if opens > 0: opens -= 
1 else: start += 1 if child[index] == '(': opens += 1 if child[index] == '#': try: index = child.index('\n', index) except ValueError: break index += 1 return start, opens def _find_next_statement_start(self): for children in reversed(self.children_stack): for child in children: if isinstance(child, ast.stmt): return child.col_offset \ + self.lines.get_line_start(child.lineno) return len(self.source.source) _operators = {'And': 'and', 'Or': 'or', 'Add': '+', 'Sub': '-', 'Mult': '*', 'Div': '/', 'Mod': '%', 'Pow': '**', 'LShift': '<<', 'RShift': '>>', 'BitOr': '|', 'BitAnd': '&', 'BitXor': '^', 'FloorDiv': '//', 'Invert': '~', 'Not': 'not', 'UAdd': '+', 'USub': '-', 'Eq': '==', 'NotEq': '!=', 'Lt': '<', 'LtE': '<=', 'Gt': '>', 'GtE': '>=', 'Is': 'is', 'IsNot': 'is not', 'In': 'in', 'NotIn': 'not in'} def _get_op(self, node): return self._operators[node.__class__.__name__].split(' ') def _Attribute(self, node): self._handle(node, [node.value, '.', node.attr]) def _Assert(self, node): children = ['assert', node.test] if node.msg: children.append(',') children.append(node.msg) self._handle(node, children) def _Assign(self, node): children = self._child_nodes(node.targets, '=') children.append('=') children.append(node.value) self._handle(node, children) def _AugAssign(self, node): children = [node.target] children.extend(self._get_op(node.op)) children.extend(['=', node.value]) self._handle(node, children) def _AnnAssign(self, node): children = [node.target, ':', node.annotation] if node.value is not None: children.append('=') children.append(node.value) self._handle(node, children) def _Repr(self, node): self._handle(node, ['`', node.value, '`']) def _BinOp(self, node): children = [node.left] + self._get_op(node.op) + [node.right] self._handle(node, children) def _BoolOp(self, node): self._handle(node, self._child_nodes(node.values, self._get_op(node.op)[0])) def _Break(self, node): self._handle(node, ['break']) def _Call(self, node): def _arg_sort_key(node): if isinstance(node, ast.keyword): return (node.value.lineno, node.value.col_offset) return (node.lineno, node.col_offset) children = [node.func, '('] unstarred_args = [] starred_and_keywords = list(node.keywords) for i, arg in enumerate(node.args): if hasattr(ast, 'Starred') and isinstance(arg, ast.Starred): starred_and_keywords.append(arg) else: unstarred_args.append(arg) if getattr(node, 'starargs', None): starred_and_keywords.append(node.starargs) starred_and_keywords.sort(key=_arg_sort_key) children.extend(self._child_nodes(unstarred_args, ',')) # positional args come before keywords, *args comes after all # positional args, and **kwargs comes last if starred_and_keywords: if len(children) > 2: children.append(',') for i, arg in enumerate(starred_and_keywords): if arg == getattr(node, 'starargs', None): children.append('*') children.append(arg) if i + 1 < len(starred_and_keywords): children.append(',') if getattr(node, 'kwargs', None): if len(children) > 2: children.append(',') children.extend(['**', node.kwargs]) children.append(')') self._handle(node, children) def _ClassDef(self, node): children = [] if getattr(node, 'decorator_list', None): for decorator in node.decorator_list: children.append('@') children.append(decorator) children.extend(['class', node.name]) if node.bases: children.append('(') children.extend(self._child_nodes(node.bases, ',')) children.append(')') children.append(':') children.extend(node.body) self._handle(node, children) def _Compare(self, node): children = [] children.append(node.left) for op, expr in 
zip(node.ops, node.comparators): children.extend(self._get_op(op)) children.append(expr) self._handle(node, children) def _Delete(self, node): self._handle(node, ['del'] + self._child_nodes(node.targets, ',')) def _Constant(self, node): if isinstance(node.value, basestring): self._handle(node, [self.String]) return if any(node.value is v for v in [True, False, None]): self._handle(node, [str(node.value)]) return if isinstance(node.value, numbers.Number): self._handle(node, [self.Number]) return if node.value is Ellipsis: self._handle(node, ['...']) return assert False def _Num(self, node): self._handle(node, [self.Number]) def _Str(self, node): self._handle(node, [self.String]) def _Bytes(self, node): self._handle(node, [self.String]) def _JoinedStr(self, node): def start_quote_char(): possible_quotes = [(self.source.source.find(q, start, end), q) for q in QUOTE_CHARS] quote_pos, quote_char = min((pos, q) for pos, q in possible_quotes if pos != -1) return self.source[start:quote_pos + len(quote_char)] def end_quote_char(): possible_quotes = [(self.source.source.rfind(q, start, end), q) for q in reversed(QUOTE_CHARS)] _, quote_pos, quote_char = max((len(q), pos, q) for pos, q in possible_quotes if pos != -1) return self.source[end - len(quote_char):end] QUOTE_CHARS = ['"""', "'''", '"', "'"] offset = self.source.offset start, end = self.source.consume_string( end=self._find_next_statement_start(), ) self.source.offset = offset children = [] children.append(start_quote_char()) for part in node.values: if isinstance(part, ast.FormattedValue): children.append(part) children.append(end_quote_char()) self._handle(node, children) def _FormattedValue(self, node): children = [] children.append('{') children.append(node.value) if node.format_spec: children.append(':') for val in node.format_spec.values: if isinstance(val, ast.FormattedValue): children.append(val.value) else: children.append(val.s) children.append('}') self._handle(node, children) def _Continue(self, node): self._handle(node, ['continue']) def _Dict(self, node): children = [] children.append('{') if node.keys: for index, (key, value) in enumerate(zip(node.keys, node.values)): if key is None: # PEP-448 dict unpacking: {a: b, **unpack} children.extend(['**', value]) else: children.extend([key, ':', value]) if index < len(node.keys) - 1: children.append(',') children.append('}') self._handle(node, children) def _Ellipsis(self, node): self._handle(node, ['...']) def _Expr(self, node): self._handle(node, [node.value]) def _NamedExpr(self, node): children = [node.target, ':=', node.value] self._handle(node, children) def _Exec(self, node): children = ['exec', self.exec_open_paren_or_space, node.body] if node.globals: children.extend([self.exec_in_or_comma, node.globals]) if node.locals: children.extend([',', node.locals]) children.append(self.exec_close_paren_or_space) self._handle(node, children) def _ExtSlice(self, node): children = [] for index, dim in enumerate(node.dims): if index > 0: children.append(',') children.append(dim) self._handle(node, children) def _handle_for_loop_node(self, node, is_async): if is_async: children = ['async', 'for'] else: children = ['for'] children.extend([node.target, 'in', node.iter, ':']) children.extend(node.body) if node.orelse: children.extend(['else', ':']) children.extend(node.orelse) self._handle(node, children) def _For(self, node): self._handle_for_loop_node(node, is_async=False) def _AsyncFor(self, node): self._handle_for_loop_node(node, is_async=True) def _ImportFrom(self, node): children = 
['from'] if node.level: children.append('.' * node.level) # see comment at rope.base.ast.walk children.extend([node.module or '', 'import']) children.extend(self._child_nodes(node.names, ',')) self._handle(node, children) def _alias(self, node): children = [node.name] if node.asname: children.extend(['as', node.asname]) self._handle(node, children) def _handle_function_def_node(self, node, is_async): children = [] try: decorators = getattr(node, 'decorator_list') except AttributeError: decorators = getattr(node, 'decorators', None) if decorators: for decorator in decorators: children.append('@') children.append(decorator) if is_async: children.extend(['async', 'def']) else: children.extend(['def']) children.extend([node.name, '(', node.args]) children.extend([')', ':']) children.extend(node.body) self._handle(node, children) def _FunctionDef(self, node): self._handle_function_def_node(node, is_async=False) def _AsyncFunctionDef(self, node): self._handle_function_def_node(node, is_async=True) def _arguments(self, node): children = [] args = list(node.args) defaults = [None] * (len(args) - len(node.defaults)) + \ list(node.defaults) for index, (arg, default) in enumerate(zip(args, defaults)): if index > 0: children.append(',') self._add_args_to_children(children, arg, default) if node.vararg is not None: if args: children.append(',') children.extend(['*', pycompat.get_ast_arg_arg(node.vararg)]) if node.kwarg is not None: if args or node.vararg is not None: children.append(',') children.extend(['**', pycompat.get_ast_arg_arg(node.kwarg)]) self._handle(node, children) def _add_args_to_children(self, children, arg, default): if isinstance(arg, (list, tuple)): self._add_tuple_parameter(children, arg) else: children.append(arg) if default is not None: children.append('=') children.append(default) def _add_tuple_parameter(self, children, arg): children.append('(') for index, token in enumerate(arg): if index > 0: children.append(',') if isinstance(token, (list, tuple)): self._add_tuple_parameter(children, token) else: children.append(token) children.append(')') def _GeneratorExp(self, node): children = [node.elt] children.extend(node.generators) self._handle(node, children, eat_parens=True) def _comprehension(self, node): children = ['for', node.target, 'in', node.iter] if node.ifs: for if_ in node.ifs: children.append('if') children.append(if_) self._handle(node, children) def _Global(self, node): children = self._child_nodes(node.names, ',') children.insert(0, 'global') self._handle(node, children) def _If(self, node): if self._is_elif(node): children = ['elif'] else: children = ['if'] children.extend([node.test, ':']) children.extend(node.body) if node.orelse: if len(node.orelse) == 1 and self._is_elif(node.orelse[0]): pass else: children.extend(['else', ':']) children.extend(node.orelse) self._handle(node, children) def _is_elif(self, node): if not isinstance(node, ast.If): return False offset = self.lines.get_line_start(node.lineno) + node.col_offset word = self.source[offset:offset + 4] # XXX: This is a bug; the offset does not point to the first alt_word = self.source[offset - 5:offset - 1] return 'elif' in (word, alt_word) def _IfExp(self, node): return self._handle(node, [node.body, 'if', node.test, 'else', node.orelse]) def _Import(self, node): children = ['import'] children.extend(self._child_nodes(node.names, ',')) self._handle(node, children) def _keyword(self, node): children = [] if node.arg is None: children.append(node.value) else: children.extend([node.arg, '=', node.value]) 
self._handle(node, children) def _Lambda(self, node): self._handle(node, ['lambda', node.args, ':', node.body]) def _List(self, node): self._handle(node, ['['] + self._child_nodes(node.elts, ',') + [']']) def _ListComp(self, node): children = ['[', node.elt] children.extend(node.generators) children.append(']') self._handle(node, children) def _Set(self, node): if node.elts: self._handle(node, ['{'] + self._child_nodes(node.elts, ',') + ['}']) return # Python doesn't have empty set literals warnings.warn('Tried to handle empty <Set> literal; please report!', RuntimeWarning) self._handle(node, ['set(', ')']) def _SetComp(self, node): children = ['{', node.elt] children.extend(node.generators) children.append('}') self._handle(node, children) def _DictComp(self, node): children = ['{'] children.extend([node.key, ':', node.value]) children.extend(node.generators) children.append('}') self._handle(node, children) def _Module(self, node): self._handle(node, list(node.body), eat_spaces=True) def _Name(self, node): self._handle(node, [node.id]) def _NameConstant(self, node): self._handle(node, [str(node.value)]) def _arg(self, node): self._handle(node, [node.arg]) def _Pass(self, node): self._handle(node, ['pass']) def _Print(self, node): children = ['print'] if node.dest: children.extend(['>>', node.dest]) if node.values: children.append(',') children.extend(self._child_nodes(node.values, ',')) if not node.nl: children.append(',') self._handle(node, children) def _Raise(self, node): def get_python3_raise_children(node): children = ['raise'] if node.exc: children.append(node.exc) if node.cause: children.append(node.cause) return children def get_python2_raise_children(node): children = ['raise'] if node.type: children.append(node.type) if node.inst: children.append(',') children.append(node.inst) if node.tback: children.append(',') children.append(node.tback) return children if pycompat.PY2: children = get_python2_raise_children(node) else: children = get_python3_raise_children(node) self._handle(node, children) def _Return(self, node): children = ['return'] if node.value: children.append(node.value) self._handle(node, children) def _Sliceobj(self, node): children = [] for index, slice in enumerate(node.nodes): if index > 0: children.append(':') if slice: children.append(slice) self._handle(node, children) def _Index(self, node): self._handle(node, [node.value]) def _Subscript(self, node): self._handle(node, [node.value, '[', node.slice, ']']) def _Slice(self, node): children = [] if node.lower: children.append(node.lower) children.append(':') if node.upper: children.append(node.upper) if node.step: children.append(':') children.append(node.step) self._handle(node, children) def _TryFinally(self, node): # @todo fixme is_there_except_handler = False not_empty_body = True if len(node.finalbody) == 1: if pycompat.PY2: is_there_except_handler = isinstance(node.body[0], ast.TryExcept) not_empty_body = not bool(len(node.body)) elif pycompat.PY3: try: is_there_except_handler = isinstance(node.handlers[0], ast.ExceptHandler) not_empty_body = True except IndexError: pass children = [] if not_empty_body or not is_there_except_handler: children.extend(['try', ':']) children.extend(node.body) if pycompat.PY3: children.extend(node.handlers) children.extend(['finally', ':']) children.extend(node.finalbody) self._handle(node, children) def _TryExcept(self, node): children = ['try', ':'] children.extend(node.body) children.extend(node.handlers) if node.orelse: children.extend(['else', ':']) 
children.extend(node.orelse) self._handle(node, children) def _Try(self, node): if len(node.finalbody): self._TryFinally(node) else: self._TryExcept(node) def _ExceptHandler(self, node): self._excepthandler(node) def _excepthandler(self, node): # self._handle(node, [self.semicolon_or_as_in_except]) children = ['except'] if node.type: children.append(node.type) if node.name: children.append(self.semicolon_or_as_in_except) children.append(node.name) children.append(':') children.extend(node.body) self._handle(node, children) def _Tuple(self, node): if node.elts: self._handle(node, self._child_nodes(node.elts, ','), eat_parens=True) else: self._handle(node, ['(', ')']) def _UnaryOp(self, node): children = self._get_op(node.op) children.append(node.operand) self._handle(node, children) def _Await(self, node): children = ['await'] if node.value: children.append(node.value) self._handle(node, children) def _Yield(self, node): children = ['yield'] if node.value: children.append(node.value) self._handle(node, children) def _While(self, node): children = ['while', node.test, ':'] children.extend(node.body) if node.orelse: children.extend(['else', ':']) children.extend(node.orelse) self._handle(node, children) def _With(self, node): children = [] for item in pycompat.get_ast_with_items(node): children.extend(['with', item.context_expr]) if item.optional_vars: children.extend(['as', item.optional_vars]) children.append(':') children.extend(node.body) self._handle(node, children) def _child_nodes(self, nodes, separator): children = [] for index, child in enumerate(nodes): children.append(child) if index < len(nodes) - 1: children.append(separator) return children def _Starred(self, node): self._handle(node, [node.value]) class _Source(object): def __init__(self, source): self.source = source self.offset = 0 def consume(self, token, skip_comment=True): try: while True: new_offset = self.source.index(token, self.offset) if self._good_token(token, new_offset) or not skip_comment: break else: self._skip_comment() except (ValueError, TypeError) as e: raise MismatchedTokenError( 'Token <%s> at %s cannot be matched' % (token, self._get_location())) self.offset = new_offset + len(token) return (new_offset, self.offset) def consume_joined_string(self, token): new_offset = self.source.index(token, self.offset) self.offset = new_offset + len(token) return (new_offset, self.offset) def consume_string(self, end=None): if _Source._string_pattern is None: string_pattern = codeanalyze.get_string_pattern() formatted_string_pattern = codeanalyze.get_formatted_string_pattern() original = r'(?:%s)|(?:%s)' % (string_pattern, formatted_string_pattern) pattern = r'(%s)((\s|\\\n|#[^\n]*\n)*(%s))*' % \ (original, original) _Source._string_pattern = re.compile(pattern) repattern = _Source._string_pattern return self._consume_pattern(repattern, end) def consume_number(self): if _Source._number_pattern is None: _Source._number_pattern = re.compile( self._get_number_pattern()) repattern = _Source._number_pattern return self._consume_pattern(repattern) def consume_not_equal(self): if _Source._not_equals_pattern is None: _Source._not_equals_pattern = re.compile(r'<>|!=') repattern = _Source._not_equals_pattern return self._consume_pattern(repattern) def consume_except_as_or_semicolon(self): repattern = re.compile(r'as|,') return self._consume_pattern(repattern) def consume_exec_open_paren_or_space(self): repattern = re.compile(r'\(|') return self._consume_pattern(repattern) def consume_exec_in_or_comma(self): repattern = 
re.compile(r'in|,') return self._consume_pattern(repattern) def consume_exec_close_paren_or_space(self): repattern = re.compile(r'\)|') return self._consume_pattern(repattern) def _good_token(self, token, offset, start=None): """Checks whether consumed token is in comments""" if start is None: start = self.offset try: comment_index = self.source.rindex('#', start, offset) except ValueError: return True try: new_line_index = self.source.rindex('\n', start, offset) except ValueError: return False return comment_index < new_line_index def _skip_comment(self): self.offset = self.source.index('\n', self.offset + 1) def _get_location(self): lines = self.source[:self.offset].split('\n') return (len(lines), len(lines[-1])) def _consume_pattern(self, repattern, end=None): while True: if end is None: end = len(self.source) match = repattern.search(self.source, self.offset, end) if self._good_token(match.group(), match.start()): break else: self._skip_comment() self.offset = match.end() return match.start(), match.end() def till_token(self, token): new_offset = self.source.index(token, self.offset) return self[self.offset:new_offset] def rfind_token(self, token, start, end): index = start while True: try: index = self.source.rindex(token, start, end) if self._good_token(token, index, start=start): return index else: end = index except ValueError: return None def from_offset(self, offset): return self[offset:self.offset] def find_backwards(self, pattern, offset): return self.source.rindex(pattern, 0, offset) def __getitem__(self, index): return self.source[index] def __getslice__(self, i, j): return self.source[i:j] def _get_number_pattern(self): # HACK: It is merely an approximation and does the job integer = r'\-?(0x[\da-fA-F]+|\d+)[lL]?' return r'(%s(\.\d*)?|(\.\d+))([eE][-+]?\d+)?[jJ]?' % integer _string_pattern = None _number_pattern = None _not_equals_pattern = None
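# A minimal round-trip sketch (uses only the public helpers documented at
# the top of this module):
if __name__ == '__main__':
    _source = 'x = 1 + 2\n'
    _node = get_patched_ast(_source, sorted_children=True)
    # write_ast() reassembles the exact source text from the sorted children
    assert write_ast(_node) == _source
    print(node_region(_node))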
ruchee/vimrc
vimfiles/bundle/vim-python/submodules/rope/rope/refactor/patchedast.py
Python
mit
34,226
from socket import *
import sys  # needed for sys.exit() below
import picamera

serverSocket = socket(AF_INET, SOCK_STREAM)

HOST = ''
PORT = 8080

serverSocket.bind((HOST,PORT))
serverSocket.listen(1)

camera = picamera.PiCamera()
camera.vflip = True
camera.hflip = True

while True:
    print 'Ready to serve...'
    connectionSocket, addr = serverSocket.accept()
    print 'Got connection from', addr
    try:
        camera.capture('pic.jpg')
        image = open('pic.jpg', 'rb')
        image_data = image.read()
        image.close()
        connectionSocket.send('HTTP/1.1 200 OK\r\n')
        connectionSocket.send('Content-Type: image/jpg\r\n\r\n')
        connectionSocket.send(image_data)
        connectionSocket.close()
    except IOError:
        f = open('404.html')
        fof = f.read()
        connectionSocket.send('HTTP/1.1 404 Not Found\r\n\r\n')
        connectionSocket.send(fof)
        connectionSocket.close()
        break
    except KeyboardInterrupt:
        connectionSocket.close()
        print "Program stopped"
        break

serverSocket.close()
sys.exit()
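# To fetch a frame from another machine on the network (the host below is a
# placeholder for the Pi's address; 8080 matches PORT above):
#   curl http://<pi-address>:8080/ -o pic.jpg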
larwef/Stuff
Raspberry Pi/webcam/webcam.py
Python
mit
941
#!/usr/bin/env python from setuptools import setup setup( name="beerpy", version="0.1.0", packages=["beerpy"], scripts=[], url="", license="MIT", author="Stefan Lehmann", author_email="Stefan.St.Lehmann@gmail.com", description="", install_requires=["pandas", "scipy"], maintainer="Stefan Lehmann", )
MrLeeh/beerpy
setup.py
Python
mit
345
# # This is an example plugin that isn't bundled inside Plumeria. # Add "example_plugin" to the list of plugins in your config file to # load this plugin. Plugins can be regular Python packages and do not # need to be inside this plugins directory. # from plumeria.command import commands from plumeria.util.ratelimit import rate_limit @commands.create('example', category='Fun') @rate_limit() async def example(message): """ This is an example plugin. """ return "Hello world" def setup(): # This method is called to start your plugin if it's enabled commands.add(example)
sk89q/Plumeria
plugins/example_plugin.py
Python
mit
604
# The MIT License (MIT) # # Copyright (c) 2016-2018 Albert Kottke # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import collections import os import re import numpy as np import pandas as pd import scipy.constants as C from . import motion from . import propagation from . import site def to_str(s): """Parse a string and strip the extra characters.""" return str(s).strip() def to_float(s): """Try to parse a float.""" try: return float(s) except ValueError: return np.nan def parse_fixed_width(types, lines): """Parse a fixed width line.""" values = [] line = [] for width, parser in types: if not line: line = lines.pop(0).replace("\n", "") values.append(parser(line[:width])) line = line[width:] return values def split_line(line, parsers, sep=" "): """Split a line into pieces and parse the strings.""" parts = [part for part in line.split(sep) if part] values = [parser(part) for parser, part in zip(parsers, parts)] return values if len(values) > 1 else values[0] def _parse_curves(block, **kwargs): """Parse nonlinear curves block.""" count = int(block.pop(0)) curves = [] for i in range(count): for param in ["mod_reduc", "damping"]: length, name = parse_fixed_width([(5, int), (65, to_str)], block) curves.append( site.NonlinearProperty( name, parse_fixed_width(length * [(10, float)], block), parse_fixed_width(length * [(10, float)], block), param, ) ) length = int(block[0][:5]) soil_types = parse_fixed_width((length + 1) * [(5, int)], block)[1:] # Group soil type number and curves together return {(soil_types[i // 2], c.param): c for i, c in enumerate(curves)} def _parse_soil_profile(block, units, curves, **kwargs): """Parse soil profile block.""" wt_layer, length, _, name = parse_fixed_width( 3 * [(5, int)] + [(55, to_str)], block ) layers = [] soil_types = [] for i in range(length): ( index, soil_idx, thickness, shear_mod, damping, unit_wt, shear_vel, ) = parse_fixed_width( [(5, int), (5, int), (15, to_float)] + 4 * [(10, to_float)], block ) st = site.SoilType( soil_idx, unit_wt, curves[(soil_idx, "mod_reduc")], curves[(soil_idx, "damping")], ) try: # Try to find previously added soil type st = soil_types[soil_types.index(st)] except ValueError: soil_types.append(st) layers.append(site.Layer(st, thickness, shear_vel)) if units == "english": # Convert from English to metric for st in soil_types: st.unit_wt *= 0.00015708746 for l in layers: l.thickness *= 0.3048 l.shear_vel *= 0.3048 p = site.Profile(layers) p.update_layers() p.wt_depth = p[wt_layer - 1].depth return p def _parse_motion(block, **kwargs): 
"""Parse motin specification block.""" _, fa_length, time_step, name, fmt = parse_fixed_width( [(5, int), (5, int), (10, float), (30, to_str), (30, to_str)], block ) scale, pga, _, header_lines, _ = parse_fixed_width( 3 * [(10, to_float)] + 2 * [(5, int)], block ) m = re.search(r"(\d+)\w(\d+)\.\d+", fmt) count_per_line = int(m.group(1)) width = int(m.group(2)) fname = os.path.join(os.path.dirname(kwargs["fname"]), name) accels = np.genfromtxt( fname, delimiter=(count_per_line * [width]), skip_header=header_lines, ) if np.isfinite(scale): pass elif np.isfinite(pga): scale = pga / np.abs(accels).max() else: scale = 1.0 accels *= scale m = motion.TimeSeriesMotion(fname, "", time_step, accels, fa_length) return m def _parse_input_loc(block, profile, **kwargs): """Parse input location block.""" layer, wave_field = parse_fixed_width(2 * [(5, int)], block) return profile.location( motion.WaveField[wave_field], index=(layer - 1), ) def _parse_run_control(block): """Parse run control block.""" _, max_iterations, strain_ratio, _, _ = parse_fixed_width( 2 * [(5, int)] + [(10, float)] + 2 * [(5, int)], block ) return propagation.EquivalentLinearCalculation( strain_ratio, max_iterations, tolerance=10.0 ) def _parse_output_accel(block): raise NotImplementedError def _parse_output_stress(block): raise NotImplementedError def _parse_output_spectra(block): raise NotImplementedError def load_shake_inp(fname): with open(fname) as fp: lines = fp.readlines() lines.pop(0) units = lines.pop(0) # Parse the option blocks option, block = None, [] options = [] for l in lines: m = re.match(r"^\s+(\d+)$", l) if m: if option and not block: block.append(l) else: if option and block: # Save the previous block options.append((option, block)) block = [] option = int(m.group(1)) else: block.append(l) parsers = { 1: ("curves", _parse_curves), 2: ("profile", _parse_soil_profile), 3: ("motion", _parse_motion), 4: ("input_loc", _parse_input_loc), 5: ("run_control", _parse_run_control), 6: ("output", _parse_output_accel), 7: ("output", _parse_output_stress), 9: ("output", _parse_output_spectra), } input = collections.OrderedDict( { "fname": fname, "units": units, } ) for option, block in options: key, parser = parsers[option] input[key] = parser(block, **input) return input def read_nrattle_ctl(fpath): """Read an nrattle control file.""" lines = list(fpath.open()) lines = [line for line in lines if line[0] != "!"] d = {k: lines.pop(0) for k in ["revision", "prefix"]} d["freq_count"], d["freq_max"] = split_line(lines.pop(0), [int, float]) d["out_depth"] = split_line( lines.pop(0), [ float, ], ) profile = [] while line := lines.pop(0): try: profile.append(split_line(line, [int, float, float, float, float])) except ValueError: break d["profile"] = np.rec.fromrecords( profile, names="layer,thickness,vel_shear,density,inv_qual" ) d["hs_vel_shear"], d["hs_density"] = split_line(line, [float, float]) d["hs_layer"], d["inci_angle"] = split_line(lines.pop(0), [int, float]) return d def profile_from_nrattle_ctl(ctl): df = pd.DataFrame(ctl["profile"]).set_index("layer") # Here the index is based on layer number which starts at 1 in Fortran # convention. 
df.loc[len(df) + 1] = [0, ctl["hs_vel_shear"], ctl["hs_density"], 0] # Scale from km to m df["vel_shear"] *= 1000 df["thickness"] *= 1000 # Convert Q to damping: # damping (dec) = 0.5 * 1 / Q = 1 / (2 * Q) df["damping"] = df["inv_qual"].apply( lambda iq: 0 if np.isclose(iq, 0) else 0.5 * 1 / (iq if iq > 1 else 1 / iq) ) df["unit_wt"] = C.g * df["density"] return site.Profile.from_dataframe(df, 0)
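# A minimal usage sketch (the input file name is a placeholder assumption):
if __name__ == "__main__":
    shake_input = load_shake_inp("example.inp")
    # option block 2 is parsed into a site.Profile instance
    print(shake_input["profile"])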
arkottke/pysra
pysra/tools.py
Python
mit
8,584
__author__ = 'Michael Fisher'
mikefishr/euchre-engine
Euchre-PY/euchre.py
Python
mit
30
# Import Flask from the flask library.
from flask import Flask

# Create a new Flask instance as a variable named app.
# The name you pass to the Flask app should be __name__.
app = Flask(__name__)

# Add a view function named index. Give this view a route of "/".
# Make the view return your name. You do not need to use app.run().
@app.route('/')
def index():
    return "Hello World!"

# Import request from Flask.
# Then update the index view to return "Hello {name}", replacing {name} with
# a name argument in the query string.
# (Each snippet below is a separate exercise answer; Flask would reject
# re-registering '/' if the whole file ran at once.)
from flask import request

@app.route('/')
def index(name='Treehouse'):
    name = request.args.get('name', name)
    return 'Hello {}'.format(name)

# Add a new route to hello() that expects a name argument.
# The view will need to accept a name argument, too.
# Update the response from hello() to say "Hello {name}", replacing {name} with the passed-in name.
# Now give hello() a default name argument of "Treehouse".
@app.route('/')
@app.route('/<name>')
def hello(name='Treehouse'):
    return 'Hello {}'.format(name)

# Add an import for render_template. It comes directly from the flask library.
from flask import render_template

# Use render_template() to render the "hello.html" template in hello().
# Pass the name argument to the template. Print the name variable in the <h1> in the template.
@app.route('/hello/<name>')
def hello(name="Treehouse"):
    return render_template('hello.html', name=name)
CaseyNord/Treehouse
Flask Basics/flask_app.py
Python
mit
1,505
#!/usr/bin/env python # Copyright 2017 Brook Boese, Finn Ellis, Jacob Martin, Matthew Popescu, Rubin Stricklin, and Sage Callon # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and # to permit persons to whom the Software is furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all copies or substantial portions of # the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED # TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF # CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings") try: from django.core.management import execute_from_command_line except ImportError: # The above import may fail for some other reason. Ensure that the # issue is really that Django is missing to avoid masking other # exceptions on Python 2. try: import django except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) raise execute_from_command_line(sys.argv)
NovelTorpedo/noveltorpedo
website/manage.py
Python
mit
1,958
# Plugin for gallery_get. import re from gallery_utils import * # Each definition can be one of the following: # - a string # - a regex string # - a function that takes source as a parameter and returns an array or a string. # If you comment out a parameter, it will use the default defined in __init__.py # identifier (default = name of this plugin after "plugin_") : If there's a match, we'll attempt to download images using this plugin. # title: parses the gallery page for a title. This will be the folder name of the output gallery. title = r'<title>(.+?)</title>' # redirect: if the links in the gallery page go to an html instead of an image, use this to parse the gallery page. def redirect(source): redirects = [] urls = re.findall(r'href=\'(.+?)\'\>', source) if not urls: return redirects cur_url = urls[0].split("?")[0] index = 0 while True: indexed_page = cur_url + "?page=%d" % index print("Crawling " + indexed_page) indexed_source = urlopen_text(indexed_page) links = re.findall('href=[\"\'](/photo/.+)[\"\']',indexed_source) if links: redirects += map(lambda x: 'http://www.imagefap.com' + x, links) index += 1 else: break return redirects # direct_links: if redirect is non-empty, this parses each redirect page for a single image. Otherwise, this parses the gallery page for all images. direct_links = r'name=\"mainPhoto\".*?(https?://.*?\.imagefap.*?\.com/.*?\.(jpe?g?|png|jfif|gif).*?)\"' # same_filename (default=False): if True, uses filename specified on remote link. Otherwise, creates own filename with incremental index. same_filename = True
regosen/gallery_get
gallery_plugins/plugin_imagefap.py
Python
mit
1,699
import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), '../tools')) import files import probs def main(argv): k, N = files.read_line_of_ints(argv[0]) print '%0.3f' % probs.mendel2(k, N, 0.25) if __name__ == "__main__": main(sys.argv[1:])
cowboysmall/rosalind
src/stronghold/rosalind_lia.py
Python
mit
279
from behave import * from behave_webdriver.transformers import matcher_mapping try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse if 'transform-parse' not in matcher_mapping: use_step_matcher('re') else: use_step_matcher('transform-re') @given('the element "([^"]*)?" is( not)* visible') @then('I expect that element "([^"]*)?" becomes( not)* visible') @then('I expect that element "([^"]*)?" is( not)* visible') def check_element_visibility(context, element, negative): element_is_visible = context.behave_driver.element_visible(element) if negative: assert not element_is_visible, 'Expected element to not be visible, but it was' else: assert element_is_visible, 'Expected element to be visible, but it was not visible' @given('the title is( not)* "([^"]*)?"') @then('I expect that the title is( not)* "([^"]*)?"') def title(context, negative, value): if negative: assert context.behave_driver.title != value, 'Title was "{}"'.format(context.behave_driver.title) else: assert context.behave_driver.title == value, 'Title was "{}"'.format(context.behave_driver.title) @then('I expect that element "([^"]*)?" is( not)* within the viewport') def check_element_within_viewport(context, element, negative): element_in_viewport = context.behave_driver.element_in_viewport(element) if negative: assert not element_in_viewport, 'Element was completely within the viewport' else: assert element_in_viewport, 'Element was not completely within viewport' @given('the element "([^"]*)?" is( not)* enabled') @then('I expect that element "([^"]*)?" is( not)* enabled') def element_enabled(context, element, negative): enabled = context.behave_driver.element_enabled(element) if negative: assert not enabled else: assert enabled @given('the element "([^"]*)?" is( not)* selected') @then('I expect that element "([^"]*)?" is( not)* selected') def element_selected(context, element, negative): selected = context.behave_driver.element_selected(element) if negative: assert not selected else: assert selected @given('the checkbox "([^"]*)?" is( not)* checked') @then('I expect that checkbox "([^"]*)?" is( not)* checked') def element_checked(context, element, negative): checked = context.behave_driver.element_selected(element) if negative: assert not checked else: assert checked @given('there is (an|no) element "([^"]*)?" on the page') def element_exists(context, an_no, element): negative = an_no == 'no' exists = context.behave_driver.element_exists(element) if negative: assert not exists else: assert exists @then('I expect that element "([^"]*)?" does( not)* exist') def check_element_exists(context, element, negative): exists = context.behave_driver.element_exists(element) if negative: assert not exists, 'Expected the element does not exist, but element "{}" was located'.format(element) else: assert exists, 'Expected element to exist, but no element "{}" was located'.format(element) @given('the element "([^"]*)?" 
contains( not)* the same text as element "([^"]*)?"') @then('I expect that element "([^"]*)?"( not)* contains the same text as element "([^"]*)?"') def elements_same_text(context, first_element, negative, second_element): first_elem_text = context.behave_driver.get_element_text(first_element) second_elem_text = context.behave_driver.get_element_text(second_element) same = first_elem_text == second_elem_text if negative: assert not same, 'Element "{}" text "{}" is same as element "{}"'.format(first_element, first_elem_text, second_element) else: assert same, 'Element "{}" text "{}" is not same as element "{}" text "{}"'.format(first_element, first_elem_text, second_element, second_elem_text) @given('the element "([^"]*)?"( not)* matches the text "([^"]*)?"') @then('I expect that element "([^"]*)?"( not)* matches the text "([^"]*)?"') def element_matches_text(context, element, negative, text): elem_text = context.behave_driver.get_element_text(element) matches = elem_text == text if negative: assert not matches, 'Element "{}" text matches "{}"'.format(element, text) else: assert matches, 'The text "{}" did not match the element text "{}"'.format(text, elem_text) @given('the element "([^"]*)?"( not)* contains the text "([^"]*)?"') @then('I expect that element "([^"]*)?"( not)* contains the text "([^"]*)?"') def check_element_contains_text(context, element, negative, text): contains = context.behave_driver.element_contains(element, text) if negative: assert not contains, 'Element text does contain "{}"'.format(text) else: assert contains, 'Element text does not contain "{}"'.format(text) @given('the element "([^"]*)?"( not)* contains any text') @then('I expect that element "([^"]*)?"( not)* contains any text') def element_any_text(context, element, negative): any_text = bool(context.behave_driver.get_element_text(element)) if negative: assert not any_text else: assert any_text @given('the element "([^"]*)?" is( not)* empty') @then('I expect that element "([^"]*)?" is( not)* empty') def check_element_empty(context, element, negative): elem_text = context.behave_driver.get_element_text(element) any_text = bool(elem_text) if negative: assert any_text is True else: assert any_text is False @given('the page url is( not)* "([^"]*)?"') @then('I expect that the url is( not)* "([^"]*)?"') def check_url(context, negative, value): current_url = context.behave_driver.current_url if negative: assert current_url != value, 'The url was "{}"'.format(current_url) else: assert current_url == value, 'Expected url to be "{}", but saw the url was "{}"'.format(value, current_url) @then('I expect the url to( not)* contain "([^"]*)?"') def check_url_contains(context, negative, value): current_url = context.behave_driver.current_url if negative: assert value not in current_url, 'url was "{}"'.format(current_url) else: assert value in current_url, 'url was "{}"'.format(current_url) @given('the( css)* attribute "([^"]*)?" from element "([^"]*)?" is( not)* "([^"]*)?"') @then('I expect that the( css)* attribute "([^"]*)?" from element "([^"]*)?" 
is( not)* "([^"]*)?"') def check_element_attribute(context, is_css, attr, element, negative, value): if is_css: attribute_value, value = context.behave_driver.get_element_attribute(element, attr, is_css, value) else: attribute_value = context.behave_driver.get_element_attribute(element, attr) if negative: assert attribute_value != value, 'Attribute value was "{}"'.format(attribute_value) else: assert attribute_value == value, 'Attribute value was "{}"'.format(attribute_value) @given('the cookie "([^"]*)?" contains( not)* the value "([^"]*)?"') @then('I expect that cookie "([^"]*)?"( not)* contains "([^"]*)?"') def check_cookie_value(context, cookie_key, negative, value): cookie = context.behave_driver.get_cookie(cookie_key) cookie_value = cookie.get('value') if negative: assert cookie_value != value, 'Cookie value was "{}"'.format(cookie_value) else: assert cookie_value == value, 'Cookie value was "{}"'.format(cookie_value) @given('the cookie "([^"]*)?" does( not)* exist') def cookie_exists(context, cookie_key, negative): cookie = context.behave_driver.get_cookie(cookie_key) if negative: assert cookie is None, 'Cookie exists: {}'.format(cookie) else: assert cookie is not None @then('I expect that cookie "([^"]*)?"( not)* exists') def check_cookie_exists(context, cookie_key, negative): cookie = context.behave_driver.get_cookie(cookie_key) if negative: assert cookie is None, u'Cookie was present: "{}"'.format(cookie) else: assert cookie is not None, 'Cookie was not found' @given('the element "([^"]*)?" is( not)* ([\d]+)px (broad|tall)') @then('I expect that element "([^"]*)?" is( not)* ([\d]+)px (broad|tall)') def check_element_size(context, element, negative, pixels, how): elem_size = context.behave_driver.get_element_size(element) if how == 'tall': axis = 'height' else: axis = 'width' if negative: assert elem_size[axis] != int(pixels), 'Element size was "{}"'.format(elem_size) else: assert elem_size[axis] == int(pixels), 'Element size was "{}"'.format(elem_size) @given('the element "([^"]*)?" is( not)* positioned at ([\d]+)px on the (x|y) axis') @then('I expect that element "([^"]*)?" is( not)* positioned at ([\d]+)px on the (x|y) axis') def check_element_position(context, element, negative, pos, axis): element_position = context.behave_driver.get_element_location(element) if negative: assert element_position[axis] != int(pos), 'Position was {} on the {} axis'.format(element_position[axis], axis) else: assert element_position[axis] == int(pos), 'Position was {} on the {} axis'.format(element_position[axis], axis) @given('a (alertbox|confirmbox|prompt) is( not)* opened') @then('I expect that a (alertbox|confirmbox|prompt) is( not)* opened') def check_modal(context, modal, negative): if negative: assert context.behave_driver.has_alert is False else: assert context.behave_driver.has_alert is True @then('I expect that the path is( not)* "([^"]*)?"') def check_path(context, negative, value): current_url = context.behave_driver.current_url path = urlparse(current_url).path if negative: assert path != value, 'The path was "{}"'.format(path) else: assert path == value, 'Expected the path to be "{}", but saw the path "{}"'.format(value, path) @then('I expect that element "([^"]*)?" 
(has|does not have) the class "([^"]*)?"')
def check_element_has_class(context, element, has, classname):
    negative = 'not' in has
    has_class = context.behave_driver.element_has_class(element, classname)
    if negative:
        assert not has_class, 'Classes were {}'.format(context.behave_driver.get_element_attribute(element, 'class'))
    else:
        assert has_class, 'Classes were {}'.format(context.behave_driver.get_element_attribute(element, 'class'))


@then('I expect a new (window|tab) has( not)* been opened')
def check_window_opened(context, _, negative):
    if negative:
        assert not context.behave_driver.secondary_handles
    else:
        assert bool(context.behave_driver.secondary_handles)


@then('I expect the url "([^"]*)?" is opened in a new (tab|window)')
def check_url_new_window(context, url, _):
    current_handle = context.behave_driver.primary_handle
    for handle in context.behave_driver.secondary_handles:
        context.behave_driver.switch_to_window(handle)
        if context.behave_driver.current_url == url:
            context.behave_driver.switch_to_window(current_handle)
            break
    else:
        context.behave_driver.switch_to_window(current_handle)
        if len(context.behave_driver.secondary_handles) < 1:
            raise AssertionError('No secondary handles found!')
        raise AssertionError("The url '{}' was not found in any handle".format(url))


@then('I expect that element "([^"]*)?" is( not)* focused')
def check_element_focused(context, element, negative):
    element_focused = context.behave_driver.element_focused(element)
    if negative:
        assert not element_focused
    else:
        assert element_focused


@then('I expect that a (alertbox|confirmbox|prompt)( not)* contains the text "([^"]*)?"')
def check_modal_text_contains(context, modal_type, negative, text):
    alert_text = context.behave_driver.alert.text
    if negative:
        assert text not in alert_text, 'Alert text was "{}"'.format(alert_text)
    else:
        assert text in alert_text, 'Alert text was "{}"'.format(alert_text)


@then('I wait on element "([^"]*)?"(?: for (\d+)ms)*(?: to( not)* (be checked|be enabled|be selected|be visible|contain a text|contain a value|exist))*')
def wait_for_element_condition(context, element, milliseconds, negative, condition):
    if milliseconds:
        # The regex group captures digits only, so the string converts directly
        milliseconds = int(milliseconds)
    result = context.behave_driver.wait_for_element_condition(element, milliseconds, negative, condition)
    if not negative:
        negative = ''
    assert result, 'was expecting element "{element}" to {negative} {condition}, but the result was {result}'.format(
        element=element,
        negative=negative,
        condition=condition,
        result=result)


@then("I expect the screen is ([\d]+) by ([\d]+) pixels")
def check_screen_size(context, x, y):
    screen_x, screen_y = context.behave_driver.screen_size
    assert screen_x == int(x) and screen_y == int(y), \
        'Screen size was {}x{}'.format(screen_x, screen_y)


use_step_matcher('parse')
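# --- Illustrative usage (editor's addition) ---
# A minimal feature-file sketch exercising some of the steps above; the
# selectors and titles are hypothetical.
#
#   Feature: demo page expectations
#     Scenario: check the demo page
#       Given the element "#logo" is visible
#       Then I expect that the title is "Demo"
#       And I expect that element "#logo" has the class "visible"
#       And I wait on element "#spinner" for 5000ms to not exist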
spyoungtech/behave-webdriver
behave_webdriver/steps/expectations.py
Python
mit
13,568
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class LocalNetworkGatewaysOperations: """LocalNetworkGatewaysOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2018_06_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config async def _create_or_update_initial( self, resource_group_name: str, local_network_gateway_name: str, parameters: "_models.LocalNetworkGateway", **kwargs: Any ) -> "_models.LocalNetworkGateway": cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'LocalNetworkGateway') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('LocalNetworkGateway', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('LocalNetworkGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore async def begin_create_or_update( self, resource_group_name: str, local_network_gateway_name: str, parameters: "_models.LocalNetworkGateway", **kwargs: Any ) -> AsyncLROPoller["_models.LocalNetworkGateway"]: """Creates or updates a local network gateway in the specified resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param local_network_gateway_name: The name of the local network gateway. :type local_network_gateway_name: str :param parameters: Parameters supplied to the create or update local network gateway operation. 
:type parameters: ~azure.mgmt.network.v2018_06_01.models.LocalNetworkGateway :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either LocalNetworkGateway or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.LocalNetworkGateway] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, local_network_gateway_name=local_network_gateway_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('LocalNetworkGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore async def get( self, resource_group_name: str, local_network_gateway_name: str, **kwargs: Any ) -> "_models.LocalNetworkGateway": """Gets the specified local network gateway in a resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param local_network_gateway_name: The name of the local network gateway. 
:type local_network_gateway_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: LocalNetworkGateway, or the result of cls(response) :rtype: ~azure.mgmt.network.v2018_06_01.models.LocalNetworkGateway :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('LocalNetworkGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore async def _delete_initial( self, resource_group_name: str, local_network_gateway_name: str, **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore async def begin_delete( self, resource_group_name: str, local_network_gateway_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the specified local network gateway. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param local_network_gateway_name: The name of the local network gateway. :type local_network_gateway_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._delete_initial( resource_group_name=resource_group_name, local_network_gateway_name=local_network_gateway_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore async def _update_tags_initial( self, resource_group_name: str, local_network_gateway_name: str, parameters: "_models.TagsObject", **kwargs: Any ) -> "_models.LocalNetworkGateway": cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) 
api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._update_tags_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'TagsObject') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('LocalNetworkGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore async def begin_update_tags( self, resource_group_name: str, local_network_gateway_name: str, parameters: "_models.TagsObject", **kwargs: Any ) -> AsyncLROPoller["_models.LocalNetworkGateway"]: """Updates a local network gateway tags. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param local_network_gateway_name: The name of the local network gateway. :type local_network_gateway_name: str :param parameters: Parameters supplied to update local network gateway tags. :type parameters: ~azure.mgmt.network.v2018_06_01.models.TagsObject :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either LocalNetworkGateway or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.LocalNetworkGateway] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._update_tags_initial( resource_group_name=resource_group_name, local_network_gateway_name=local_network_gateway_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('LocalNetworkGateway', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'localNetworkGatewayName': self._serialize.url("local_network_gateway_name", local_network_gateway_name, 'str', min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}'} # type: ignore def list( self, resource_group_name: str, **kwargs: Any ) -> AsyncIterable["_models.LocalNetworkGatewayListResult"]: """Gets all the local network gateways in a resource group. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either LocalNetworkGatewayListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.LocalNetworkGatewayListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGatewayListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('LocalNetworkGatewayListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways'} # type: ignore
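# --- Illustrative usage (editor's addition) ---
# A minimal sketch of driving this operations group through the async
# management client; the credential setup and resource names below are
# placeholders, not part of this generated module.
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.network.aio import NetworkManagementClient
#
#   async def delete_gateway():
#       async with NetworkManagementClient(DefaultAzureCredential(),
#                                          "<subscription-id>") as client:
#           poller = await client.local_network_gateways.begin_delete(
#               "<resource-group>", "<gateway-name>")
#           await poller.result()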
Azure/azure-sdk-for-python
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_local_network_gateways_operations.py
Python
mit
27,427
revision = '16de00f8ecc' down_revision = '1872f4529b3' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): op.add_column('proposal', sa.Column('policy_domain_id', postgresql.UUID(), nullable=True)) op.create_foreign_key( 'policy_domain_id_fkey', 'proposal', 'policy_domain', ['policy_domain_id'], ['id'], ) def downgrade(): op.drop_column('proposal', 'policy_domain_id')
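# --- Illustrative usage (editor's addition) ---
# This revision is applied or reverted with the standard Alembic CLI:
#
#   alembic upgrade 16de00f8ecc     # adds proposal.policy_domain_id + FK
#   alembic downgrade 1872f4529b3   # drops the column again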
mgax/mptracker
alembic/versions/16de00f8ecc_proposal_policy_doma.py
Python
mit
476
import math

from Acquisition import aq_inner
from five import grok
from zope.component import getMultiAdapter

from plone.dexterity.content import Container
from plone.directives import form
from plone.namedfile.interfaces import IImageScaleTraversable


class IGalleryFolder(form.Schema, IImageScaleTraversable):
    """ A dedicated image gallery folder """


class GalleryFolder(Container):
    grok.implements(IGalleryFolder)


class View(grok.View):
    """ Thumbnail view """
    grok.context(IGalleryFolder)
    grok.require('zope2.View')
    grok.name('view')

    def update(self):
        self.has_galleries = len(self.contained_galleries()) > 0
        self.has_images = len(self.contained_images()) > 0

    def gallery_matrix(self):
        items = self.contained_galleries()
        return self.build_matrix(items)

    def image_matrix(self):
        items = self.image_list()
        return self.build_matrix(items)

    def contained_galleries(self):
        context = aq_inner(self.context)
        items = context.restrictedTraverse('@@folderListing')(
            portal_type='newport.sitecontent.galleryfolder')
        return items

    def contained_images(self):
        context = aq_inner(self.context)
        items = context.restrictedTraverse('@@folderListing')(
            portal_type='Image')
        return items

    def build_matrix(self, data):
        items = data
        count = len(items)
        rowcount = count / 4.0
        rows = math.ceil(rowcount)
        matrix = []
        for i in range(int(rows)):
            row = []
            for j in range(4):
                index = 4 * i + j
                if index <= int(count - 1):
                    cell = {}
                    cell['item'] = items[index]
                    row.append(cell)
            matrix.append(row)
        return matrix

    def image_list(self):
        images = self.contained_images()
        data = []
        for item in images:
            thumb = self.getImageTag(item, scalename='thumb')
            original = self.getImageTag(item, scalename='original')
            if not thumb or not original:
                # Skip images for which no scale could be computed to avoid
                # KeyErrors on the url/width/height lookups below
                continue
            info = {}
            info['title'] = item.Title
            info['desc'] = item.Description
            info['thumb_url'] = thumb['url']
            info['thumb_width'] = thumb['width']
            info['thumb_height'] = thumb['height']
            info['original_url'] = original['url']
            info['original_width'] = original['width']
            info['original_height'] = original['height']
            data.append(info)
        return data

    def getImageTag(self, item, scalename):
        obj = item.getObject()
        scales = getMultiAdapter((obj, self.request), name='images')
        if scalename == 'thumb':
            scale = scales.scale('image', width=200, height=200)
        else:
            scale = scales.scale('image', width=768, height=768)
        item = {}
        if scale is not None:
            item['url'] = scale.url
            item['width'] = scale.width
            item['height'] = scale.height
        return item
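# --- Illustrative example (editor's addition) ---
# build_matrix arranges items four to a row; e.g. six (hypothetical) items
# yield a two-row matrix:
#
#   build_matrix(['a', 'b', 'c', 'd', 'e', 'f'])
#   # -> [[{'item': 'a'}, {'item': 'b'}, {'item': 'c'}, {'item': 'd'}],
#   #     [{'item': 'e'}, {'item': 'f'}]]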
vwc/buildout.mycarman
src/newport.sitecontent/newport/sitecontent/galleryfolder.py
Python
mit
3,071
#!/usr/bin/env python3 # Copyright (c) 2020 The Fujicoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Utilities for working directly with the wallet's BDB database file This is specific to the configuration of BDB used in this project: - pagesize: 4096 bytes - Outer database contains single subdatabase named 'main' - btree - btree leaf pages Each key-value pair is two entries in a btree leaf. The first is the key, the one that follows is the value. And so on. Note that the entry data is itself not in the correct order. Instead entry offsets are stored in the correct order and those offsets are needed to then retrieve the data itself. Page format can be found in BDB source code dbinc/db_page.h This only implements the deserialization of btree metadata pages and normal btree pages. Overflow pages are not implemented but may be needed in the future if dealing with wallets with large transactions. `db_dump -da wallet.dat` is useful to see the data in a wallet.dat BDB file """ import binascii import struct # Important constants PAGESIZE = 4096 OUTER_META_PAGE = 0 INNER_META_PAGE = 2 # Page type values BTREE_INTERNAL = 3 BTREE_LEAF = 5 BTREE_META = 9 # Some magic numbers for sanity checking BTREE_MAGIC = 0x053162 DB_VERSION = 9 # Deserializes a leaf page into a dict. # Btree internal pages have the same header, for those, return None. # For the btree leaf pages, deserialize them and put all the data into a dict def dump_leaf_page(data): page_info = {} page_header = data[0:26] _, pgno, prev_pgno, next_pgno, entries, hf_offset, level, pg_type = struct.unpack('QIIIHHBB', page_header) page_info['pgno'] = pgno page_info['prev_pgno'] = prev_pgno page_info['next_pgno'] = next_pgno page_info['hf_offset'] = hf_offset page_info['level'] = level page_info['pg_type'] = pg_type page_info['entry_offsets'] = struct.unpack('{}H'.format(entries), data[26:26 + entries * 2]) page_info['entries'] = [] if pg_type == BTREE_INTERNAL: # Skip internal pages. These are the internal nodes of the btree and don't contain anything relevant to us return None assert pg_type == BTREE_LEAF, 'A non-btree leaf page has been encountered while dumping leaves' for i in range(0, entries): offset = page_info['entry_offsets'][i] entry = {'offset': offset} page_data_header = data[offset:offset + 3] e_len, pg_type = struct.unpack('HB', page_data_header) entry['len'] = e_len entry['pg_type'] = pg_type entry['data'] = data[offset + 3:offset + 3 + e_len] page_info['entries'].append(entry) return page_info # Deserializes a btree metadata page into a dict. 
# Does a simple sanity check on the magic value, type, and version def dump_meta_page(page): # metadata page # general metadata metadata = {} meta_page = page[0:72] _, pgno, magic, version, pagesize, encrypt_alg, pg_type, metaflags, _, free, last_pgno, nparts, key_count, record_count, flags, uid = struct.unpack('QIIIIBBBBIIIIII20s', meta_page) metadata['pgno'] = pgno metadata['magic'] = magic metadata['version'] = version metadata['pagesize'] = pagesize metadata['encrypt_alg'] = encrypt_alg metadata['pg_type'] = pg_type metadata['metaflags'] = metaflags metadata['free'] = free metadata['last_pgno'] = last_pgno metadata['nparts'] = nparts metadata['key_count'] = key_count metadata['record_count'] = record_count metadata['flags'] = flags metadata['uid'] = binascii.hexlify(uid) assert magic == BTREE_MAGIC, 'bdb magic does not match bdb btree magic' assert pg_type == BTREE_META, 'Metadata page is not a btree metadata page' assert version == DB_VERSION, 'Database too new' # btree metadata btree_meta_page = page[72:512] _, minkey, re_len, re_pad, root, _, crypto_magic, _, iv, chksum = struct.unpack('IIIII368sI12s16s20s', btree_meta_page) metadata['minkey'] = minkey metadata['re_len'] = re_len metadata['re_pad'] = re_pad metadata['root'] = root metadata['crypto_magic'] = crypto_magic metadata['iv'] = binascii.hexlify(iv) metadata['chksum'] = binascii.hexlify(chksum) return metadata # Given the dict from dump_leaf_page, get the key-value pairs and put them into a dict def extract_kv_pairs(page_data): out = {} last_key = None for i, entry in enumerate(page_data['entries']): # By virtue of these all being pairs, even number entries are keys, and odd are values if i % 2 == 0: out[entry['data']] = b'' last_key = entry['data'] else: out[last_key] = entry['data'] return out # Extract the key-value pairs of the BDB file given in filename def dump_bdb_kv(filename): # Read in the BDB file and start deserializing it pages = [] with open(filename, 'rb') as f: data = f.read(PAGESIZE) while len(data) > 0: pages.append(data) data = f.read(PAGESIZE) # Sanity check the meta pages dump_meta_page(pages[OUTER_META_PAGE]) dump_meta_page(pages[INNER_META_PAGE]) # Fetch the kv pairs from the leaf pages kv = {} for i in range(3, len(pages)): info = dump_leaf_page(pages[i]) if info is not None: info_kv = extract_kv_pairs(info) kv = {**kv, **info_kv} return kv
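# --- Illustrative usage (editor's addition, not part of the framework) ---
# Dump the key-value pairs of a wallet file; the path argument is a
# placeholder supplied by the caller.
if __name__ == '__main__':
    import sys
    for key, value in sorted(dump_bdb_kv(sys.argv[1]).items()):
        print(key.hex(), value.hex())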
fujicoin/fujicoin
test/functional/test_framework/bdb.py
Python
mit
5,534
# coding=utf-8
import base64

import tornado.ioloop
import tornado.web
from tornado.web import _create_signature_v1, _time_independent_equals
import tornado.gen
from tornado.escape import utf8
from tornado.concurrent import Future
from qr import get_qrcode
import uuid
import os


def create_url_signed_value(secret, value):
    signature = _create_signature_v1(secret, value)
    token = "-".join([value, signature])
    return token


class LoginBuff(object):
    def __init__(self):
        self.waiters = {}

    def wait_for_login(self, user_id):
        future = Future()
        self.waiters[user_id] = future
        return future

    def new_login_msg(self, user_id):
        if user_id in self.waiters:
            self.waiters[user_id].set_result(True)
            self.waiters.pop(user_id)


global_login_buff = LoginBuff()


class BaseHandler(tornado.web.RequestHandler):
    def get_current_user(self):
        user_id = self.get_secure_cookie('user_id')
        if not user_id:
            return None
        else:
            return user_id

    def decode_url_signed_value(self, token):
        token = utf8(token)
        parts = utf8(token).split("-")
        if len(parts) != 2:
            return False
        signature = _create_signature_v1(self.application.settings["cookie_secret"], parts[0])
        if not _time_independent_equals(parts[1], signature):
            return False
        return parts[0]


class CellPhoneLoginHandler(BaseHandler):
    def get(self, token):
        user_id = self.decode_url_signed_value(token)
        if user_id and user_id in global_login_buff.waiters:
            self.render('cellphone.html')
        else:
            self.write('QR code could not be verified, please scan it again')

    def post(self, token):
        user_id = self.decode_url_signed_value(token)
        if user_id and user_id in global_login_buff.waiters:
            global_login_buff.new_login_msg(user_id)
            self.write('Login on the PC side succeeded!')
        else:
            self.write('QR code could not be verified, please scan it again')


class HelloHandler(BaseHandler):
    @tornado.web.authenticated
    def get(self):
        self.render('hello.html')


class LogoutHandler(BaseHandler):
    def get(self):
        self.clear_cookie("user_id")
        self.redirect("/pc")


class PCLoginRedirectHandler(BaseHandler):
    def get(self):
        user_id = uuid.uuid4().hex
        token = create_url_signed_value(self.application.settings["cookie_secret"], user_id)
        url = '/pc/{0}'.format(token)
        self.redirect(url)


class PCLoginHandler(BaseHandler):
    def get(self, token):
        user_id = self.decode_url_signed_value(token)
        if user_id and user_id not in global_login_buff.waiters:
            url = 'http://{0}/cellphone/{1}'.format(self.request.host, token)
            img_data = get_qrcode(url)
            base64_img_data = base64.b64encode(img_data)
            self.render('pc.html', base64_img_data=base64_img_data)
        else:
            self.redirect('/pc')

    @tornado.gen.coroutine
    def post(self, token):
        user_id = self.decode_url_signed_value(token)
        self.user_id = user_id
        login_success = yield global_login_buff.wait_for_login(user_id)
        if login_success:
            self.set_secure_cookie('user_id', user_id)
            self.write('ok')

    def on_connection_close(self):
        # The waiter may already have been removed by a successful login,
        # so pop defensively instead of risking a KeyError
        global_login_buff.waiters.pop(getattr(self, 'user_id', None), None)


application = tornado.web.Application([
    (r"/cellphone/([^/]+)", CellPhoneLoginHandler),
    (r"/hello", HelloHandler),
    (r"/logout", LogoutHandler),
    (r"/pc/([^/]+)", PCLoginHandler),
    (r"/pc", PCLoginRedirectHandler),
    (r"/", tornado.web.RedirectHandler, {'url': '/pc'}),
],
    template_path=os.path.join(os.path.dirname(__file__), "templates"),
    static_path=os.path.join(os.path.dirname(__file__), "static"),
    cookie_secret="replace-this-with-a-random-secret",
    debug=True,
    login_url='/pc'
)

if __name__ == "__main__":
    import tornado.options
    tornado.options.parse_command_line()
    application.listen(8888, '0.0.0.0')
    tornado.ioloop.IOLoop.current().start()
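# --- Illustrative round trip (editor's addition) ---
# A minimal sketch of the token scheme above; the secret is a placeholder.
#
#   token = create_url_signed_value('some-secret', uuid.uuid4().hex)
#   # token has the form '<user_id>-<hmac signature>' and is embedded in the
#   # /pc/<token> and /cellphone/<token> URLs; BaseHandler's
#   # decode_url_signed_value() verifies the signature and returns the
#   # user_id, or False when the token is malformed or tampered with.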
inuyasha2012/tornado-qrcode-login-example
example/main.py
Python
mit
4,259
# -*- coding: utf-8 -*-
import logging

import numpy as np
from collections import OrderedDict

import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from theano.tensor.nnet.conv import conv2d, ConvOp
from theano.sandbox.cuda.blas import GpuCorrMM
from theano.sandbox.cuda.basic_ops import gpu_contiguous

from blocks.bricks.cost import SquaredError
from blocks.bricks.cost import CategoricalCrossEntropy, MisclassificationRate
from blocks.graph import add_annotation, Annotation
from blocks.roles import add_role, PARAMETER, WEIGHT, BIAS

from utils import shared_param, AttributeDict
from nn import maxpool_2d, global_meanpool_2d, BNPARAM

logger = logging.getLogger('main.model')
floatX = theano.config.floatX
theano.sandbox.cuda.use('gpu0')


class LadderAE():
    def __init__(self, p):
        self.p = p
        # Whether to transpose the initial weight matrices
        self.init_weights_transpose = False
        # Learning rate
        self.default_lr = p.lr
        # OrderedDict keeps the shared variables in insertion order
        self.shareds = OrderedDict()
        self.rstream = RandomStreams(seed=p.seed)
        self.rng = np.random.RandomState(seed=p.seed)

        # The encoder needs more than one layer
        n_layers = len(p.encoder_layers)
        assert n_layers > 1, "Need to define encoder layers"
        # Every layer has a latent variable that feeds a denoising cost,
        # so the number of denoising cost weights must equal the layer count
        assert n_layers == len(p.denoising_cost_x), (
            "Number of denoising costs does not match with %d layers: %s" %
            (n_layers, str(p.denoising_cost_x)))

        # Unless x is already a full tuple, replicate the single value
        # n_layers times and wrap the copies in a tuple.
        def one_to_all(x):
            """ (5.,) -> 5 -> (5., 5., 5.)
                ('relu',) -> 'relu' -> ('relu', 'relu', 'relu')
            """
            if type(x) is tuple and len(x) == 1:
                x = x[0]

            if type(x) is float:
                x = (np.float32(x),) * n_layers

            if type(x) is str:
                x = (x,) * n_layers
            return x

        p.decoder_spec = one_to_all(p.decoder_spec)
        p.f_local_noise_std = one_to_all(p.f_local_noise_std)
        acts = one_to_all(p.get('act', 'relu'))

        # The number of encoder layers has to match the decoder layout
        assert n_layers == len(p.decoder_spec), "f and g need to match"
        assert (n_layers == len(acts)), (
            "Not enough activations given. Requires %d. Got: %s" %
            (n_layers, str(acts)))
        # Force the activation of the last layer to softmax
        acts = acts[:-1] + ('softmax',)

        def parse_layer(spec):
            """ 'fc:5' -> ('fc', 5)
                '5' -> ('fc', 5)
                5 -> ('fc', 5)
                'convv:3:2:2' -> ('convv', [3,2,2])
            """
            # A spec that is only a parameter (a bare number) defaults
            # to a fully connected layer
            if type(spec) is not str:
                return "fc", spec
            # A string spec, possibly a convolution layer configuration
            spec = spec.split(':')
            # The leading string of the spec names the layer type (fc/conv);
            # if the spec holds a single value, default to fc
            l_type = spec.pop(0) if len(spec) >= 2 else "fc"
            spec = map(int, spec)
            # Unwrap single-element parameter lists
            spec = spec[0] if len(spec) == 1 else spec
            return l_type, spec

        # Apply parse_layer to every element of the encoder layer list
        enc = map(parse_layer, p.encoder_layers)
        # Zip the three sequences element-wise and number the results.
        self.layers = list(enumerate(zip(enc, p.decoder_spec, acts)))
        # END of model configuration

    def weight(self, init, name, cast_float32=True, for_conv=False):
        weight = self.shared(init, name, cast_float32, role=WEIGHT)
        if for_conv:
            return weight.dimshuffle('x', 0, 'x', 'x')
        return weight

    def bias(self, init, name, cast_float32=True, for_conv=False):
        b = self.shared(init, name, cast_float32, role=BIAS)
        if for_conv:
            return b.dimshuffle('x', 0, 'x', 'x')
        return b

    def shared(self, init, name, cast_float32=True, role=PARAMETER, **kwargs):
        p = self.shareds.get(name)
        if p is None:
            p = shared_param(init, name, cast_float32, role, **kwargs)
            self.shareds[name] = p
        return p

    def counter(self):
        name = 'counter'
        p = self.shareds.get(name)
        update = []
        if p is None:
            p_max_val = np.float32(10)
            p = self.shared(np.float32(1), name, role=BNPARAM)
            p_max = self.shared(p_max_val, name + '_max', role=BNPARAM)
            update = [(p, T.clip(p + np.float32(1), np.float32(0), p_max)),
                      (p_max, p_max_val)]
        return (p, update)

    def noise_like(self, x):
        noise = self.rstream.normal(size=x.shape, avg=0.0, std=1.0)
        return T.cast(noise, dtype=floatX)

    def rand_init(self, in_dim, out_dim):
        """ Random initialization for fully connected layers """
        W = self.rng.randn(in_dim, out_dim) / np.sqrt(in_dim)
        return W

    def rand_init_conv(self, dim):
        """ Random initialization for convolution filters """
        fan_in = np.prod(dtype=floatX, a=dim[1:])
        bound = np.sqrt(3. / max(1.0, (fan_in)))
        W = np.asarray(
            self.rng.uniform(low=-bound, high=bound, size=dim), dtype=floatX)
        return W

    def new_activation_dict(self):
        return AttributeDict({'z': {}, 'h': {}, 's': {}, 'm': {}})

    def annotate_update(self, update, tag_to):
        a = Annotation()
        for (var, up) in update:
            a.updates[var] = up
        add_annotation(tag_to, a)

    def apply(self, input_labeled, target_labeled, input_unlabeled):
        self.layer_counter = 0
        input_dim = self.p.encoder_layers[0]

        # Store the dimension tuples in the same order as layers.
        layers = self.layers
        self.layer_dims = {0: input_dim}

        self.lr = self.shared(self.default_lr, 'learning_rate', role=None)

        self.costs = costs = AttributeDict()
        self.costs.denois = AttributeDict()

        self.act = AttributeDict()
        self.error = AttributeDict()

        top = len(layers) - 1

        N = input_labeled.shape[0]
        self.join = lambda l, u: T.concatenate([l, u], axis=0)
        self.labeled = lambda x: x[:N] if x is not None else x
        self.unlabeled = lambda x: x[N:] if x is not None else x
        self.split_lu = lambda x: (self.labeled(x), self.unlabeled(x))

        input_concat = self.join(input_labeled, input_unlabeled)

        def encoder(input_, path_name, input_noise_std=0, noise_std=[]):
            h = input_

            logger.info('  0: noise %g' % input_noise_std)
            if input_noise_std > 0.:
                h = h + self.noise_like(h) * input_noise_std

            d = AttributeDict()
            d.unlabeled = self.new_activation_dict()
            d.labeled = self.new_activation_dict()
            d.labeled.z[0] = self.labeled(h)
            d.unlabeled.z[0] = self.unlabeled(h)
            prev_dim = input_dim
            for i, (spec, _, act_f) in layers[1:]:
                d.labeled.h[i - 1], d.unlabeled.h[i - 1] = self.split_lu(h)
                noise = noise_std[i] if i < len(noise_std) else 0.
curr_dim, z, m, s, h = self.f(h, prev_dim, spec, i, act_f, path_name=path_name, noise_std=noise) assert self.layer_dims.get(i) in (None, curr_dim) self.layer_dims[i] = curr_dim d.labeled.z[i], d.unlabeled.z[i] = self.split_lu(z) d.unlabeled.s[i] = s d.unlabeled.m[i] = m prev_dim = curr_dim d.labeled.h[i], d.unlabeled.h[i] = self.split_lu(h) return d # Clean, supervised logger.info('Encoder: clean, labeled') clean = self.act.clean = encoder(input_concat, 'clean') # Corrupted, supervised logger.info('Encoder: corr, labeled') corr = self.act.corr = encoder(input_concat, 'corr', input_noise_std=self.p.super_noise_std, noise_std=self.p.f_local_noise_std) est = self.act.est = self.new_activation_dict() # Decoder path in opposite order logger.info('Decoder: z_corr -> z_est') for i, ((_, spec), l_type, act_f) in layers[::-1]: z_corr = corr.unlabeled.z[i] z_clean = clean.unlabeled.z[i] z_clean_s = clean.unlabeled.s.get(i) z_clean_m = clean.unlabeled.m.get(i) fspec = layers[i+1][1][0] if len(layers) > i+1 else (None, None) if i == top: ver = corr.unlabeled.h[i] ver_dim = self.layer_dims[i] top_g = True else: ver = est.z.get(i + 1) ver_dim = self.layer_dims.get(i + 1) top_g = False z_est = self.g(z_lat=z_corr, z_ver=ver, in_dims=ver_dim, out_dims=self.layer_dims[i], l_type=l_type, num=i, fspec=fspec, top_g=top_g) if z_est is not None: # Denoising cost if z_clean_s and self.p.zestbn == 'bugfix': z_est_norm = (z_est - z_clean_m) / T.sqrt(z_clean_s + np.float32(1e-10)) elif z_clean_s is None or self.p.zestbn == 'no': z_est_norm = z_est else: assert False, 'Not supported path' se = SquaredError('denois' + str(i)) costs.denois[i] = se.apply(z_est_norm.flatten(2), z_clean.flatten(2)) \ / np.prod(self.layer_dims[i], dtype=floatX) costs.denois[i].name = 'denois' + str(i) denois_print = 'denois %.2f' % self.p.denoising_cost_x[i] else: denois_print = '' # Store references for later use est.h[i] = self.apply_act(z_est, act_f) est.z[i] = z_est est.s[i] = None est.m[i] = None logger.info(' g%d: %10s, %s, dim %s -> %s' % ( i, l_type, denois_print, self.layer_dims.get(i+1), self.layer_dims.get(i) )) # Costs y = target_labeled.flatten() costs.class_clean = CategoricalCrossEntropy().apply(y, clean.labeled.h[top]) costs.class_clean.name = 'cost_class_clean' costs.class_corr = CategoricalCrossEntropy().apply(y, corr.labeled.h[top]) costs.class_corr.name = 'cost_class_corr' # This will be used for training costs.total = costs.class_corr * 1.0 for i in range(top + 1): if costs.denois.get(i) and self.p.denoising_cost_x[i] > 0: costs.total += costs.denois[i] * self.p.denoising_cost_x[i] costs.total.name = 'cost_total' # Classification error mr = MisclassificationRate() self.error.clean = mr.apply(y, clean.labeled.h[top]) * np.float32(100.) self.error.clean.name = 'error_rate_clean' def apply_act(self, input, act_name): if input is None: return input act = { 'relu': lambda x: T.maximum(0, x), 'leakyrelu': lambda x: T.switch(x > 0., x, 0.1 * x), 'linear': lambda x: x, 'softplus': lambda x: T.log(1. 
+ T.exp(x)), 'sigmoid': lambda x: T.nnet.sigmoid(x), 'softmax': lambda x: T.nnet.softmax(x), }.get(act_name) assert act, 'unknown act %s' % act_name if act_name == 'softmax': input = input.flatten(2) return act(input) def annotate_bn(self, var, id, var_type, mb_size, size, norm_ax): var_shape = np.array((1,) + size) out_dim = np.prod(var_shape) / np.prod(var_shape[list(norm_ax)]) # Flatten the var - shared variable updating is not trivial otherwise, # as theano seems to believe a row vector is a matrix and will complain # about the updates orig_shape = var.shape var = var.flatten() # Here we add the name and role, the variables will later be identified # by these values var.name = id + '_%s_clean' % var_type add_role(var, BNPARAM) shared_var = self.shared(np.zeros(out_dim), name='shared_%s' % var.name, role=None) # Update running average estimates. When the counter is reset to 1, it # will clear its memory cntr, c_up = self.counter() one = np.float32(1) run_avg = lambda new, old: one / cntr * new + (one - one / cntr) * old if var_type == 'mean': new_value = run_avg(var, shared_var) elif var_type == 'var': mb_size = T.cast(mb_size, 'float32') new_value = run_avg(mb_size / (mb_size - one) * var, shared_var) else: raise NotImplemented('Unknown batch norm var %s' % var_type) # Add the counter update to the annotated update if it is the first # instance of a counter self.annotate_update([(shared_var, new_value)] + c_up, var) return var.reshape(orig_shape) def f(self, h, in_dim, spec, num, act_f, path_name, noise_std=0): assert path_name in ['clean', 'corr'] # Generates identifiers used for referencing shared variables. # E.g. clean and corrupted encoders will end up using the same # variable name and hence sharing parameters gen_id = lambda s: '_'.join(['f', str(num), s]) layer_type, _ = spec # Pooling if layer_type in ['maxpool', 'globalmeanpool']: z, output_size = self.f_pool(h, spec, in_dim) norm_ax = (0, -2, -1) # after pooling, no activation func for now unless its softmax act_f = "linear" if act_f != "softmax" else act_f # Convolution elif layer_type in ['convv', 'convf']: z, output_size = self.f_conv(h, spec, in_dim, gen_id('W')) norm_ax = (0, -2, -1) # Fully connected elif layer_type == "fc": h = h.flatten(2) if h.ndim > 2 else h _, dim = spec W = self.weight(self.rand_init(np.prod(in_dim), dim), gen_id('W')) z, output_size = T.dot(h, W), (dim,) norm_ax = (0,) else: raise ValueError("Unknown layer spec: %s" % layer_type) m = s = None is_normalizing = True if is_normalizing: keep_dims = True z_l = self.labeled(z) z_u = self.unlabeled(z) m = z_u.mean(norm_ax, keepdims=keep_dims) s = z_u.var(norm_ax, keepdims=keep_dims) m_l = z_l.mean(norm_ax, keepdims=keep_dims) s_l = z_l.var(norm_ax, keepdims=keep_dims) if path_name == 'clean': # Batch normalization estimates the mean and variance of # validation and test sets based on the training set # statistics. The following annotates the computation of # running average to the graph. 
m_l = self.annotate_bn(m_l, gen_id('bn'), 'mean', z_l.shape[0], output_size, norm_ax) s_l = self.annotate_bn(s_l, gen_id('bn'), 'var', z_l.shape[0], output_size, norm_ax) z = self.join( (z_l - m_l) / T.sqrt(s_l + np.float32(1e-10)), (z_u - m) / T.sqrt(s + np.float32(1e-10))) if noise_std > 0: z += self.noise_like(z) * noise_std # z for lateral connection z_lat = z b_init, c_init = 0.0, 1.0 b_c_size = output_size[0] # Add bias if act_f != 'linear': z += self.bias(b_init * np.ones(b_c_size), gen_id('b'), for_conv=len(output_size) > 1) if is_normalizing: # Add free parameter (gamma in original Batch Normalization paper) # if needed by the activation. For instance ReLU does't need one # and we only add it to softmax if hyperparameter top_c is set. if (act_f not in ['relu', 'leakyrelu', 'linear', 'softmax'] or (act_f == 'softmax' and self.p.top_c is True)): c = self.weight(c_init * np.ones(b_c_size), gen_id('c'), for_conv=len(output_size) > 1) z *= c h = self.apply_act(z, act_f) logger.info(' f%d: %s, %s,%s noise %.2f, params %s, dim %s -> %s' % ( num, layer_type, act_f, ' BN,' if is_normalizing else '', noise_std, spec[1], in_dim, output_size)) return output_size, z_lat, m, s, h def f_pool(self, x, spec, in_dim): layer_type, dims = spec num_filters = in_dim[0] if "globalmeanpool" == layer_type: y, output_size = global_meanpool_2d(x, num_filters) # scale the variance to match normal conv layers with xavier init y = y * np.float32(in_dim[-1]) * np.float32(np.sqrt(3)) else: assert dims[0] != 1 or dims[1] != 1 y, output_size = maxpool_2d(x, in_dim, poolsize=(dims[1], dims[1]), poolstride=(dims[0], dims[0])) return y, output_size def f_conv(self, x, spec, in_dim, weight_name): layer_type, dims = spec num_filters = dims[0] filter_size = (dims[1], dims[1]) stride = (dims[2], dims[2]) bm = 'full' if 'convf' in layer_type else 'valid' num_channels = in_dim[0] W = self.weight(self.rand_init_conv( (num_filters, num_channels) + filter_size), weight_name) if stride != (1, 1): f = GpuCorrMM(subsample=stride, border_mode=bm, pad=(0, 0)) y = f(gpu_contiguous(x), gpu_contiguous(W)) else: assert self.p.batch_size == self.p.valid_batch_size y = conv2d(x, W, image_shape=(2*self.p.batch_size, ) + in_dim, filter_shape=((num_filters, num_channels) + filter_size), border_mode=bm) output_size = ((num_filters,) + ConvOp.getOutputShape(in_dim[1:], filter_size, stride, bm)) return y, output_size def g(self, z_lat, z_ver, in_dims, out_dims, l_type, num, fspec, top_g): f_layer_type, dims = fspec is_conv = f_layer_type is not None and ('conv' in f_layer_type or 'pool' in f_layer_type) gen_id = lambda s: '_'.join(['g', str(num), s]) in_dim = np.prod(dtype=floatX, a=in_dims) out_dim = np.prod(dtype=floatX, a=out_dims) num_filters = out_dims[0] if is_conv else out_dim if l_type[-1] in ['0']: g_type, u_type = l_type[:-1], l_type[-1] else: g_type, u_type = l_type, None # Mapping from layer above: u if u_type in ['0'] or z_ver is None: if z_ver is None and u_type not in ['0']: logger.warn('Decoder %d:%s without vertical input' % (num, g_type)) u = None else: if top_g: u = z_ver elif is_conv: u = self.g_deconv(z_ver, in_dims, out_dims, gen_id('W'), fspec) else: W = self.weight(self.rand_init(in_dim, out_dim), gen_id('W')) u = T.dot(z_ver, W) # Batch-normalize u if u is not None: norm_ax = (0,) if u.ndim <= 2 else (0, -2, -1) keep_dims = True u -= u.mean(norm_ax, keepdims=keep_dims) u /= T.sqrt(u.var(norm_ax, keepdims=keep_dims) + np.float32(1e-10)) # Define the g function if not is_conv: z_lat = z_lat.flatten(2) bi = lambda inits, 
name: self.bias(inits * np.ones(num_filters), gen_id(name), for_conv=is_conv) wi = lambda inits, name: self.weight(inits * np.ones(num_filters), gen_id(name), for_conv=is_conv) if g_type == '': z_est = None elif g_type == 'i': z_est = z_lat elif g_type in ['sig']: sigval = bi(0., 'c1') + wi(1., 'c2') * z_lat if u is not None: sigval += wi(0., 'c3') * u + wi(0., 'c4') * z_lat * u sigval = T.nnet.sigmoid(sigval) z_est = bi(0., 'a1') + wi(1., 'a2') * z_lat + wi(1., 'b1') * sigval if u is not None: z_est += wi(0., 'a3') * u + wi(0., 'a4') * z_lat * u elif g_type in ['lin']: a1 = wi(1.0, 'a1') b = bi(0.0, 'b') z_est = a1 * z_lat + b elif g_type in ['relu']: assert u is not None b = bi(0., 'b') x = u + b z_est = self.apply_act(x, 'relu') elif g_type in ['sigmoid']: assert u is not None b = bi(0., 'b') c = wi(1., 'c') z_est = self.apply_act((u + b) * c, 'sigmoid') elif g_type in ['comparison_g2']: # sig without the uz cross term sigval = bi(0., 'c1') + wi(1., 'c2') * z_lat if u is not None: sigval += wi(0., 'c3') * u sigval = T.nnet.sigmoid(sigval) z_est = bi(0., 'a1') + wi(1., 'a2') * z_lat + wi(1., 'b1') * sigval if u is not None: z_est += wi(0., 'a3') * u elif g_type in ['comparison_g3']: # sig without the sigmoid nonlinearity z_est = bi(0., 'a1') + wi(1., 'a2') * z_lat if u is not None: z_est += wi(0., 'a3') * u + wi(0., 'a4') * z_lat * u elif g_type in ['comparison_g4']: # No mixing between z_lat and u before final sum, otherwise similar # to sig def nonlin(inp, in_name='input', add_bias=True): w1 = wi(1., 'w1_%s' % in_name) b1 = bi(0., 'b1') w2 = wi(1., 'w2_%s' % in_name) b2 = bi(0., 'b2') if add_bias else 0 w3 = wi(0., 'w3_%s' % in_name) return w2 * T.nnet.sigmoid(b1 + w1 * inp) + w3 * inp + b2 z_est = nonlin(z_lat, 'lat') if u is None else \ nonlin(z_lat, 'lat') + nonlin(u, 'ver', False) elif g_type in ['comparison_g5', 'gauss']: # Gaussian assumption on z: (z - mu) * v + mu if u is None: b1 = bi(0., 'b1') w1 = wi(1., 'w1') z_est = w1 * z_lat + b1 else: a1 = bi(0., 'a1') a2 = wi(1., 'a2') a3 = bi(0., 'a3') a4 = bi(0., 'a4') a5 = bi(0., 'a5') a6 = bi(0., 'a6') a7 = wi(1., 'a7') a8 = bi(0., 'a8') a9 = bi(0., 'a9') a10 = bi(0., 'a10') mu = a1 * T.nnet.sigmoid(a2 * u + a3) + a4 * u + a5 v = a6 * T.nnet.sigmoid(a7 * u + a8) + a9 * u + a10 z_est = (z_lat - mu) * v + mu else: raise NotImplementedError("unknown g type: %s" % str(g_type)) # Reshape the output if z is for conv but u from fc layer if (z_est is not None and type(out_dims) == tuple and len(out_dims) > 1.0 and z_est.ndim < 4): z_est = z_est.reshape((z_est.shape[0],) + out_dims) return z_est def g_deconv(self, z_ver, in_dims, out_dims, weight_name, fspec): """ Inverse operation for each type of f used in convnets """ f_type, f_dims = fspec assert z_ver is not None num_channels = in_dims[0] if in_dims is not None else None num_filters, width, height = out_dims[:3] if f_type in ['globalmeanpool']: u = T.addbroadcast(z_ver, 2, 3) assert in_dims[1] == 1 and in_dims[2] == 1, \ "global pooling needs in_dims (1,1): %s" % str(in_dims) elif f_type in ['maxpool']: sh, str, size = z_ver.shape, f_dims[0], f_dims[1] assert str == size, "depooling requires stride == size" u = T.zeros((sh[0], sh[1], sh[2] * str, sh[3] * str), dtype=z_ver.dtype) for x in xrange(str): for y in xrange(str): u = T.set_subtensor(u[:, :, x::str, y::str], z_ver) u = u[:, :, :width, :height] elif f_type in ['convv', 'convf']: filter_size, str = (f_dims[1], f_dims[1]), f_dims[2] W_shape = (num_filters, num_channels) + filter_size W = self.weight(self.rand_init_conv(W_shape), 
weight_name) if str > 1: # upsample if strided version sh = z_ver.shape u = T.zeros((sh[0], sh[1], sh[2] * str, sh[3] * str), dtype=z_ver.dtype) u = T.set_subtensor(u[:, :, ::str, ::str], z_ver) else: u = z_ver # no strides, only deconv u = conv2d(u, W, filter_shape=W_shape, border_mode='valid' if 'convf' in f_type else 'full') u = u[:, :, :width, :height] else: raise NotImplementedError('Layer %s has no convolutional decoder' % f_type) return u
ryukinkou/ladder_customized
ladder_theano_customized/ladder.py
Python
mit
26,220
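The `maxpool` branch of `g_deconv` above inverts pooling by copying each activation into a stride-by-stride block and cropping back to the encoder's width and height. A minimal NumPy sketch of the same idea, independent of Theano (`depool` is a hypothetical helper name, assuming stride == pool size as the code asserts):

import numpy as np

def depool(z, stride, width, height):
    # Repeat each value into a stride x stride block, mimicking the
    # T.set_subtensor loop in g_deconv for the 'maxpool' case.
    n, c, h, w = z.shape
    u = np.zeros((n, c, h * stride, w * stride), dtype=z.dtype)
    for x in range(stride):
        for y in range(stride):
            u[:, :, x::stride, y::stride] = z
    # Crop to the encoder shape in case pooling did not divide evenly.
    return u[:, :, :width, :height]

z = np.arange(4, dtype=np.float32).reshape(1, 1, 2, 2)
print(depool(z, 2, 4, 4)[0, 0])
# [[0. 0. 1. 1.]
#  [0. 0. 1. 1.]
#  [2. 2. 3. 3.]
#  [2. 2. 3. 3.]]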
# elmr.config
# The ELMR configuration file.
#
# Author:  Benjamin Bengfort <benjamin@bengfort.com>
# Created: Thu Apr 09 08:44:18 2015 -0400
#
# Copyright (C) 2015 University of Maryland
# For license information, see LICENSE.txt
#
# ID: config.py [] benjamin@bengfort.com $

"""
The ELMR configuration file.
"""

##########################################################################
## Imports
##########################################################################

import os

from elmr.utils import classproperty
from elmr.exceptions import ImproperlyConfigured

##########################################################################
## Constants and Helper Functions
##########################################################################

ENVIRON_PREFIX = "ELMR"
BASE_PATH = os.path.join(os.path.dirname(__file__), "..")
FIXTURES = os.path.join(BASE_PATH, "fixtures")
MIGRATIONS = os.path.join(os.path.dirname(__file__), "migrations")


def settings(name, default=None, required=False, prefix=ENVIRON_PREFIX):
    """
    Fetches the setting from an environment variable by prepending the
    prefix; if the variable is not found, returns the default value. If
    required and the setting remains None, this function will raise an
    ImproperlyConfigured exception.
    """
    envvar = "%s_%s" % (prefix.upper(), name.upper())
    if envvar in os.environ:
        return os.environ[envvar]

    if required and default is None:
        raise ImproperlyConfigured("Missing required setting '%s' "
                                   "from environment" % envvar)

    return default


def get_settings_object(default, envvar="settings", prefix=ENVIRON_PREFIX):
    """
    Returns the correct settings object string by inspecting an environment
    variable (prefixed by the ENVIRON_PREFIX) for the words "production",
    "development", or "testing". Raises an error if these aren't found.

    You must specify a default string in case the environment doesn't contain
    the correct variable.

    Usage in Flask would be as follows:

        app = Flask(__name__)
        app.config.from_object(get_settings_object("development"))

    This will look for an envvar, ELMR_SETTINGS and if it is set, will return
    that configuration string, e.g. "elmr.config.ProductionConfig". Otherwise
    it will return the default, in this case,
    "elmr.config.DevelopmentConfig".
    """
    # prefix must be passed by keyword; settings()'s third positional
    # parameter is `required`, not `prefix`.
    mode = settings(envvar, default, prefix=prefix).lower()
    pkg = "elmr.config"
    jump = {
        "production": "%s.%s" % (pkg, ProductionConfig.__name__),
        "development": "%s.%s" % (pkg, DevelopmentConfig.__name__),
        "testing": "%s.%s" % (pkg, TestingConfig.__name__),
    }

    if mode not in jump:
        raise ImproperlyConfigured("Could not load settings for name '%s'!\n"
                                   "Use 'production', 'development', or "
                                   "'testing' to properly configure." % mode)

    return jump[mode]

##########################################################################
## Configuration Object
##########################################################################


class Config(object):
    """
    Default configuration for ELMR application
    """

    ## Flask Settings
    DEBUG = settings("debug", False)
    TESTING = settings("testing", False)
    CSRF_ENABLED = settings("csrf_enabled", True)
    SECRET_KEY = settings("secret_key", required=True)
    MIGRATIONS = settings("migrate_repo", MIGRATIONS)

    ## Ingestion Settings
    STARTYEAR = settings("startyear", "2000")
    ENDYEAR = settings("endyear", "2015")
    FIXTURES = settings("fixtures", FIXTURES)

    @classproperty
    def SQLALCHEMY_DATABASE_URI(klass):
        """
        Alias for DATABASE_URI property
        """
        return klass.DATABASE_URI

    @classproperty
    def DATABASE_URI(klass):
        """
        Accesses the Heroku `DATABASE_URL` otherwise returns the expected
        `ELMR_DATABASE_URI` setting as before.
""" envvar = "DATABASE_URL" if envvar in os.environ: return os.environ[envvar] return settings("database_uri") class ProductionConfig(Config): """ Production specific settings for ELMR application """ pass class DevelopmentConfig(Config): """ Development specific settings for ELMR application """ DEBUG = True class TestingConfig(DevelopmentConfig): """ Testing settings for travis-ci and other tests """ TESTING = True SECRET_KEY = "supersecret" # secret not needed in testing DATABASE_URI = "postgresql+psycopg2://tester:secret@localhost/elmrtest" STARTYEAR = settings("startyear", "2006") ENDYEAR = settings("endyear", "2007")
bbengfort/jobs-report
elmr/config.py
Python
mit
4,794
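A quick sketch of how the `settings` helper above resolves values, assuming the `elmr` package is importable. Note that values read from the environment come back as strings, not booleans or ints, and that importing the module requires `ELMR_SECRET_KEY` to be set (the `Config` class body demands it):

import os
os.environ.setdefault("ELMR_SECRET_KEY", "dev-only")  # Config requires it at import
from elmr.config import settings

os.environ["ELMR_STARTYEAR"] = "1999"
assert settings("startyear", "2000") == "1999"        # env var wins; value is a str
assert settings("missing", "fallback") == "fallback"  # unset env var -> default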
import logging
import sys
import time

from rtmidi.midiconstants import *
from rtmidi.midiutil import open_midiport
from router import Router

from config import ROUTER_IPS, DEBUG_LEVEL, MIDI_MAPPING

logging.basicConfig(level=DEBUG_LEVEL)

ROUTERS = [Router(ip).connect() for ip in ROUTER_IPS]


class MidiInputHandler(object):
    def __init__(self, port):
        self.port = port

    def __call__(self, event, data=None):
        message, deltatime = event
        status = message[0]

        # Only note-on (0x9n) and note-off (0x8n) messages are handled.
        if status & 0xF0 != 0x90 and status & 0xF0 != 0x80:
            return

        if status & 0xF0 == 0x90:
            note = message[1]
            # A note-on with velocity 0 is conventionally a note-off.
            on = message[2] != 0
        elif status & 0xF0 == 0x80:
            note = message[1]
            on = False

        if note in MIDI_MAPPING:
            router = MIDI_MAPPING[note]["router"]
            gpio = MIDI_MAPPING[note]["gpio"]
            # TODO: modulo!
            if 0 <= router < len(ROUTERS):
                ROUTERS[router].switch_light(gpio, on)
            else:
                logging.debug("Router {} not connected".format(router))
        else:
            logging.debug("Note {} not in mapping".format(note))


try:
    midiin, port_name = open_midiport(None)
except (EOFError, KeyboardInterrupt):
    sys.exit()

print("Attaching MIDI input callback handler.")
midiin.set_callback(MidiInputHandler(port_name))

print("Entering main loop. Press Control-C to exit.")
try:
    # just wait for keyboard interrupt in main thread
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    print('')
finally:
    print("Exit.")
    midiin.close_port()
bdejong/router-gate
miditest.py
Python
mit
1,682
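The status-byte handling in `MidiInputHandler.__call__` above is the standard MIDI convention: the high nibble selects the message type, and a note-on with velocity 0 counts as a note-off. A standalone sketch of the same logic (the message byte values are illustrative):

NOTE_ON, NOTE_OFF = 0x90, 0x80

def parse(message):
    # Return (note, on) for note messages, None for everything else.
    status = message[0]
    kind = status & 0xF0
    if kind not in (NOTE_ON, NOTE_OFF):
        return None
    note = message[1]
    on = kind == NOTE_ON and message[2] != 0
    return note, on

assert parse([0x90, 60, 100]) == (60, True)   # note-on, middle C
assert parse([0x90, 60, 0]) == (60, False)    # velocity-0 note-on acts as note-off
assert parse([0x80, 60, 64]) == (60, False)   # explicit note-off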
from typing import Dict from CreatureRogue.data_layer.location_area_rect import LocationAreaRect from CreatureRogue.data_layer.location_area_rect_collection import LocationAreaRectCollection from CreatureRogue.data_layer.map_data_tile_type import MapDataTileType # TODO - Change to enum? HP_STAT = 1 ATTACK_STAT = 2 DEFENSE_STAT = 3 SP_ATTACK_STAT = 4 SP_DEFENSE_STAT = 5 SPEED_STAT = 6 ACCURACY_STAT = 7 EVASION_STAT = 8 def load_location_area_rects(rects_file_name: str) -> LocationAreaRectCollection: """ TODO - Put somewhere sensible """ rects = LocationAreaRectCollection() with open(rects_file_name) as rects_file: for line in rects_file: location_area_id, x1, y1, x2, y2 = [int(part) for part in line.strip().split(',')] rects.add_location_area_rect(LocationAreaRect(location_area_id, x1, y1, x2, y2)) return rects class StaticGameData: """ Static game data is loaded on start up and is a memory cache of the database which contains creature species, moves etc. It's really just a collection of the different objects to facilitate passing it around the game and each object should be accessed directly (static_data.*). """ def __init__(self, species, types, type_chart, moves, stats, colors, growth_rates, move_targets, regions, locations, location_areas, xp_lookup, pokeballs, ailments, map_data_tile_types: Dict[int, MapDataTileType]): self.species = species self.types = types self.type_chart = type_chart self.moves = moves self.stats = stats self.colors = colors self.growth_rates = growth_rates self.move_targets = move_targets self.regions = regions self.locations = locations self.location_areas = location_areas self.location_area_rects = None self.xp_lookup = xp_lookup self.pokeballs = pokeballs self.ailments = ailments self.map_data_tile_types = map_data_tile_types def stat(self, stat): return self.stats[stat]
DaveTCode/CreatureRogue
CreatureRogue/data_layer/data.py
Python
mit
2,084
# encoding: utf8 from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Cause', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=64)), ('description', models.TextField()), ('creator', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')), ('target', models.PositiveIntegerField(null=True, blank=True)), ('url', models.URLField(null=True, blank=True)), ('is_verified', models.BooleanField(default=False)), ('is_published', models.BooleanField(default=False)), ], options={ }, bases=(models.Model,), ), ]
vladimiroff/humble-media
humblemedia/causes/migrations/0001_initial.py
Python
mit
1,075
import _plotly_utils.basevalidators class MaxpointsValidator(_plotly_utils.basevalidators.NumberValidator): def __init__( self, plotly_name="maxpoints", parent_name="funnelarea.stream", **kwargs ): super(MaxpointsValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "calc"), max=kwargs.pop("max", 10000), min=kwargs.pop("min", 0), **kwargs )
plotly/plotly.py
packages/python/plotly/plotly/validators/funnelarea/stream/_maxpoints.py
Python
mit
506
import os

import jinja2
import webapp2

JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)


class MainPage(webapp2.RequestHandler):

    def get(self):
        template = JINJA_ENVIRONMENT.get_template('index.html')
        self.response.write(template.render())


application = webapp2.WSGIApplication([
    ('/', MainPage),
], debug=True)
spawnedc/rubberducksoftware.co.uk
duck/duck.py
Python
mit
469
# -*- coding: utf-8 -*-
"""#179 Largest Number (Medium).

(https://leetcode.com/problems/largest-number/#/description)

Given a list of non negative integers, arrange them such that they form the
largest number.

For example, given [3, 30, 34, 5, 9], the largest formed number is 9534330.

Note: The result may be very large, so you need to return a string instead
of an integer.
"""
import functools


class LargestNumber:
    """Return the largest number based on the elements of the list."""

    def arrange_largest_number(self, nums):
        """Return the largest combination number in a given list.

        Args:
          nums (list): Input list

        Returns:
          result (string): Re-arranged result based on the above criteria.
        """
        nums = [str(x) for x in nums]
        nums = sorted(nums, key=functools.cmp_to_key(self._compare))
        # lstrip('0') collapses results like '00' to '', so fall back to '0'.
        return ''.join(nums).lstrip('0') or '0'

    def _compare(self, a, b):
        """Compare two strings by concatenation order.

        Args:
          a (string): String a.
          b (string): String b.

        Returns:
          result (int): -1 if a + b > b + a (a should sort first), else 1.
        """
        return [1, -1][a + b > b + a]
AppliedAlgorithmsGroup/leon-lee
src/python/largest_number.py
Python
mit
1,191
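A standalone worked example of the comparator: '9' sorts before '34' because '934' > '349' as strings, which is exactly the pairwise test the class above applies:

import functools

def compare(a, b):
    # a before b exactly when a + b forms the larger concatenation.
    return [1, -1][a + b > b + a]

nums = sorted(['3', '30', '34', '5', '9'], key=functools.cmp_to_key(compare))
assert ''.join(nums) == '9534330'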
""" Django settings for digihel project. Generated by 'django-admin startproject' using Django 1.9.6. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os CI = bool(os.environ.get('CI')) # When running in Travis. DEBUG = (os.environ.get('DEBUG') == '1') PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) BASE_DIR = os.path.dirname(PROJECT_DIR) LOCALE_PATHS = ( os.path.join(BASE_DIR, 'locale'), ) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # Application definition INSTALLED_APPS = [ 'users', 'helusers', 'people', 'content', 'kehmet', 'digi', 'digihel', 'feedback', 'search', 'events', 'wagtail.wagtailforms', 'wagtail.wagtailredirects', 'wagtail.wagtailembeds', 'wagtail.wagtailsites', 'wagtail.wagtailusers', 'wagtail.wagtailsnippets', 'wagtail.wagtaildocs', 'wagtail.wagtailimages', 'wagtail.wagtailsearch', 'wagtail.wagtailadmin', 'wagtail.wagtailcore', 'wagtail.contrib.modeladmin', 'wagtail.contrib.table_block', 'compressor', 'modelcluster', 'taggit', 'blog', 'djangobower', 'wagtail_svgmap', 'wagtailtinymce', 'djcelery', 'social_widgets', 'allauth', 'allauth.account', 'allauth.socialaccount', 'helusers.providers.helsinki', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.sites', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.humanize' ] MIDDLEWARE_CLASSES = [ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', 'wagtail.wagtailcore.middleware.SiteMiddleware', 'wagtail.wagtailredirects.middleware.RedirectMiddleware', ] ROOT_URLCONF = 'digihel.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(PROJECT_DIR, 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'digihel.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'digihel', 'USER': os.environ.get('DATABASE_USER', 'digihel'), } } # celery CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' BROKER_URL = 'redis://localhost:6379/0' # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'fi' TIME_ZONE = 'Europe/Helsinki' USE_I18N = True USE_L10N = True USE_TZ = True # # Authentication # AUTH_USER_MODEL = 'users.User' AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', 'allauth.account.auth_backends.AuthenticationBackend', ) SOCIALACCOUNT_PROVIDERS = { 'helsinki': { 'VERIFIED_EMAIL': True } } LOGIN_REDIRECT_URL = '/' ACCOUNT_LOGOUT_ON_GET = True SOCIALACCOUNT_ADAPTER = 
'helusers.providers.helsinki.provider.SocialAccountAdapter'


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/

STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'djangobower.finders.BowerFinder',
    'compressor.finders.CompressorFinder',
]

STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'

MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'

COMPRESS_PRECOMPILERS = (
    ('text/x-scss', 'django_libsass.SassCompiler'),
    ('text/coffeescript', 'coffee --compile --stdio'),
)
COMPRESS_CSS_FILTERS = (
    'compressor.filters.css_default.CssAbsoluteFilter',
    'django_compressor_autoprefixer.AutoprefixerFilter',
)
COMPRESS_ENABLED = True
COMPRESS_AUTOPREFIXER_BINARY = os.path.join(BASE_DIR, 'node_modules/.bin/postcss')

BOWER_PATH = os.path.join(BASE_DIR, 'node_modules/.bin/bower')
BOWER_COMPONENTS_ROOT = os.path.join(BASE_DIR, 'components/')
BOWER_INSTALLED_APPS = [
    'bourbon#4.2.7',
    'bootstrap-sass#3.3.7',
    'jquery#<3.0',
    'bootstrap-validator#0.11.5',
    'remarkable-bootstrap-notify#3.1.3',
    'matchHeight#0.7.0',
    'moment',
]

# Wagtail settings
WAGTAIL_SITE_NAME = "digihel"
SITE_ID = 1

WAGTAILADMIN_RICH_TEXT_EDITORS = {
    'default': {
        'WIDGET': 'digihel.tinymce.DigiHelTinyMCERichTextArea',
    },
}

if CI:
    # Use Elasticsearch in CI environments.
    # You can also use this as a template for your `local_settings` file.
    WAGTAILSEARCH_BACKENDS = {
        'default': {
            'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch',
            'URLS': ['http://localhost:9200'],
            'INDEX': 'digihel',
            'TIMEOUT': 5,
        },
    }

# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
f = os.path.join(BASE_DIR, "local_settings.py")
if os.path.exists(f):
    import sys
    import imp
    module_name = "%s.local_settings" % ROOT_URLCONF.split('.')[0]
    module = imp.new_module(module_name)
    module.__file__ = f
    sys.modules[module_name] = module
    exec(open(f, "rb").read())

if 'SECRET_KEY' not in locals():
    secret_file = os.path.join(BASE_DIR, '.django_secret')
    try:
        SECRET_KEY = open(secret_file).read().strip()
    except IOError:
        import random
        system_random = random.SystemRandom()
        try:
            keyspace = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
            SECRET_KEY = ''.join([system_random.choice(keyspace) for i in range(64)])
            secret = open(secret_file, 'w')
            os.chmod(secret_file, 0o0600)
            secret.write(SECRET_KEY)
            secret.close()
        except IOError:
            raise Exception('Please create a %s file with random characters '
                            'to generate your secret key!' % secret_file)
terotic/digihel
digihel/settings.py
Python
mit
6,952
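The secret-key bootstrap at the end of the settings module above is a reusable pattern: generate once with `SystemRandom`, persist with owner-only permissions, reuse on later runs. A minimal standalone sketch (`load_or_create_secret` is a hypothetical name):

import os
import random

def load_or_create_secret(path, length=64):
    # Reuse an existing key; otherwise generate one and persist it 0600.
    try:
        with open(path) as f:
            return f.read().strip()
    except IOError:
        keyspace = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        rng = random.SystemRandom()
        key = ''.join(rng.choice(keyspace) for _ in range(length))
        with open(path, 'w') as f:
            os.chmod(path, 0o600)
            f.write(key)
        return key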
TM = "TM" # if no targets except realizations from TM fundsAvailable = "fundsAvailable" itemsAvailable = "itemsAvailable" oid = "oid" side = "side" # events onDelta = "onDelta" deltaSide = "side" onBatchDeltas = "onBatchDeltas" onTrade = "onTrade" onBatchTrades = "onBatchTrades" onOrderbook = "onOrderbook" onTicker = "onTicker" onLag = "onLag" onTransactionsList = "onTransactionsList" onAccountBallance = "onAccountBalance" onOpenOrders = "onOpenOrders" onSynchedWithExchange = "onSynchedWithExchange" noSuchOrder = "noSuchOrder" asks = "asks" normAsk = 1 bids = "bids" normBid = 2 checkpoint = "checkpoint" deltas = "deltas" date = "date" lastTid = "lastTid" mode = "mode" market = "market" trades = "trades" container = "container" price = "price" amount = "amount" # features minAskChanged = "minAskChanged" maxBidChanged = "maxBidChanged" # dataWindow prevOrdb = "prevOrdb" lastSentCheckpoint = "lastSentCheckpoint" lastTid = "lastTid" deltas = "deltas" newTrades = "newTrades" # deltas added = "added" removed = "removed" changed = "changed" # filters spreadChanged = "spreadChanged" # activeStrategies marketOrder = "marketOrder" limitOrder = "limitOrder" # IPC queues QMainStream = "QMainStream" QRecorder = "QRecorder" # IPC containers ids reconnect = "reconnect" IPCOrderbook = "IPCOrderbook" IPCDelta = "IPCDelta" IPCTrade = "IPCTrade" # class Order - actions orderBuy = "orderBuy" orderSell = "orderSell" orderCancel = "orderCancel" orderPlace = "orderPlace" # class Account accountEnoughFunds = "enoughFunds" accountEnoughItems = "enoughItems" accountNotEnoughFunds = "notEnoughFunds" accountNotEnoughItems = "notEnoughItems" # class Strategies modeSingleMarket = "modeSingleMarket" modeMultiMarket = "modeMultiMarket"
jmakov/market_tia
tia/trad/tools/ipc/naming_conventions.py
Python
mit
1,741
import datetime import json import types from uuid import UUID import lazy_object_proxy from future.utils import iteritems from simpleflow.futures import Future def serialize_complex_object(obj): if isinstance( obj, bytes ): # Python 3 only (serialize_complex_object not called here in Python 2) return obj.decode("utf-8", errors="replace") if isinstance(obj, datetime.datetime): r = obj.isoformat() if obj.microsecond: r = r[:23] + r[26:] # milliseconds only if r.endswith("+00:00"): r = r[:-6] + "Z" return r elif isinstance(obj, datetime.date): return obj.isoformat() elif isinstance(obj, datetime.time): r = obj.isoformat() if obj.microsecond: r = r[:12] return r elif isinstance(obj, types.GeneratorType): return [i for i in obj] elif isinstance(obj, Future): return obj.result elif isinstance(obj, UUID): return str(obj) elif isinstance(obj, lazy_object_proxy.Proxy): return obj.__wrapped__ elif isinstance(obj, (set, frozenset)): return list(obj) raise TypeError( "Type %s couldn't be serialized. This is a bug in simpleflow," " please file a new issue on GitHub!" % type(obj) ) def _resolve_proxy(obj): if isinstance(obj, dict): return {k: _resolve_proxy(v) for k, v in iteritems(obj)} if isinstance(obj, (list, tuple)): return [_resolve_proxy(v) for v in obj] if isinstance(obj, lazy_object_proxy.Proxy): return str(obj) return obj def json_dumps(obj, pretty=False, compact=True, **kwargs): """ JSON dump to string. :param obj: :type obj: Any :param pretty: :type pretty: bool :param compact: :type compact: bool :return: :rtype: str """ if "default" not in kwargs: kwargs["default"] = serialize_complex_object if pretty: kwargs["indent"] = 4 kwargs["sort_keys"] = True kwargs["separators"] = (",", ": ") elif compact: kwargs["separators"] = (",", ":") kwargs["sort_keys"] = True try: return json.dumps(obj, **kwargs) except TypeError: # lazy_object_proxy.Proxy subclasses basestring: serialize_complex_object isn't called on python2 # and some versions of pypy obj = _resolve_proxy(obj) return json.dumps(obj, **kwargs) def json_loads_or_raw(data): """ Try to get a JSON object from a string. If this isn't JSON, return the raw string. :param data: string; should be in JSON format :return: JSON-decoded object or raw data """ if not data: return None try: return json.loads(data) except Exception: return data
botify-labs/simpleflow
simpleflow/utils/json_tools.py
Python
mit
2,806
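A small usage sketch of `json_dumps` above, assuming `simpleflow` is importable; in the default compact mode a datetime is rendered through `serialize_complex_object` and keys are sorted:

import datetime
from simpleflow.utils.json_tools import json_dumps

doc = {"at": datetime.datetime(2020, 1, 2, 3, 4, 5), "n": 3}
print(json_dumps(doc))
# {"at":"2020-01-02T03:04:05","n":3}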
#!/usr/bin/env python3
'''
Chapter 4 of Automate the Boring Stuff

The first assignment for this chapter is to write a program - comma_code.py
This program includes the function `comma_code(list)` to process one step of
the sequence. A shorter way to do this with join() - a function not yet
introduced - is sketched below.
'''
# ch4 problem - https://automatetheboringstuff.com/chapter4/
# Written by Jack Hayhurst


def comma_code(parts):
    '''comma_code joins the elements in a list with an Oxford comma'''
    if not parts:
        return ''
    if len(parts) == 1:
        return str(parts[0])
    output = ''
    for word in parts[:-1]:
        output += str(word)
        output += ', '
    output += 'and '
    output += str(parts[-1])
    return output

if __name__ == "__main__":
    print('there is no default action defined for this file')
jakdept/pythonbook
ch4/comma_code.py
Python
mit
765
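The join()-based variant the docstring above alludes to, as a sketch (`comma_code_join` is a hypothetical name):

def comma_code_join(parts):
    '''Join list elements with an Oxford comma using str.join().'''
    parts = [str(p) for p in parts]
    if len(parts) < 2:
        return ''.join(parts)
    return ', '.join(parts[:-1]) + ', and ' + parts[-1]

assert comma_code_join(['apples', 'bananas', 'tofu']) == 'apples, bananas, and tofu'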
# -*- coding: utf-8 -*- # Generated by Django 1.9 on 2017-01-11 10:05 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('servicos', '0001_initial'), ] operations = [ migrations.AlterModelOptions( name='consulta', options={'ordering': ['data', 'medico', 'paciente'], 'verbose_name': 'Consulta', 'verbose_name_plural': 'Consultas'}, ), migrations.AddField( model_name='consulta', name='data', field=models.DateField(blank=True, null=True, verbose_name='Data da Consulta'), ), migrations.AddField( model_name='consulta', name='hora', field=models.CharField(blank=True, max_length=10, null=True, verbose_name='Hora da Consulta'), ), ]
eduardoedson/scp
servicos/migrations/0002_auto_20170111_1005.py
Python
mit
892
# coding=utf-8
import functools


def game(function):
    """Decorator that requires a game to be chosen (stored in the
    `_game` attribute) before the wrapped method runs."""
    @functools.wraps(function)
    def _decorator(self, *args, **kwargs):
        if self._game is None:
            raise RuntimeError('No game chosen.')
        return function(self, *args, **kwargs)
    return _decorator
pyvim/barbot
barbot/decorators.py
Python
mit
304
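A usage sketch for the `game` guard above, assuming the `barbot` package is importable; the class and attribute values are illustrative:

from barbot.decorators import game

class Bar(object):
    def __init__(self):
        self._game = None

    @game
    def play(self):
        return 'playing {}'.format(self._game)

bar = Bar()
try:
    bar.play()                    # raises RuntimeError: no game chosen yet
except RuntimeError:
    pass
bar._game = 'darts'
assert bar.play() == 'playing darts'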
import random import numpy as np from ..common import w2tok from ..constants import EOW, UNK def _grouped(col, n): i, l, dst = 0, len(col), [] while i < l: dst.append(col[i: i + min(l - i, n)]) i += len(dst[-1]) return dst def make_generator(batch_size, dataset, to_samples, shuffle): max_words = max(dataset.n_words // batch_size, 2) sentences = dataset.sentences[:] if shuffle: random.shuffle(sentences) data_rows = _grouped([w for s in sentences for w in s], max_words)[0: batch_size] data_rows += [[]] * max(0, (batch_size - len(data_rows))) assert len(data_rows) == batch_size, str(len(data_rows)) num_batches = len(data_rows[0]) - 1 def generator(): while 1: for i in range(num_batches): batch = [] for row in data_rows: batch.append((row[i], row[i + 1]) if i < len(row) - 1 else None) assert any([any(samples) for samples in batch]), str(batch) yield to_samples(batch) n_samples = num_batches * batch_size return n_samples, generator() def initialize_c2w2c_data(dataset, batch_size, maxlen, V_C, shuffle=True, include_oov=True): cache = {} for word in dataset.vocabulary.tokens: is_oov = False token = w2tok(word, maxlen, pad=None) chars = np.zeros(shape=(maxlen,), dtype=np.int32) n_chars = 0 if len(word) + 1 > maxlen: is_oov = True for ch in token: if V_C.has(ch): chars[n_chars] = V_C.get_index(ch) + 1 n_chars += 1 else: is_oov = True for i in range(n_chars, maxlen): chars[i] = V_C.get_index(EOW) + 1 weights = np.array([1.] * n_chars + [0.] * (maxlen - n_chars), dtype=np.float32) cache[word] = chars, weights, n_chars, is_oov def to_samples(batch): ctx = np.zeros(shape=(batch_size, maxlen), dtype=np.int32) y_tm1 = np.zeros(shape=(batch_size, maxlen), dtype=np.int32) y = np.zeros(shape=(batch_size, maxlen), dtype=np.int32) y_w = np.zeros(shape=(batch_size, maxlen), dtype=np.float32) for i, sample in enumerate(batch): if sample is not None: w_t, w_tp1 = sample x_chars, _, n, _ = cache[w_t] y_chars, w, _, oov = cache[w_tp1] for k in range(n): ctx[i, k] = x_chars[k] np.copyto(y_tm1[i][1:], y_chars[0: -1]) np.copyto(y[i], y_chars - 1) if oov and not include_oov: continue np.copyto(y_w[i], w) # sparse_categorical_crossentropy requires y to have the same shape as model output y = np.expand_dims(y, -1) return {'context': ctx, 'y_tm1': y_tm1}, y, y_w def _make(): return make_generator(batch_size, dataset, to_samples, shuffle) n_oov = sum((1 if cache[w][3] else 0) for w in dataset.get_words()) oov_rate = 1. * n_oov / dataset.n_words return _make, oov_rate def initialize_word_lstm_data(dataset, batch_size, V_W, shuffle=True, include_oov=True): def is_oov(w): return not V_W.has(w) def to_samples(batch): ctx = np.zeros(shape=(batch_size, V_W.size), dtype=np.bool) y = np.zeros(shape=(batch_size,), dtype=np.int32) y_w = np.zeros(shape=(batch_size,), dtype=np.float32) for i, sample in enumerate(batch): if sample is not None: w_t, w_tp1 = sample w_t = UNK if is_oov(w_t) else w_t w_tp1 = UNK if is_oov(w_tp1) else w_tp1 y[i] = V_W.get_index(w_tp1) ctx[i, V_W.get_index(w_t)] = 1 if w_tp1 == UNK and not include_oov: continue y_w[i] = 1. # sparse_categorical_crossentropy requires y to have the same shape as model output y = np.expand_dims(y, -1) return ctx, y, y_w def _make(): return make_generator(batch_size, dataset, to_samples, shuffle) n_oov = sum((1 if is_oov(w) else 0) for w in dataset.get_words()) oov_rate = 1. * n_oov / dataset.n_words return _make, oov_rate
milankinen/c2w2c
src/dataset/generator.py
Python
mit
3,902
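`_grouped` above splits a sequence into consecutive chunks of at most `n` items; `make_generator` then keeps the first `batch_size` chunks as parallel rows. A standalone illustration of the chunking (the function body is copied from above):

def _grouped(col, n):
    i, l, dst = 0, len(col), []
    while i < l:
        dst.append(col[i: i + min(l - i, n)])
        i += len(dst[-1])
    return dst

assert _grouped([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]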
#!/usr/bin/env python3 """creates an MD-file.""" import configparser import os import platform import shutil import subprocess import sys import syslog import time import traceback from mausy5043libs.libdaemon3 import Daemon import mausy5043funcs.fileops3 as mf # constants DEBUG = False IS_JOURNALD = os.path.isfile('/bin/journalctl') MYID = "".join(list(filter(str.isdigit, os.path.realpath(__file__).split('/')[-1]))) MYAPP = os.path.realpath(__file__).split('/')[-3] MYAPPDIR = "/".join(list(filter(str, os.path.realpath(__file__).split('/')[:-2]))) NODE = os.uname()[1] # initialise logging syslog.openlog(ident=MYAPP, facility=syslog.LOG_LOCAL0) class MyDaemon(Daemon): """Override Daemon-class run() function.""" @staticmethod def run(): """Execute main loop.""" iniconf = configparser.ConfigParser() iniconf.read('/' + MYAPPDIR + '/config.ini') flock = iniconf.get(MYID, "lockfile") fdata = iniconf.get(MYID, "markdown") sample_time = iniconf.getint(MYID, "reporttime") / iniconf.getint(MYID, "samplespercycle") while True: try: start_time = time.time() do_markdown(flock, fdata) pause_time = sample_time - (time.time() - start_time) - (start_time % sample_time) if pause_time > 0: mf.syslog_trace("Waiting : {0}s".format(pause_time), False, DEBUG) mf.syslog_trace("................................", False, DEBUG) time.sleep(pause_time) except Exception: mf.syslog_trace("Unexpected error in run()", syslog.LOG_CRIT, DEBUG) mf.syslog_trace(traceback.format_exc(), syslog.LOG_CRIT, DEBUG) raise def do_markdown(flock, fdata): """Create a MarkDown file.""" uname = os.uname() branch_file = os.environ['HOME'] + "/.upsdiagd.branch" with open(branch_file, 'r') as file_handle: upsbranch = file_handle.read().strip('\n') mf.lock(flock) shutil.copyfile('/' + MYAPPDIR + '/default.md', fdata) with open(fdata, 'a') as file_handle: mf.syslog_trace("writing {0}".format(fdata), False, DEBUG) # ups13 and ups14 are disabled, because the current UPS (EATON) does not supply # usable data for these graphs # file_handle.write('![A GNUplot image should be here: ups13.png](img/ups13.png)\n') # file_handle.write('![A GNUplot image should be here: ups14.png](img/ups14.png)\n') file_handle.write('![A GNUplot image should be here: ups16.png](img/ups16.png)\n') file_handle.write('![A GNUplot image should be here: ups15.png](img/ups15.png)\n') file_handle.write('![A GNUplot image should be here: ups17.png](img/ups17.png)\n') # System ID file_handle.write('!!! ') file_handle.write(uname[0] + ' ' + uname[2] + ' ' + uname[3] + ' ' + uname[4] + ' ' + platform.platform() + ' \n') # branch file_handle.write('!!! upsdiagd on: ' + upsbranch + ' \n') file_handle.write('!!! ' + time.strftime("%Y.%m.%d %H:%M") + '\n\n') # upsc ups@localhost 2>/dev/null |grep -v "serial" upsc = str(subprocess.check_output(["upsc", "ups@localhost"]), 'utf-8').splitlines() file_handle.write('### UPS detail information\n\n') for ups_data in upsc: file_handle.write(ups_data + ' \n') mf.unlock(flock) if __name__ == "__main__": daemon = MyDaemon('/tmp/' + MYAPP + '/' + MYID + '.pid') # pylint: disable=C0103 if len(sys.argv) == 2: if sys.argv[1] == 'start': daemon.start() elif sys.argv[1] == 'stop': daemon.stop() elif sys.argv[1] == 'restart': daemon.restart() elif sys.argv[1] == 'debug': # assist with debugging. print("Debug-mode started. 
Use <Ctrl>+C to stop.")
            DEBUG = True
            mf.syslog_trace("Daemon logging is ON", syslog.LOG_DEBUG, DEBUG)
            daemon.run()
        else:
            print("Unknown command")
            sys.exit(2)
        sys.exit(0)
    else:
        print("usage: {0!s} start|stop|restart|debug".format(sys.argv[0]))
        sys.exit(2)
Mausy5043/upsdiagd
daemons/ups82d.py
Python
mit
3,978
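The pause computation in `run()` above does more than sleep a fixed interval: it phase-aligns wakeups to multiples of `sample_time`. Illustrative numbers (not taken from the program):

sample_time = 60.0
start_time = 123.4            # hypothetical time.time() at loop start
elapsed = 1.1                 # time spent in do_markdown()
pause = sample_time - elapsed - (start_time % sample_time)
assert abs(pause - 55.5) < 1e-9   # next wakeup at t = 180.0, a 60 s boundary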
import re PYTHON_SHEBANG_PATTERN = re.compile(r'#![\w /]*python') def read_header(filename): with open(filename, 'r') as f: return f.read(100) def has_python_shebang(filename): header = read_header(filename) return bool(PYTHON_SHEBANG_PATTERN.match(header)) def matches_any_pattern(filenames, patterns): matches = set() # I specifically loop patterns first so that we can put # cheaper checks in the front of the list and do those first for pattern in patterns: for filename in filenames: if filename in matches: continue if pattern(filename): matches.add(filename) continue return matches def python_source(filenames): patterns = [ lambda filename: filename.endswith('.py'), has_python_shebang, ] return matches_any_pattern(filenames, patterns)
EliRibble/mothermayi
mothermayi/files.py
Python
mit
899
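A sketch of `matches_any_pattern` above, assuming the `mothermayi` package is importable; only the cheap extension check is exercised here, because `has_python_shebang` reads real files from disk:

from mothermayi.files import matches_any_pattern

names = ['a.py', 'notes.txt', 'b.py']
hits = matches_any_pattern(names, [lambda f: f.endswith('.py')])
assert hits == {'a.py', 'b.py'}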
# -*- coding: utf-8 -*- import os import unittest # prepare for test os.environ['ANIMA_TEST_SETUP'] = "" from pymel import core as pm from anima.dcc.mayaEnv import ai2rs class Ai2RSTester(unittest.TestCase): """tests for anima.dcc.mayaEnv.ai2rs classes """ def setUp(self): """create the test setup """ # be sure that arnold and redshift is loaded if not pm.pluginInfo('mtoa', q=1, loaded=1): pm.loadPlugin('mtoa') if not pm.pluginInfo('redshift4maya', q=1, loaded=1): pm.loadPlugin('redshift4maya') def tearDown(self): """clean the test """ # create a new file pm.newFile(f=True) # delete any .rsmap in the test_data folder import os import glob test_data_path = os.path.abspath('./test_data/') for f in glob.glob('%s/*.rstexbin' % test_data_path): try: os.remove(f) except OSError: pass def test_conversion_of_ai_standard_to_red_shift_material_created(self): """test conversion of aiStandard material """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) # check if the material is created self.assertIsInstance( rs_material, pm.nt.RedshiftMaterial ) def test_conversion_of_ai_standard_to_red_shift_material_diffuse_properties(self): """test conversion of aiStandard material to RedshiftMaterial diffuse properties """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') diffuse_color = (1, 0.5, 0) diffuse_weight = 0.532 diffuse_roughness = 0.8 transl_weight = 0.25 diffuse_direct = 0.95 diffuse_indirect = 0.89 ai_standard.color.set(diffuse_color) ai_standard.Kd.set(diffuse_weight) ai_standard.diffuseRoughness.set(diffuse_roughness) ai_standard.Kb.set(transl_weight) ai_standard.directDiffuse.set(diffuse_direct) ai_standard.indirectDiffuse.set(diffuse_indirect) conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) # check diffuse properties self.assertAlmostEqual( rs_material.diffuse_color.get(), diffuse_color, places=3 ) self.assertAlmostEqual( rs_material.diffuse_weight.get(), diffuse_weight, places=3 ) self.assertAlmostEqual( rs_material.diffuse_roughness.get(), diffuse_roughness, places=3 ) self.assertAlmostEqual( rs_material.transl_weight.get(), transl_weight, places=3 ) self.assertAlmostEqual( rs_material.diffuse_direct.get(), diffuse_direct, places=3 ) self.assertAlmostEqual( rs_material.diffuse_indirect.get(), diffuse_indirect, places=3 ) def test_conversion_of_ai_standard_to_red_shift_material_specular_properties(self): """test conversion of aiStandard material to RedshiftMaterial specular properties """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') refl_color = (1, 0.5, 0) refl_weight = 0.532 refl_roughness = 0.8 refl_aniso = 0.25 refl_aniso_rotation = 0.5 refl_brdf = 1 refl_fresnel_mode = 1 refl_reflectivity = 0.01 refl_direct = 0.95 refl_indirect = 0.89 ai_standard.KsColor.set(refl_color) ai_standard.Ks.set(refl_weight) ai_standard.specularRoughness.set(refl_roughness) ai_standard.specularAnisotropy.set(refl_aniso) ai_standard.specularRotation.set(refl_aniso_rotation) ai_standard.specularDistribution.set(refl_brdf) ai_standard.specularFresnel.set(refl_fresnel_mode) ai_standard.Ksn.set(refl_reflectivity) ai_standard.directSpecular.set(refl_direct) ai_standard.indirectSpecular.set(refl_indirect) conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) # check specular properties 
self.assertAlmostEqual( rs_material.refl_color.get(), refl_color, places=3 ) self.assertAlmostEqual( rs_material.refl_weight.get(), refl_weight, places=3 ) self.assertAlmostEqual( rs_material.refl_roughness.get(), refl_roughness, places=3 ) self.assertAlmostEqual( rs_material.refl_aniso.get(), -0.5, places=3 ) self.assertAlmostEqual( rs_material.refl_reflectivity.get()[0], refl_reflectivity, places=3 ) self.assertAlmostEqual( rs_material.refl_reflectivity.get()[1], refl_reflectivity, places=3 ) self.assertAlmostEqual( rs_material.refl_reflectivity.get()[2], refl_reflectivity, places=3 ) self.assertEqual( rs_material.refl_edge_tint.get(), (1, 1, 1) ) self.assertAlmostEqual( rs_material.refl_direct.get(), refl_direct, places=3 ) self.assertAlmostEqual( rs_material.refl_indirect.get(), refl_indirect, places=3 ) def test_conversion_of_ai_standard_to_red_shift_material_refraction_properties(self): """test conversion of aiStandard material to RedshiftMaterial refraction properties """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') refr_color = (1, 0.5, 0) refr_weight = 0.532 refr_ior = 1.434 refr_abbe = 29.942196 refr_roughness = 0.8 refr_transmittance = (0.57, 0.34, 0.54) opacity_color = (0.5, 0.87, 0.12) ai_standard.KtColor.set(refr_color) ai_standard.Kt.set(refr_weight) ai_standard.FresnelUseIOR.set(0) ai_standard.IOR.set(refr_ior) ai_standard.dispersionAbbe.set(refr_abbe) ai_standard.refractionRoughness.set(refr_roughness) ai_standard.transmittance.set(refr_transmittance) ai_standard.opacity.set(opacity_color) conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) self.assertAlmostEqual( rs_material.refr_color.get()[0], refr_color[0], places=3 ) self.assertAlmostEqual( rs_material.refr_color.get()[1], refr_color[1], places=3 ) self.assertAlmostEqual( rs_material.refr_color.get()[2], refr_color[2], places=3 ) self.assertAlmostEqual( rs_material.refr_weight.get(), refr_weight, places=3 ) self.assertAlmostEqual(rs_material.refr_ior.get(), refr_ior, places=3) self.assertEqual(rs_material.refr_use_base_IOR.get(), 0) self.assertAlmostEqual( rs_material.refr_abbe.get(), refr_abbe, places=3 ) self.assertAlmostEqual( rs_material.refr_roughness.get(), refr_roughness, places=3 ) self.assertAlmostEqual( rs_material.refr_transmittance.get()[0], refr_transmittance[0], places=3 ) self.assertAlmostEqual( rs_material.refr_transmittance.get()[1], refr_transmittance[1], places=3 ) self.assertAlmostEqual( rs_material.refr_transmittance.get()[2], refr_transmittance[2], places=3 ) self.assertAlmostEqual( rs_material.opacity_color.get()[0], opacity_color[0], places=3 ) self.assertAlmostEqual( rs_material.opacity_color.get()[1], opacity_color[1], places=3 ) self.assertAlmostEqual( rs_material.opacity_color.get()[2], opacity_color[2], places=3 ) def test_conversion_of_ai_standard_to_red_shift_material_sss_properties(self): """test conversion of aiStandard material to RedshiftMaterial sss properties """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') ms_color0 = (1, 0.5, 0) ms_amount = 0.532 ms_radius0 = 1.434 emission_color = (0.57, 0.34, 0.54) emission_weight = 0.5 ai_standard.KsssColor.set(ms_color0) ai_standard.Ksss.set(ms_amount) ai_standard.sssRadius.set([ms_radius0, ms_radius0, ms_radius0]) ai_standard.emissionColor.set(emission_color) ai_standard.emission.set(emission_weight) conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) 
self.assertAlmostEqual( rs_material.ms_color0.get()[0], ms_color0[0], places=3 ) self.assertAlmostEqual( rs_material.ms_color0.get()[1], ms_color0[1], places=3 ) self.assertAlmostEqual( rs_material.ms_color0.get()[2], ms_color0[2], places=3 ) self.assertAlmostEqual( rs_material.ms_amount.get(), ms_amount, places=3 ) self.assertAlmostEqual( rs_material.ms_radius0.get(), ms_radius0, places=3 ) self.assertAlmostEqual( rs_material.emission_color.get()[0], emission_color[0], places=3 ) self.assertAlmostEqual( rs_material.emission_color.get()[1], emission_color[1], places=3 ) self.assertAlmostEqual( rs_material.emission_color.get()[2], emission_color[2], places=3 ) self.assertAlmostEqual( rs_material.emission_weight.get(), emission_weight, places=3 ) def test_conversion_of_ai_standard_to_red_shift_material_channels_with_textures(self): """test conversion of aiStandard material to RedshiftMaterial channels with textures """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') # create a diffuse texture file_node = pm.shadingNode('file', asTexture=1) file_node.outColor >> ai_standard.color conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) # now expect the corresponding channel to also have the same connection # from the file_node self.assertIn( file_node, rs_material.diffuse_color.inputs() ) def test_conversion_of_ai_standard_to_red_shift_material_bump_properties(self): """test conversion of aiStandard material to RedshiftMaterial channels with textures """ # create one aiStandard material ai_standard, ai_standardSG = pm.createSurfaceShader('aiStandard') # create a diffuse texture file_node = pm.shadingNode('file', asTexture=1) bump2d_node = pm.shadingNode('bump2d', asUtility=1) file_node.outAlpha >> bump2d_node.bumpValue bump2d_node.outNormal >> ai_standard.normalCamera conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(ai_standard) # now expect the corresponding channel to also have the same connection # from the file_node self.assertIn( bump2d_node, rs_material.bump_input.inputs() ) def test_node_type_is_not_on_the_spec_sheet(self): """testing if no error will be raised when the given node type is not on the conversion spec sheet """ # create a surface node node = pm.createNode('surface') conversion_man = ai2rs.ConversionManager() rs_material = conversion_man.convert(node) self.assertIsNone(rs_material) def test_texture_files_converted_to_rsmap(self): """testing if texture files are converted to rsmap files """ import os file_texture_node = pm.shadingNode('file', asTexture=1) texture_full_path = os.path.abspath('./test_data/texture.png') rstexbin_full_path = '%s.rstexbin' % \ os.path.splitext(texture_full_path)[0] file_texture_node.fileTextureName.set(texture_full_path) conversion_man = ai2rs.ConversionManager() conversion_man.convert(file_texture_node) self.assertTrue(os.path.exists(rstexbin_full_path)) def test_mesh_subdiv_attributes(self): """testing if mesh attributes are transferred correctly """ mesh_node = pm.createNode('mesh') # set arnold attributes mesh_node.aiSubdivType.set(1) mesh_node.aiSubdivIterations.set(2) mesh_node.aiSubdivAdaptiveSpace.set(1) mesh_node.aiDispAutobump.set(1) mesh_node.aiDispHeight.set(1.3) conversion_man = ai2rs.ConversionManager() conversion_man.convert(mesh_node) self.assertEqual(mesh_node.rsEnableSubdivision.get(), 1) self.assertEqual(mesh_node.rsMaxTessellationSubdivs.get(), 2) self.assertEqual(mesh_node.rsSubdivisionRule.get(), 0) 
self.assertEqual(mesh_node.rsScreenSpaceAdaptive.get(), 0) self.assertEqual(mesh_node.rsDoSmoothSubdivision.get(), 1) self.assertAlmostEqual( mesh_node.rsDisplacementScale.get(), 1.3, places=3 ) self.assertEqual(mesh_node.rsAutoBumpMap.get(), 1) class RedShiftTextureProcessorTester(unittest.TestCase): """tests for anima.dcc.mayaEnv.ai2rs.RedShiftTextureProcessor class """ def setUp(self): """create the test setup """ if not pm.pluginInfo('redshift4maya', q=1, loaded=1): pm.loadPlugin('redshift4maya') def tearDown(self): """clean the test """ # create a new file pm.newFile(f=True) # delete any .rsmap in the test_data folder import os import glob test_data_path = os.path.abspath('./test_data/') for f in glob.glob('%s/*.rstexbin' % test_data_path): try: os.remove(f) except OSError: pass def test_convert_is_working_properly(self): """testing if convert is working properly """ import os from anima.render.redshift import RedShiftTextureProcessor texture_full_path = os.path.abspath('./test_data/texture.png') rstp = RedShiftTextureProcessor(texture_full_path) result = rstp.convert() rsmap_full_path = os.path.abspath('./test_data/texture.rstexbin') self.assertEqual( result[0].replace('\\', '/'), rsmap_full_path.replace('\\', '/') ) self.assertTrue( os.path.exists(rsmap_full_path) )
eoyilmaz/anima
tests/dcc/maya/test_ai2rs.py
Python
mit
15,063
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Partially based on AboutMethods in the Ruby Koans # from runner.koan import * def my_global_function(a,b): return a + b class AboutMethods(Koan): def test_calling_a_global_function(self): self.assertEqual(5, my_global_function(2,3)) # NOTE: Wrong number of arguments is not a SYNTAX error, but a # runtime error. def test_calling_functions_with_wrong_number_of_arguments(self): try: my_global_function() except TypeError as exception: msg = exception.args[0] # Note, the text comparison works for Python 3.2 # It has changed in the past and may change in the future self.assertRegex(msg, r'my_global_function\(\) missing 2 required positional arguments') try: my_global_function(1, 2, 3) except Exception as e: msg = e.args[0] # Note, watch out for parenthesis. They need slashes in front! self.assertRegex(msg, r'my_global_function\(\) takes 2 positional arguments but 3 were given') # ------------------------------------------------------------------ def pointless_method(self, a, b): sum = a + b def test_which_does_not_return_anything(self): self.assertEqual(None, self.pointless_method(1, 2)) # Notice that methods accessed from class scope do not require # you to pass the first "self" argument? # ------------------------------------------------------------------ def method_with_defaults(self, a, b='default_value'): return [a, b] def test_calling_with_default_values(self): self.assertEqual([1, 'default_value'], self.method_with_defaults(1)) self.assertEqual([1, 2], self.method_with_defaults(1, 2)) # ------------------------------------------------------------------ def method_with_var_args(self, *args): return args def test_calling_with_variable_arguments(self): self.assertEqual((), self.method_with_var_args()) self.assertEqual(('one',), self.method_with_var_args('one')) self.assertEqual(('one', 'two'), self.method_with_var_args('one', 'two')) # ------------------------------------------------------------------ def function_with_the_same_name(self, a, b): return a + b def test_functions_without_self_arg_are_global_functions(self): def function_with_the_same_name(a, b): return a * b self.assertEqual(12, function_with_the_same_name(3,4)) def test_calling_methods_in_same_class_with_explicit_receiver(self): def function_with_the_same_name(a, b): return a * b self.assertEqual(7, self.function_with_the_same_name(3,4)) # ------------------------------------------------------------------ def another_method_with_the_same_name(self): return 10 link_to_overlapped_method = another_method_with_the_same_name def another_method_with_the_same_name(self): return 42 def test_that_old_methods_are_hidden_by_redefinitions(self): self.assertEqual(42, self.another_method_with_the_same_name()) def test_that_overlapped_method_is_still_there(self): self.assertEqual(10, self.link_to_overlapped_method()) # ------------------------------------------------------------------ def empty_method(self): pass def test_methods_that_do_nothing_need_to_use_pass_as_a_filler(self): self.assertEqual(None, self.empty_method()) def test_pass_does_nothing_at_all(self): "You" "shall" "not" pass self.assertEqual(True, "Still got to this line" != None) # ------------------------------------------------------------------ def one_line_method(self): return 'Madagascar' def test_no_indentation_required_for_one_line_statement_bodies(self): self.assertEqual('Madagascar', self.one_line_method()) # ------------------------------------------------------------------ def method_with_documentation(self): "A 
string placed at the beginning of a function is used for documentation" return "ok" def test_the_documentation_can_be_viewed_with_the_doc_method(self): self.assertRegex(self.method_with_documentation.__doc__, 'A string placed at the beginning of a function is used for documentation') # ------------------------------------------------------------------ class Dog: def name(self): return "Fido" def _tail(self): # Prefixing a method with an underscore implies private scope return "wagging" def __password(self): return 'password' # Genius! def test_calling_methods_in_other_objects(self): rover = self.Dog() self.assertEqual("Fido", rover.name()) def test_private_access_is_implied_but_not_enforced(self): rover = self.Dog() # This is a little rude, but legal self.assertEqual("wagging", rover._tail()) def test_attributes_with_double_underscore_prefixes_are_subject_to_name_mangling(self): rover = self.Dog() with self.assertRaises(AttributeError): password = rover.__password() # But this still is! self.assertEqual('password', rover._Dog__password()) # Name mangling exists to avoid name clash issues when subclassing. # It is not for providing effective access protection
gerardolopezduenas/python-koans-solutions
about_methods.py
Python
mit
5,504
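The name-mangling koan above can be reproduced outside the koan runner in a few lines:

class Dog(object):
    def __password(self):
        # Double-underscore names are mangled to _ClassName__name.
        return 'password'

rover = Dog()
assert rover._Dog__password() == 'password'   # the mangled name is reachable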
# -*- coding: utf-8 -*-
"""

Transforms: Vincent Data Class for Vega Transform types

"""
from __future__ import (print_function, division)
from .core import grammar, GrammarClass
from ._compat import str_types


class Transform(GrammarClass):
    """Container for Vega transform definitions

    As detailed in the Vega wiki:

    "A data transform performs operations on a data set prior to
    visualization. Common examples include filtering and grouping (e.g.,
    group data points with the same stock ticker for plotting as separate
    lines). All transform definitions must include a "type" parameter,
    which specifies the transform to apply. Each transform then has a set of
    transform-specific parameters."

    """

    @grammar(str_types)
    def type(value):
        """string: The type of transform to apply.

        The valid transform types are as follows: array, copy, facet,
        filter, flatten, formula, sort, stats, unique, zip, force, geo,
        geopath, link, pie, stack, treemap, wordcloud
        """
        valid_transforms = ['array', 'copy', 'facet', 'filter', 'flatten',
                            'formula', 'sort', 'stats', 'unique', 'zip',
                            'force', 'geo', 'geopath', 'link', 'pie',
                            'stack', 'treemap', 'wordcloud']

        if value not in valid_transforms:
            raise ValueError('Transform type must be'
                             ' one of {0}'.format(str(valid_transforms)))

    @grammar(list)
    def fields(value):
        """list: Can take data references or object references

        Only used if ``type`` is ``array`` or ``copy``
        """

    @grammar(grammar_type=str_types, grammar_name='from')
    def from_(value):
        """str: The name of the object to copy values from

        Only used if ``type`` is ``copy``
        """

    @grammar(grammar_type=(list,) + str_types, grammar_name='as')
    def as_(value):
        """list: The field names to copy the values to.

        Can be used with the following ``type``: ``copy``, ``unique``,
        ``zip``
        """

    @grammar(list)
    def keys(value):
        """list: Each key value corresponds to a single facet in the output.

        Only used if ``type`` is ``facet``
        """

    @grammar(str_types)
    def sort(value):
        """string: Optional for sorting facet values

        Only used if ``type`` is ``facet``
        """

    @grammar(str_types)
    def test(value):
        """string: A string containing a javascript filtering expression.

        Ex: d.data.y >= 3

        Only used if ``type`` is ``filter``
        """

    @grammar(str_types)
    def field(value):
        """string: Property name to store computed formula value.

        Only used if ``type`` is ``formula`` or ``unique``

        See: https://github.com/trifacta/vega/wiki/Data-Transforms#-formula
        """

    @grammar(str_types)
    def expr(value):
        """string: Javascript expression of a formula, referencing the data
        as d.

        Only used if ``type`` is ``formula``

        See: https://github.com/trifacta/vega/wiki/Data-Transforms#-formula
        """

    @grammar(str_types + (list,))
    def by(value):
        """str, list: a field or list of fields to sort. Can prepend with -
        to sort descending.

        Only used if ``type`` is ``sort``
        """

    @grammar(str_types)
    def value(value):
        """str: Field for which to compute statistics.

        Only used if ``type`` is ``stats``
        """

    @grammar(bool)
    def median(value):
        """boolean: If true, median statistic will also be computed.
Only used if ``type`` is stats`` """ @grammar(grammar_type=str_types, grammar_name='with') def with_(value): """string: Name of dataset to zip to current dataset Only used if ``type`` is ``zip`` """ @grammar(str_types) def key(value): """string: Primary dataset field to match to secondary data Only used if ``type`` is ``zip`` """ @grammar(grammar_type=str_types, grammar_name='withKey') def with_key(value): """string: Field in secondary dataset to match to primary Only used if ``type`` is ``zip`` """ @grammar((int, float,) + str_types) def default(value): """Default value to use if no matching key value is found for zip transformation""" @grammar(str_types) def links(value): """string: Name of link (edge) data set. To be used with ``force`` types """ @grammar(list) def size(value): """list: Dimensions of force layout To be used with ``force`` types """ @grammar(int) def iterations(value): """int: Number of iterations to run force directed layout. To be used with ``force`` types """ @grammar((int,) + str_types) def charge(value): """int or string: Strength of the charge each node exerts. To be used with ``force`` types """ @grammar(grammar_type=(int,) + str_types, grammar_name='linkDistance') def link_distance(value): """int or string: Determines lenght of the edges, in pixels. To be used with ``force`` types """ @grammar(grammar_type=(int,) + str_types, grammar_name='linkStrength') def link_strength(value): """int or string: Determines the tension of the edges. To be used with ``force`` types """ @grammar((int, float)) def friction(value): """int or float: Strength of friction force to stabilize layout To be used with ``force`` types """ @grammar((int, float)) def theta(value): """int or float: theta parameter for the Barnes-Hut algorithm. To be used with ``force`` types """ @grammar((int, float)) def gravity(value): """int or float: Strength of pseudo-gravity force To be used with ``force`` types """ @grammar((int, float)) def alpha(value): """int or float: "temperature" parameter to determine node position adjustment To be used with ``force`` types """ @grammar(str_types) def point(value): """string: Data field determining the points at which to stack. When stacked vertically, these are the x-coords. To be used with ``stack`` types """ @grammar(str_types) def height(value): """string: Data field determining thickness, or height of stacks. To be used with ``stack`` types """ @grammar(str_types) def offset(value): """string: Baseline offset style. Must be one of the following: ``zero``, ``silhouette``, ``wiggle``, ``expand`` To be used with ``stack`` types """ offsets = ['zero', 'silhouette', 'wiggle', 'expand'] if value not in offsets: raise ValueError('offset must be one of {0}'.format(offsets)) @grammar(str_types) def order(value): """str: The sort order for stack layers. Must be one of the following: ``default``, ``reverse``, ``inside-out`` To be used with ``stack`` types """ orders = ['default', 'reverse', 'inside-out'] if value not in orders: raise ValueError('order must be one of {0}'.format(orders)) @grammar(str_types) def projection(value): """str: Cartographic projection. Accepts any projection supported by the D3 projection plug-in: https://github.com/mbostock/d3/wiki/Geo-Projections """ @grammar(list) def center(value): """Center of the projection. Should be length=2""" if len(value) != 2: raise ValueError('len(center) must = 2') @grammar(list) def translate(value): """Translation of the projection. 
Should be length=2"""
        if len(value) != 2:
            raise ValueError('len(translate) must = 2')

    @grammar(int)
    def scale(value):
        """The scale of the projection"""
        if value < 0:
            raise ValueError('Scale cannot be negative.')

    @grammar(int)
    def rotate(value):
        """The rotation of the projection"""
        if value < 0:
            raise ValueError('The rotation cannot be negative.')
myusuf3/vincent
vincent/transforms.py
Python
mit
8,421
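A sketch of declaring a filter transform with the class above. This assumes the `grammar`-decorated names behave as validated, settable properties, which the validator bodies imply but this excerpt does not show:

from vincent.transforms import Transform

t = Transform()
t.type = 'filter'          # accepted: in the valid transform list
t.test = 'd.data.y >= 3'   # javascript filtering expression
try:
    t.type = 'bogus'
except ValueError:
    pass                   # rejected: not a valid transform type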
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import (
    TYPE_CHECKING,
    Union,
    Any,
    Dict,
    Mapping,
    Optional,
    List,
    Tuple
)

from azure.core import MatchConditions

from ._common_conversion import _transform_patch_to_cosmos_post
from ._models import UpdateMode, TransactionOperation
from ._serialize import _get_match_headers, _add_entity_properties, _prepare_key
from ._entity import TableEntity

if TYPE_CHECKING:
    from azure.core.pipeline.transport import HttpRequest
    import msrest
    from ._generated import models, AzureTable
    from ._generated._configuration import AzureTableConfiguration

EntityType = Union[TableEntity, Mapping[str, Any]]
OperationType = Union[TransactionOperation, str]
TransactionOperationType = Union[
    Tuple[OperationType, EntityType],
    Tuple[OperationType, EntityType, Mapping[str, Any]],
]


class TableBatchOperations(object):
    """
    This is the class that is used for batch operations for the data tables
    service.

    The Tables service supports batch transactions on entities that are in the
    same table and belong to the same partition group. Multiple operations are
    supported within a single transaction. The batch can include at most 100
    entities, and its total payload may be no more than 4 MB in size.
    """

    def __init__(
        self,
        client,  # type: AzureTable
        serializer,  # type: msrest.Serializer
        deserializer,  # type: msrest.Deserializer
        config,  # type: AzureTableConfiguration
        table_name,  # type: str
        is_cosmos_endpoint=False,  # type: bool
        **kwargs  # type: Dict[str, Any]
    ):
        """Create a new batch of operations for the given table.

        :param client: an AzureTable object
        :type client: AzureTable
        :param serializer: serializer object for request serialization
        :type serializer: msrest.Serializer
        :param deserializer: deserializer object for request serialization
        :type deserializer: msrest.Deserializer
        :param config: Azure Table Configuration object
        :type config: AzureTableConfiguration
        :param table_name: name of the Table to perform operations on
        :type table_name: str
        :returns: None
        """
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
        self._is_cosmos_endpoint = is_cosmos_endpoint
        self.table_name = table_name

        self._partition_key = kwargs.pop("partition_key", None)
        self.requests = []  # type: List[HttpRequest]

    def __len__(self):
        return len(self.requests)

    def _verify_partition_key(
        self, entity  # type: EntityType
    ):
        # type: (...) -> None
        if self._partition_key is None:
            self._partition_key = entity["PartitionKey"]
        elif entity["PartitionKey"] != self._partition_key:
            raise ValueError("Partition Keys must all be the same")

    def add_operation(self, operation):
        # type: (TransactionOperationType) -> None
        """Add a single operation to a batch."""
        try:
            operation_type, entity, kwargs = operation  # type: ignore
        except ValueError:
            operation_type, entity, kwargs = operation[0], operation[1], {}  # type: ignore
        try:
            getattr(self, operation_type.lower())(entity, **kwargs)
        except AttributeError:
            raise ValueError("Unrecognized operation: {}".format(operation))

    def create(
        self,
        entity,  # type: EntityType
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Adds an insert operation to the current batch.

        :param entity: The properties for the table entity.
        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
        :return: None
        :rtype: None
        :raises ValueError:

        .. admonition:: Example:

            .. literalinclude:: ../samples/sample_batching.py
                :start-after: [START batching]
                :end-before: [END batching]
                :language: python
                :dedent: 8
                :caption: Creating and adding an entity to a Table
        """
        self._verify_partition_key(entity)
        temp = entity.copy()  # type: ignore

        if "PartitionKey" in temp and "RowKey" in temp:
            temp = _add_entity_properties(temp)
        else:
            raise ValueError("PartitionKey and/or RowKey were not provided in entity")
        self._batch_create_entity(table=self.table_name, entity=temp, **kwargs)

    def _batch_create_entity(
        self,
        table,  # type: str
        entity,  # type: EntityType
        timeout=None,  # type: Optional[int]
        request_id_parameter=None,  # type: Optional[str]
        response_preference="return-no-content",  # type: Optional[Union[str, "models.ResponseFormat"]]
        query_options=None,  # type: Optional["models.QueryOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Adds an insert operation to the batch. See
        :func:`azure.data.tables.TableClient.insert_entity` for more information
        on insert operations.

        The operation will not be executed until the batch is committed.

        :param table: The table to perform the operation on
        :type table: str
        :param entity: The entity to insert. Can be a dict or an entity object.
            Must contain a PartitionKey and a RowKey.
        :type entity: dict or :class:`~azure.data.tables.models.Entity`
        """
        _format = None
        if query_options is not None:
            _format = query_options.format
        data_service_version = "3.0"
        content_type = kwargs.pop("content_type", "application/json;odata=nometadata")
        accept = "application/json;odata=minimalmetadata"

        # Construct URL
        url = self._batch_create_entity.metadata["url"]  # type: ignore
        path_format_arguments = {
            "url": self._serialize.url(
                "self._config.url", self._config.url, "str", skip_quote=True
            ),
            "table": self._serialize.url("table", table, "str"),
        }
        url = self._client._client.format_url(  # pylint: disable=protected-access
            url, **path_format_arguments
        )

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if timeout is not None:
            query_parameters["timeout"] = self._serialize.query(
                "timeout", timeout, "int", minimum=0
            )
        if _format is not None:
            query_parameters["$format"] = self._serialize.query(
                "format", _format, "str"
            )

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters["x-ms-version"] = self._serialize.header(
            "self._config.version", self._config.version, "str"
        )
        if request_id_parameter is not None:
            header_parameters["x-ms-client-request-id"] = self._serialize.header(
                "request_id_parameter", request_id_parameter, "str"
            )
        header_parameters["DataServiceVersion"] = self._serialize.header(
            "data_service_version", data_service_version, "str"
        )
        if response_preference is not None:
            header_parameters["Prefer"] = self._serialize.header(
                "response_preference", response_preference, "str"
            )
        header_parameters["Content-Type"] = self._serialize.header(
            "content_type", content_type, "str"
        )
        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")

        body_content_kwargs = {}  # type: Dict[str, Any]
        if entity is not None:
            body_content = self._serialize.body(entity, "{object}")
        else:
            body_content = None
        body_content_kwargs["content"] = body_content
        request = self._client._client.post(  # pylint: disable=protected-access
            url, query_parameters, header_parameters, **body_content_kwargs
        )
        self.requests.append(request)

    _batch_create_entity.metadata = {"url": "/{table}"}  # type: ignore

    def update(
        self,
        entity,  # type: EntityType
        mode=UpdateMode.MERGE,  # type: Union[str, UpdateMode]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Adds an update operation to the current batch.

        :param entity: The properties for the table entity.
        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
        :param mode: Merge or Replace entity
        :type mode: :class:`~azure.data.tables.UpdateMode`
        :keyword str etag: Etag of the entity
        :keyword match_condition: MatchCondition
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ValueError:

        .. admonition:: Example:

            .. literalinclude:: ../samples/sample_batching.py
                :start-after: [START batching]
                :end-before: [END batching]
                :language: python
                :dedent: 8
                :caption: Adding an update operation to a batch
        """
        self._verify_partition_key(entity)
        temp = entity.copy()  # type: ignore

        match_condition = kwargs.pop("match_condition", None)
        etag = kwargs.pop("etag", None)
        if match_condition and not etag:
            try:
                etag = entity.metadata.get("etag", None)  # type: ignore
            except (AttributeError, TypeError):
                pass
        if_match = _get_match_headers(
            etag=etag,
            match_condition=match_condition or MatchConditions.Unconditionally,
        )

        partition_key = _prepare_key(temp["PartitionKey"])
        row_key = _prepare_key(temp["RowKey"])
        temp = _add_entity_properties(temp)
        if mode == UpdateMode.REPLACE:
            self._batch_update_entity(
                table=self.table_name,
                partition_key=partition_key,
                row_key=row_key,
                if_match=if_match,
                table_entity_properties=temp,
                **kwargs
            )
        elif mode == UpdateMode.MERGE:
            self._batch_merge_entity(
                table=self.table_name,
                partition_key=partition_key,
                row_key=row_key,
                if_match=if_match,
                table_entity_properties=temp,
                **kwargs
            )
        else:
            raise ValueError("Mode type '{}' is not supported.".format(mode))

    def _batch_update_entity(
        self,
        table,  # type: str
        partition_key,  # type: str
        row_key,  # type: str
        timeout=None,  # type: Optional[int]
        request_id_parameter=None,  # type: Optional[str]
        if_match=None,  # type: Optional[str]
        table_entity_properties=None,  # type: Optional[EntityType]
        query_options=None,  # type: Optional["models.QueryOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Update entity in a table.

        :param table: The name of the table.
        :type table: str
        :param partition_key: The partition key of the entity.
        :type partition_key: str
        :param row_key: The row key of the entity.
        :type row_key: str
        :param timeout: The timeout parameter is expressed in seconds.
        :type timeout: int
        :param request_id_parameter: Provides a client-generated, opaque value
            with a 1 KB character limit that is recorded in the analytics logs
            when analytics logging is enabled.
        :type request_id_parameter: str
        :param if_match: Match condition for an entity to be updated. If
            specified and a matching entity is not found, an error will be
            raised. To force an unconditional update, set to the wildcard
            character (*). If not specified, an insert will be performed when
            no existing entity is found to update and a replace will be
            performed if an existing entity is found.
        :type if_match: str
        :param table_entity_properties: The properties for the table entity.
        :type table_entity_properties: dict[str, object]
        :param query_options: Parameter group.
        :type query_options: ~azure.data.tables.models.QueryOptions
        :return: None
        :rtype: None
        """
        _format = None
        if query_options is not None:
            _format = query_options.format
        data_service_version = "3.0"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._batch_update_entity.metadata["url"]  # type: ignore
        path_format_arguments = {
            "url": self._serialize.url(
                "self._config.url", self._config.url, "str", skip_quote=True
            ),
            "table": self._serialize.url("table", table, "str"),
            "partitionKey": self._serialize.url("partition_key", partition_key, "str"),
            "rowKey": self._serialize.url("row_key", row_key, "str"),
        }
        url = self._client._client.format_url(  # pylint: disable=protected-access
            url, **path_format_arguments
        )

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if timeout is not None:
            query_parameters["timeout"] = self._serialize.query(
                "timeout", timeout, "int", minimum=0
            )
        if _format is not None:
            query_parameters["$format"] = self._serialize.query(
                "format", _format, "str"
            )

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters["x-ms-version"] = self._serialize.header(
            "self._config.version", self._config.version, "str"
        )
        if request_id_parameter is not None:
            header_parameters["x-ms-client-request-id"] = self._serialize.header(
                "request_id_parameter", request_id_parameter, "str"
            )
        header_parameters["DataServiceVersion"] = self._serialize.header(
            "data_service_version", data_service_version, "str"
        )
        if if_match is not None:
            header_parameters["If-Match"] = self._serialize.header(
                "if_match", if_match, "str"
            )
        header_parameters["Content-Type"] = self._serialize.header(
            "content_type", content_type, "str"
        )
        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")

        body_content_kwargs = {}  # type: Dict[str, Any]
        if table_entity_properties is not None:
            body_content = self._serialize.body(table_entity_properties, "{object}")
        else:
            body_content = None
        body_content_kwargs["content"] = body_content
        request = self._client._client.put(  # pylint: disable=protected-access
            url, query_parameters, header_parameters, **body_content_kwargs
        )
        self.requests.append(request)

    _batch_update_entity.metadata = {  # type: ignore
        "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')"
    }

    def _batch_merge_entity(
        self,
        table,  # type: str
        partition_key,  # type: str
        row_key,  # type: str
        timeout=None,  # type: Optional[int]
        request_id_parameter=None,  # type: Optional[str]
        if_match=None,  # type: Optional[str]
        table_entity_properties=None,  # type: Optional[EntityType]
        query_options=None,  # type: Optional["models.QueryOptions"]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Merge entity in a table.

        :param table: The name of the table.
        :type table: str
        :param partition_key: The partition key of the entity.
        :type partition_key: str
        :param row_key: The row key of the entity.
        :type row_key: str
        :param timeout: The timeout parameter is expressed in seconds.
        :type timeout: int
        :param request_id_parameter: Provides a client-generated, opaque value
            with a 1 KB character limit that is recorded in the analytics logs
            when analytics logging is enabled.
        :type request_id_parameter: str
        :param if_match: Match condition for an entity to be updated. If
            specified and a matching entity is not found, an error will be
            raised. To force an unconditional update, set to the wildcard
            character (*). If not specified, an insert will be performed when
            no existing entity is found to update and a merge will be performed
            if an existing entity is found.
        :type if_match: str
        :param table_entity_properties: The properties for the table entity.
        :type table_entity_properties: dict[str, object]
        :param query_options: Parameter group.
        :type query_options: ~azure.data.tables.models.QueryOptions
        :return: None
        :rtype: None
        """
        _format = None
        if query_options is not None:
            _format = query_options.format
        data_service_version = "3.0"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._batch_merge_entity.metadata["url"]  # type: ignore
        path_format_arguments = {
            "url": self._serialize.url(
                "self._config.url", self._config.url, "str", skip_quote=True
            ),
            "table": self._serialize.url("table", table, "str"),
            "partitionKey": self._serialize.url("partition_key", partition_key, "str"),
            "rowKey": self._serialize.url("row_key", row_key, "str"),
        }
        url = self._client._client.format_url(  # pylint: disable=protected-access
            url, **path_format_arguments
        )

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if timeout is not None:
            query_parameters["timeout"] = self._serialize.query(
                "timeout", timeout, "int", minimum=0
            )
        if _format is not None:
            query_parameters["$format"] = self._serialize.query(
                "format", _format, "str"
            )

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters["x-ms-version"] = self._serialize.header(
            "self._config.version", self._config.version, "str"
        )
        if request_id_parameter is not None:
            header_parameters["x-ms-client-request-id"] = self._serialize.header(
                "request_id_parameter", request_id_parameter, "str"
            )
        header_parameters["DataServiceVersion"] = self._serialize.header(
            "data_service_version", data_service_version, "str"
        )
        if if_match is not None:
            header_parameters["If-Match"] = self._serialize.header(
                "if_match", if_match, "str"
            )
        header_parameters["Content-Type"] = self._serialize.header(
            "content_type", content_type, "str"
        )
        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")

        body_content_kwargs = {}  # type: Dict[str, Any]
        if table_entity_properties is not None:
            body_content = self._serialize.body(table_entity_properties, "{object}")
        else:
            body_content = None
        body_content_kwargs["content"] = body_content
        request = self._client._client.patch(  # pylint: disable=protected-access
            url, query_parameters, header_parameters, **body_content_kwargs
        )
        if self._is_cosmos_endpoint:
            _transform_patch_to_cosmos_post(request)
        self.requests.append(request)

    _batch_merge_entity.metadata = {  # type: ignore
        "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')"
    }

    def delete(
        self,
        entity,  # type: EntityType
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Adds a delete operation to the current batch.

        :param entity: The properties for the table entity.
        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
        :keyword str etag: Etag of the entity
        :keyword match_condition: MatchCondition
        :paramtype match_condition: ~azure.core.MatchConditions
        :raises ValueError:

        .. admonition:: Example:

            .. literalinclude:: ../samples/sample_batching.py
                :start-after: [START batching]
                :end-before: [END batching]
                :language: python
                :dedent: 8
                :caption: Adding a delete operation to a batch
        """
        self._verify_partition_key(entity)
        temp = entity.copy()  # type: ignore
        partition_key = _prepare_key(temp["PartitionKey"])
        row_key = _prepare_key(temp["RowKey"])

        match_condition = kwargs.pop("match_condition", None)
        etag = kwargs.pop("etag", None)
        if match_condition and not etag:
            try:
                etag = entity.metadata.get("etag", None)  # type: ignore
            except (AttributeError, TypeError):
                pass
        if_match = _get_match_headers(
            etag=etag,
            match_condition=match_condition or MatchConditions.Unconditionally,
        )

        self._batch_delete_entity(
            table=self.table_name,
            partition_key=partition_key,
            row_key=row_key,
            if_match=if_match,
            **kwargs
        )

    def _batch_delete_entity(
        self,
        table,  # type: str
        partition_key,  # type: str
        row_key,  # type: str
        if_match,  # type: str
        timeout=None,  # type: Optional[int]
        request_id_parameter=None,  # type: Optional[str]
        query_options=None,  # type: Optional["models.QueryOptions"]
    ):
        # type: (...) -> None
        """Deletes the specified entity in a table.

        :param table: The name of the table.
        :type table: str
        :param partition_key: The partition key of the entity.
        :type partition_key: str
        :param row_key: The row key of the entity.
        :type row_key: str
        :param if_match: Match condition for an entity to be deleted. If
            specified and a matching entity is not found, an error will be
            raised. To force an unconditional delete, set to the wildcard
            character (*).
        :type if_match: str
        :param timeout: The timeout parameter is expressed in seconds.
        :type timeout: int
        :param request_id_parameter: Provides a client-generated, opaque value
            with a 1 KB character limit that is recorded in the analytics logs
            when analytics logging is enabled.
        :type request_id_parameter: str
        :param query_options: Parameter group.
        :type query_options: ~azure.data.tables.models.QueryOptions
        :return: None
        :rtype: None
        """
        _format = None
        if query_options is not None:
            _format = query_options.format
        data_service_version = "3.0"
        accept = "application/json;odata=minimalmetadata"

        # Construct URL
        url = self._batch_delete_entity.metadata["url"]  # type: ignore
        path_format_arguments = {
            "url": self._serialize.url(
                "self._config.url", self._config.url, "str", skip_quote=True
            ),
            "table": self._serialize.url("table", table, "str"),
            "partitionKey": self._serialize.url("partition_key", partition_key, "str"),
            "rowKey": self._serialize.url("row_key", row_key, "str"),
        }
        url = self._client._client.format_url(  # pylint: disable=protected-access
            url, **path_format_arguments
        )

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if timeout is not None:
            query_parameters["timeout"] = self._serialize.query(
                "timeout", timeout, "int", minimum=0
            )
        if _format is not None:
            query_parameters["$format"] = self._serialize.query(
                "format", _format, "str"
            )

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters["x-ms-version"] = self._serialize.header(
            "self._config.version", self._config.version, "str"
        )
        if request_id_parameter is not None:
            header_parameters["x-ms-client-request-id"] = self._serialize.header(
                "request_id_parameter", request_id_parameter, "str"
            )
        header_parameters["DataServiceVersion"] = self._serialize.header(
            "data_service_version", data_service_version, "str"
        )
        header_parameters["If-Match"] = self._serialize.header(
            "if_match", if_match, "str"
        )
        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")

        request = self._client._client.delete(  # pylint: disable=protected-access
            url, query_parameters, header_parameters
        )
        self.requests.append(request)

    _batch_delete_entity.metadata = {  # type: ignore
        "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')"
    }

    def upsert(
        self,
        entity,  # type: EntityType
        mode=UpdateMode.MERGE,  # type: Union[str, UpdateMode]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Adds an upsert (update/merge) operation to the batch.

        :param entity: The properties for the table entity.
        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
        :param mode: Merge or Replace entity
        :type mode: :class:`~azure.data.tables.UpdateMode`
        :raises ValueError:

        .. admonition:: Example:

            .. literalinclude:: ../samples/sample_batching.py
                :start-after: [START batching]
                :end-before: [END batching]
                :language: python
                :dedent: 8
                :caption: Adding an upsert operation to a batch
        """
        self._verify_partition_key(entity)
        temp = entity.copy()  # type: ignore

        partition_key = _prepare_key(temp["PartitionKey"])
        row_key = _prepare_key(temp["RowKey"])
        temp = _add_entity_properties(temp)

        if mode == UpdateMode.MERGE:
            self._batch_merge_entity(
                table=self.table_name,
                partition_key=partition_key,
                row_key=row_key,
                table_entity_properties=temp,
                **kwargs
            )
        elif mode == UpdateMode.REPLACE:
            self._batch_update_entity(
                table=self.table_name,
                partition_key=partition_key,
                row_key=row_key,
                table_entity_properties=temp,
                **kwargs
            )
        else:
            raise ValueError("Mode type '{}' is not supported.".format(mode))
Azure/azure-sdk-for-python
sdk/tables/azure-data-tables/azure/data/tables/_table_batch.py
Python
mit
27,928
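For context, a short hedged usage sketch of the batch class above: in the public azure-data-tables API these operation tuples are normally handed to TableClient.submit_transaction, which drives add_operation() internally. The connection string and table name below are placeholders, not values from the source.

# Hedged sketch: submitting a batch through the public TableClient API.
from azure.data.tables import TableClient, UpdateMode

client = TableClient.from_connection_string(
    "<connection-string>", table_name="mytable"
)
operations = [
    # All operations in one batch must share a PartitionKey
    # (enforced by _verify_partition_key above).
    ("create", {"PartitionKey": "pk", "RowKey": "rk1", "value": 1}),
    ("upsert", {"PartitionKey": "pk", "RowKey": "rk2", "value": 2}),
    # An optional third element carries per-operation kwargs,
    # matching the tuple shapes in TransactionOperationType.
    ("update", {"PartitionKey": "pk", "RowKey": "rk2", "value": 3},
     {"mode": UpdateMode.REPLACE}),
]
client.submit_transaction(operations)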
""" WSGI config for agt project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "agt.settings") application = get_wsgi_application()
ArtGeekTech/Coding-Project-STEM-OPT-01
agt/wsgi.py
Python
mit
383
# -*- coding: utf-8 -*-

nick = ''      # Your chosen default nickname
second = ''    # Backup nick (if the first happens to be taken)
username = ''
realname = ''
gentoomen/Pymn
config.py
Python
mit
152
class Solution(object):
    def subsetsWithDup(self, nums):
        """
        :type nums: List[int]
        :rtype: List[List[int]]
        """
        res = [[]]
        nums = sorted(nums)
        last_length = 0
        for i in range(len(nums)):
            if i == 0 or nums[i] != nums[i - 1]:
                # First occurrence of a value: remember how many subsets
                # existed before it, then extend every one of them.
                last_length = len(res)
                res += [x + [nums[i]] for x in res]
            else:
                # Duplicate value: only extend the subsets created in the
                # previous step, so no subset is generated twice.
                res += [x + [nums[i]] for x in res[-last_length:]]
        return res
Hehwang/Leetcode-Python
code/090 Subsets II.py
Python
mit
466
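A quick sanity check for the solution above, using the standard LeetCode sample input (the expected output comment reflects the order this implementation produces):

print(Solution().subsetsWithDup([1, 2, 2]))
# -> [[], [1], [2], [1, 2], [2, 2], [1, 2, 2]]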
"""Restricted execution facilities. The class RExec exports methods r_exec(), r_eval(), r_execfile(), and r_import(), which correspond roughly to the built-in operations exec, eval(), execfile() and import, but executing the code in an environment that only exposes those built-in operations that are deemed safe. To this end, a modest collection of 'fake' modules is created which mimics the standard modules by the same names. It is a policy decision which built-in modules and operations are made available; this module provides a reasonable default, but derived classes can change the policies e.g. by overriding or extending class variables like ok_builtin_modules or methods like make_sys(). XXX To do: - r_open should allow writing tmp dir - r_exec etc. with explicit globals/locals? (Use rexec("exec ... in ...")?) """ import sys import __builtin__ import os import ihooks __all__ = ["RExec"] class FileBase: ok_file_methods = ('fileno', 'flush', 'isatty', 'read', 'readline', 'readlines', 'seek', 'tell', 'write', 'writelines') class FileWrapper(FileBase): # XXX This is just like a Bastion -- should use that! def __init__(self, f): self.f = f for m in self.ok_file_methods: if not hasattr(self, m) and hasattr(f, m): setattr(self, m, getattr(f, m)) def close(self): self.flush() TEMPLATE = """ def %s(self, *args): return apply(getattr(self.mod, self.name).%s, args) """ class FileDelegate(FileBase): def __init__(self, mod, name): self.mod = mod self.name = name for m in FileBase.ok_file_methods + ('close',): exec TEMPLATE % (m, m) class RHooks(ihooks.Hooks): def __init__(self, *args): # Hacks to support both old and new interfaces: # old interface was RHooks(rexec[, verbose]) # new interface is RHooks([verbose]) verbose = 0 rexec = None if args and type(args[-1]) == type(0): verbose = args[-1] args = args[:-1] if args and hasattr(args[0], '__class__'): rexec = args[0] args = args[1:] if args: raise TypeError, "too many arguments" ihooks.Hooks.__init__(self, verbose) self.rexec = rexec def set_rexec(self, rexec): # Called by RExec instance to complete initialization self.rexec = rexec def is_builtin(self, name): return self.rexec.is_builtin(name) def init_builtin(self, name): m = __import__(name) return self.rexec.copy_except(m, ()) def init_frozen(self, name): raise SystemError, "don't use this" def load_source(self, *args): raise SystemError, "don't use this" def load_compiled(self, *args): raise SystemError, "don't use this" def load_package(self, *args): raise SystemError, "don't use this" def load_dynamic(self, name, filename, file): return self.rexec.load_dynamic(name, filename, file) def add_module(self, name): return self.rexec.add_module(name) def modules_dict(self): return self.rexec.modules def default_path(self): return self.rexec.modules['sys'].path # XXX Backwards compatibility RModuleLoader = ihooks.FancyModuleLoader RModuleImporter = ihooks.ModuleImporter class RExec(ihooks._Verbose): """Restricted Execution environment.""" ok_path = tuple(sys.path) # That's a policy decision ok_builtin_modules = ('audioop', 'array', 'binascii', 'cmath', 'errno', 'imageop', 'marshal', 'math', 'md5', 'operator', 'parser', 'regex', 'pcre', 'rotor', 'select', 'sha', '_sre', 'strop', 'struct', 'time') ok_posix_names = ('error', 'fstat', 'listdir', 'lstat', 'readlink', 'stat', 'times', 'uname', 'getpid', 'getppid', 'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid') ok_sys_names = ('ps1', 'ps2', 'copyright', 'version', 'platform', 'exit', 'maxint') nok_builtin_names = ('open', 'file', 'reload', 
'__import__') def __init__(self, hooks = None, verbose = 0): ihooks._Verbose.__init__(self, verbose) # XXX There's a circular reference here: self.hooks = hooks or RHooks(verbose) self.hooks.set_rexec(self) self.modules = {} self.ok_dynamic_modules = self.ok_builtin_modules list = [] for mname in self.ok_builtin_modules: if mname in sys.builtin_module_names: list.append(mname) self.ok_builtin_modules = tuple(list) self.set_trusted_path() self.make_builtin() self.make_initial_modules() # make_sys must be last because it adds the already created # modules to its builtin_module_names self.make_sys() self.loader = RModuleLoader(self.hooks, verbose) self.importer = RModuleImporter(self.loader, verbose) def set_trusted_path(self): # Set the path from which dynamic modules may be loaded. # Those dynamic modules must also occur in ok_builtin_modules self.trusted_path = filter(os.path.isabs, sys.path) def load_dynamic(self, name, filename, file): if name not in self.ok_dynamic_modules: raise ImportError, "untrusted dynamic module: %s" % name if sys.modules.has_key(name): src = sys.modules[name] else: import imp src = imp.load_dynamic(name, filename, file) dst = self.copy_except(src, []) return dst def make_initial_modules(self): self.make_main() self.make_osname() # Helpers for RHooks def is_builtin(self, mname): return mname in self.ok_builtin_modules # The make_* methods create specific built-in modules def make_builtin(self): m = self.copy_except(__builtin__, self.nok_builtin_names) m.__import__ = self.r_import m.reload = self.r_reload m.open = m.file = self.r_open def make_main(self): m = self.add_module('__main__') def make_osname(self): osname = os.name src = __import__(osname) dst = self.copy_only(src, self.ok_posix_names) dst.environ = e = {} for key, value in os.environ.items(): e[key] = value def make_sys(self): m = self.copy_only(sys, self.ok_sys_names) m.modules = self.modules m.argv = ['RESTRICTED'] m.path = map(None, self.ok_path) m.exc_info = self.r_exc_info m = self.modules['sys'] l = self.modules.keys() + list(self.ok_builtin_modules) l.sort() m.builtin_module_names = tuple(l) # The copy_* methods copy existing modules with some changes def copy_except(self, src, exceptions): dst = self.copy_none(src) for name in dir(src): setattr(dst, name, getattr(src, name)) for name in exceptions: try: delattr(dst, name) except AttributeError: pass return dst def copy_only(self, src, names): dst = self.copy_none(src) for name in names: try: value = getattr(src, name) except AttributeError: continue setattr(dst, name, value) return dst def copy_none(self, src): m = self.add_module(src.__name__) m.__doc__ = src.__doc__ return m # Add a module -- return an existing module or create one def add_module(self, mname): if self.modules.has_key(mname): return self.modules[mname] self.modules[mname] = m = self.hooks.new_module(mname) m.__builtins__ = self.modules['__builtin__'] return m # The r* methods are public interfaces def r_exec(self, code): m = self.add_module('__main__') exec code in m.__dict__ def r_eval(self, code): m = self.add_module('__main__') return eval(code, m.__dict__) def r_execfile(self, file): m = self.add_module('__main__') execfile(file, m.__dict__) def r_import(self, mname, globals={}, locals={}, fromlist=[]): return self.importer.import_module(mname, globals, locals, fromlist) def r_reload(self, m): return self.importer.reload(m) def r_unload(self, m): return self.importer.unload(m) # The s_* methods are similar but also swap std{in,out,err} def make_delegate_files(self): s = 
self.modules['sys'] self.delegate_stdin = FileDelegate(s, 'stdin') self.delegate_stdout = FileDelegate(s, 'stdout') self.delegate_stderr = FileDelegate(s, 'stderr') self.restricted_stdin = FileWrapper(sys.stdin) self.restricted_stdout = FileWrapper(sys.stdout) self.restricted_stderr = FileWrapper(sys.stderr) def set_files(self): if not hasattr(self, 'save_stdin'): self.save_files() if not hasattr(self, 'delegate_stdin'): self.make_delegate_files() s = self.modules['sys'] s.stdin = self.restricted_stdin s.stdout = self.restricted_stdout s.stderr = self.restricted_stderr sys.stdin = self.delegate_stdin sys.stdout = self.delegate_stdout sys.stderr = self.delegate_stderr def reset_files(self): self.restore_files() s = self.modules['sys'] self.restricted_stdin = s.stdin self.restricted_stdout = s.stdout self.restricted_stderr = s.stderr def save_files(self): self.save_stdin = sys.stdin self.save_stdout = sys.stdout self.save_stderr = sys.stderr def restore_files(self): sys.stdin = self.save_stdin sys.stdout = self.save_stdout sys.stderr = self.save_stderr def s_apply(self, func, args=(), kw=None): self.save_files() try: self.set_files() if kw: r = apply(func, args, kw) else: r = apply(func, args) finally: self.restore_files() return r def s_exec(self, *args): return self.s_apply(self.r_exec, args) def s_eval(self, *args): return self.s_apply(self.r_eval, args) def s_execfile(self, *args): return self.s_apply(self.r_execfile, args) def s_import(self, *args): return self.s_apply(self.r_import, args) def s_reload(self, *args): return self.s_apply(self.r_reload, args) def s_unload(self, *args): return self.s_apply(self.r_unload, args) # Restricted open(...) def r_open(self, file, mode='r', buf=-1): if mode not in ('r', 'rb'): raise IOError, "can't open files for writing in restricted mode" return open(file, mode, buf) # Restricted version of sys.exc_info() def r_exc_info(self): ty, va, tr = sys.exc_info() tr = None return ty, va, tr def test(): import getopt, traceback opts, args = getopt.getopt(sys.argv[1:], 'vt:') verbose = 0 trusted = [] for o, a in opts: if o == '-v': verbose = verbose+1 if o == '-t': trusted.append(a) r = RExec(verbose=verbose) if trusted: r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted) if args: r.modules['sys'].argv = args r.modules['sys'].path.insert(0, os.path.dirname(args[0])) else: r.modules['sys'].path.insert(0, "") fp = sys.stdin if args and args[0] != '-': try: fp = open(args[0]) except IOError, msg: print "%s: can't open file %s" % (sys.argv[0], `args[0]`) return 1 if fp.isatty(): print "*** RESTRICTED *** Python", sys.version print 'Type "help", "copyright", "credits" or "license" ' \ 'for more information.' while 1: try: try: s = raw_input('>>> ') except EOFError: print break if s and s[0] != '#': s = s + '\n' c = compile(s, '<stdin>', 'single') r.s_exec(c) except SystemExit, n: return n except: traceback.print_exc() else: text = fp.read() fp.close() c = compile(text, fp.name, 'exec') try: r.s_exec(c) except SystemExit, n: return n except: traceback.print_exc() return 1 if __name__ == '__main__': sys.exit(test())
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-2.2/Lib/rexec.py
Python
mit
12,831
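A minimal usage sketch of the module above (Python 2 only, since rexec shipped with that era of the standard library); the evaluated strings are illustrative, not taken from the source:

r = RExec()
print r.r_eval('2 ** 10')              # plain computation is allowed: 1024
try:
    r.r_exec('open("scratch.txt", "w")')
except IOError, e:
    print 'blocked by r_open:', e      # write modes are rejected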
# fbdata.generic

# FBDATA
from .models import (
    FBAlbum,
    FBEvent,
    FBLink,
    FBPhoto,
    FBStatus,
    FBVideo,
    StreamPost
)

_FB_CLASSES = {
    'album': FBAlbum,
    'event': FBEvent,
    'link': FBLink,
    'photo': FBPhoto,
    'status': FBStatus,
    'video': FBVideo,
    'post': StreamPost
}


def class_for_type(object_type):
    return _FB_CLASSES.get(object_type, None)


def album_exists(user, object_id):
    return FBAlbum.objects.filter(user=user, object_id=object_id).exists()


def event_exists(user, object_id):
    return FBEvent.objects.filter(user=user, event_id=object_id).exists()


def link_exists(user, link_id):
    return FBLink.objects.filter(user=user, link_id=link_id).exists()


def post_exists(user, post_id):
    return StreamPost.objects.filter(user=user, post_id=post_id).exists()


def photo_exists(user, object_id):
    return FBPhoto.objects.filter(user=user, object_id=object_id).exists()


def status_exists(user, status_id):
    return FBStatus.objects.filter(user=user, status_id=status_id).exists()


def video_exists(user, video_id):
    return FBVideo.objects.filter(user=user, video_id=video_id).exists()
valuesandvalue/valuesandvalue
vavs_project/fbdata/generic.py
Python
mit
1,183
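A hedged sketch of how the dispatch table above can be used. Note the lookup field differs per model (FBAlbum/FBPhoto key on object_id per the *_exists helpers, FBEvent on event_id, and so on), so this sketch only covers the two object_id-based models; the user and object_id arguments are placeholders:

def photo_or_album_exists(user, object_type, object_id):
    model_cls = class_for_type(object_type)   # e.g. 'photo' -> FBPhoto
    if model_cls not in (FBAlbum, FBPhoto):
        raise ValueError('unsupported type: %s' % object_type)
    return model_cls.objects.filter(user=user, object_id=object_id).exists()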
import pandas as pd


def _partitions(n: int):
    """Generate partitions of the integer in lexicographic order."""
    # base case of recursion: zero is the sum of the empty list
    if n == 0:
        yield []
        return
    # modify partitions of n-1 to form partitions of n
    for p in _partitions(n - 1):
        yield [1] + p
        if p and (len(p) < 2 or p[1] > p[0]):
            yield [p[0] + 1] + p[1:]


def find_best_partitioning(
        dataframe: pd.DataFrame,
        value_column: str,
        partition_size: float,
        smallest_partition: float) -> pd.DataFrame:
    """Find the partitioning of the data that minimizes the error.

    Args:
        dataframe: Dataframe containing the data to be partitioned.
        value_column: Header of the column containing the values
        partition_size: Total size of the partitioning
        smallest_partition: All partitionings are multiples of this value
    Returns:
        Dataframe containing the best partitioning for each row
    """
    num_entries = len(dataframe)
    partition_table = pd.DataFrame({
        'partitioning': [0] * num_entries,
        'best_partitioning': [0] * num_entries
    }, index=dataframe.index)

    # Normalize and sort the values
    partition_table['values'] = \
        dataframe[value_column] / dataframe[value_column].sum()
    partition_table.sort_values('values', inplace=True)
    assert (-0.00005 < partition_table['values'].sum() - 1. < 0.00005), \
        "Normalization of partition table failed!"

    # Each partition is checked to see how much error it has against the ideal
    # partitioning, and the best is selected.
    best_error = float("inf")
    for partition in _partitions(int(partition_size / smallest_partition)):
        # A partition with more parts than rows cannot be assigned to the
        # table, so skip it (otherwise the Series below would not align).
        if len(partition) > num_entries:
            continue
        # Fill the rest of the partition with zeros
        partitioning = pd.Series(
            [0] * (num_entries - len(partition)) + list(partition),
            index=partition_table.index)
        partition_table['partitioning'] = partitioning * smallest_partition
        error = sum(
            abs(partition_table['partitioning'] -
                partition_table['values'] * partition_size))
        if error < best_error:
            best_error = error
            partition_table['best_partitioning'] = \
                partition_table['partitioning']
            print(error, best_error, " ... ", partition[len(partition) - 4:])

    del partition_table['partitioning']
    # Return only non-zero partitions
    return partition_table[partition_table["best_partitioning"] > 0]
jnfrye/local_plants_book
src/PyFloraBook/threshold/partition.py
Python
mit
2,589
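A hedged usage sketch for find_best_partitioning above; the survey-count numbers are invented. With four rows and partition_size / smallest_partition == 4, every generated partition fits the table:

import pandas as pd

df = pd.DataFrame({"count": [5, 10, 25, 60]}, index=list("abcd"))
best = find_best_partitioning(
    df, value_column="count", partition_size=1.0, smallest_partition=0.25)
print(best)   # only the rows that receive a non-zero share of the total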
import fileinput

def str_to_int(s):
    return([ int(x) for x in s.split() ])

# args = [ 'line 1', 'line 2', ... ]
def proc_input(args):
    (n, m) = str_to_int(args[0])
    strings = [ args[i + 1].strip() for i in xrange(n) ]
    return(n, m, strings)

def solve(args, verbose=False):
    (n, m, strings) = proc_input(args)
    acc = [ '' ] * n
    r = 0
    for i in xrange(m):
        rollback = False
        for j in xrange(n):
            acc[j] += strings[j][i]
        for j in xrange(n - 1):
            if acc[j + 1] < acc[j]:
                r += 1
                rollback = True
                break
        if rollback:
            for j in xrange(n):
                acc[j] = acc[j][:-1]
    if verbose:
        print r
    return r

def test():
    assert(str_to_int('1 2 3') == [ 1, 2, 3 ])
    assert(proc_input([ '1 10', 'codeforces' ]) == (1, 10, [ 'codeforces' ]))
    assert(solve([ '1 10', 'codeforces' ]) == 0)
    assert(solve([ '4 4', 'case', 'care', 'test', 'code' ]) == 2)
    assert(solve([ '5 4', 'code', 'forc', 'esco', 'defo', 'rces' ]) == 4)

if __name__ == '__main__':
    from sys import argv
    if argv.pop() == 'test':
        test()
    else:
        solve(list(fileinput.input()), verbose=True)
cripplet/practice
codeforces/496/soln/c_columns.py
Python
mit
1,075
from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from jsonfield import JSONField

from . import consts


@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    if created:
        Token.objects.create(user=instance)


class DevinoRequest(models.Model):
    api_resource = models.CharField(max_length=64, choices=consts.API_CHOICES)
    data = JSONField(null=True, blank=True)
    dc = models.DateTimeField(auto_now_add=True)


class DevinoAnswer(models.Model):
    code = models.CharField(max_length=64)
    description = models.CharField(max_length=256)
    result = JSONField(null=True, blank=True)
    request = models.OneToOneField(DevinoRequest, related_name='devino_answer')
    is_fail = models.BooleanField(default=False)
    dc = models.DateTimeField(auto_now_add=True)
telminov/email-service
core/models.py
Python
mit
1,001
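The post_save receiver above means every newly created user gets a DRF auth token automatically. A hedged sketch of the effect (e.g. in a Django shell; the username and password are placeholders):

from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token

user = get_user_model().objects.create_user("alice", password="secret")
token = Token.objects.get(user=user)   # created by create_auth_token
print(token.key)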
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The PlanBcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the listtransactions API."""

from test_framework.test_framework import PlanbcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from io import BytesIO

def txFromHex(hexstring):
    tx = CTransaction()
    f = BytesIO(hex_str_to_bytes(hexstring))
    tx.deserialize(f)
    return tx

class ListTransactionsTest(PlanbcoinTestFramework):
    def __init__(self):
        super().__init__()
        self.num_nodes = 4
        self.setup_clean_chain = False

    def setup_nodes(self):
        # This test requires mocktime
        self.enable_mocktime()
        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)

    def run_test(self):
        # Simple send, 0 to 1:
        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
        self.sync_all()
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid":txid},
                            {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"txid":txid},
                            {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
        # mine a block, confirmations should change:
        self.nodes[0].generate(1)
        self.sync_all()
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid":txid},
                            {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"txid":txid},
                            {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})

        # send-to-self:
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid":txid, "category":"send"},
                            {"amount":Decimal("-0.2")})
        assert_array_result(self.nodes[0].listtransactions(),
                            {"txid":txid, "category":"receive"},
                            {"amount":Decimal("0.2")})

        # sendmany from node1: twice to self, twice to node2:
        send_to = { self.nodes[0].getnewaddress() : 0.11,
                    self.nodes[1].getnewaddress() : 0.22,
                    self.nodes[0].getaccountaddress("from1") : 0.33,
                    self.nodes[1].getaccountaddress("toself") : 0.44 }
        txid = self.nodes[1].sendmany("", send_to)
        self.sync_all()
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category":"send","amount":Decimal("-0.11")},
                            {"txid":txid})
        assert_array_result(self.nodes[0].listtransactions(),
                            {"category":"receive","amount":Decimal("0.11")},
                            {"txid":txid})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category":"send","amount":Decimal("-0.22")},
                            {"txid":txid})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category":"receive","amount":Decimal("0.22")},
                            {"txid":txid})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category":"send","amount":Decimal("-0.33")},
                            {"txid":txid})
        assert_array_result(self.nodes[0].listtransactions(),
                            {"category":"receive","amount":Decimal("0.33")},
                            {"txid":txid, "account" : "from1"})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category":"send","amount":Decimal("-0.44")},
                            {"txid":txid, "account" : ""})
        assert_array_result(self.nodes[1].listtransactions(),
                            {"category":"receive","amount":Decimal("0.44")},
                            {"txid":txid, "account" : "toself"})

        multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
        txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
        self.nodes[1].generate(1)
        self.sync_all()
        assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
        assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
                            {"category":"receive","amount":Decimal("0.1")},
                            {"txid":txid, "account" : "watchonly"})

        self.run_rbf_opt_in_test()

    # Check that the opt-in-rbf flag works properly, for sent and received
    # transactions.
    def run_rbf_opt_in_test(self):
        # Check whether a transaction signals opt-in RBF itself
        def is_opt_in(node, txid):
            rawtx = node.getrawtransaction(txid, 1)
            for x in rawtx["vin"]:
                if x["sequence"] < 0xfffffffe:
                    return True
            return False

        # Find an unconfirmed output matching a certain txid
        def get_unconfirmed_utxo_entry(node, txid_to_match):
            utxo = node.listunspent(0, 0)
            for i in utxo:
                if i["txid"] == txid_to_match:
                    return i
            return None

        # 1. Chain a few transactions that don't opt-in.
        txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        assert(not is_opt_in(self.nodes[0], txid_1))
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})

        # Tx2 will build off txid_1, still not opting in to RBF.
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1)
        assert_equal(utxo_to_use["safe"], True)
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
        assert_equal(utxo_to_use["safe"], False)

        # Create tx2 using createrawtransaction
        inputs = [{"txid":utxo_to_use["txid"], "vout":utxo_to_use["vout"]}]
        outputs = {self.nodes[0].getnewaddress(): 0.999}
        tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
        tx2_signed = self.nodes[1].signrawtransaction(tx2)["hex"]
        txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)

        # ...and check the result
        assert(not is_opt_in(self.nodes[1], txid_2))
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})

        # Tx3 will opt-in to RBF
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
        inputs = [{"txid": txid_2, "vout":utxo_to_use["vout"]}]
        outputs = {self.nodes[1].getnewaddress(): 0.998}
        tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
        tx3_modified = txFromHex(tx3)
        tx3_modified.vin[0].nSequence = 0
        tx3 = bytes_to_hex_str(tx3_modified.serialize())
        tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
        txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)

        assert(is_opt_in(self.nodes[0], txid_3))
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})

        # Tx4 will chain off tx3.  Doesn't signal itself, but depends on one
        # that does.
        utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
        inputs = [{"txid": txid_3, "vout":utxo_to_use["vout"]}]
        outputs = {self.nodes[0].getnewaddress(): 0.997}
        tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
        tx4_signed = self.nodes[1].signrawtransaction(tx4)["hex"]
        txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)

        assert(not is_opt_in(self.nodes[1], txid_4))
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})

        # Replace tx3, and check that tx4 becomes unknown
        tx3_b = tx3_modified
        tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN)  # bump the fee
        tx3_b = bytes_to_hex_str(tx3_b.serialize())
        tx3_b_signed = self.nodes[0].signrawtransaction(tx3_b)['hex']
        txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
        assert(is_opt_in(self.nodes[0], txid_3b))

        assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
        sync_mempools(self.nodes)
        assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})

        # Check gettransaction as well:
        for n in self.nodes[0:2]:
            assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
            assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
            assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
            assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
            assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")

        # After mining a transaction, it's no longer BIP125-replaceable
        self.nodes[0].generate(1)
        assert(txid_3b not in self.nodes[0].getrawmempool())
        assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
        assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")

if __name__ == '__main__':
    ListTransactionsTest().main()
planbcoin/planbcoin
test/functional/listtransactions.py
Python
mit
10,477
from gmrf import CovKernel
from mesh import QuadMesh
from mesh import Mesh1D
from fem import QuadFE
from fem import DofHandler
from function import Nodal
from plot import Plot
import matplotlib.pyplot as plt
from gmrf import modchol_ldlt, Covariance
from scipy import linalg
import numpy as np
from scipy import linalg as la
import unittest


class TestCovariance(unittest.TestCase):
    """
    Test class for covariance
    """
    def test_constructor(self):
        pass

    def test_assembly(self):
        pass
hvanwyk/quadmesh
tests/test_gmrf/test_covariance.py
Python
mit
541
from decimal import Decimal
from datetime import datetime

import pytz
import pytest

from poker.card import Card
from poker.hand import Combo
from poker.constants import Currency, GameType, Game, Limit, Action, MoneyType
from poker.handhistory import _Player, _PlayerAction
from poker.room.pokerstars import PokerStarsHandHistory, _Street

from . import stars_hands

ET = pytz.timezone("US/Eastern")


@pytest.fixture
def hand(request):
    """Parse handhistory defined in hand_text class attribute
    and returns a PokerStarsHandHistory instance.
    """
    hh = PokerStarsHandHistory(request.instance.hand_text)
    hh.parse()
    return hh


@pytest.fixture
def hand_header(request):
    """Parse hand history header only defined in hand_text
    and returns a PokerStarsHandHistory instance.
    """
    hh = PokerStarsHandHistory(request.instance.hand_text)
    hh.parse_header()
    return hh


@pytest.fixture(scope="module")
def flop():
    return _Street(
        [
            "[2s 6d 6h]",
            "W2lkm2n: bets 80",
            "MISTRPerfect: folds",
            "Uncalled bet (80) returned to W2lkm2n",
            "W2lkm2n collected 150 from pot",
            "W2lkm2n: doesn't show hand",
        ],
        0,
    )


def test_open_from_file(testdir):
    bbb_path = str(testdir.joinpath("handhistory/bbb.txt"))
    hh = PokerStarsHandHistory.from_file(bbb_path)
    hh.parse()
    assert hh.ident == "138364355489"
    assert type(hh.raw) is str


class TestHandHeaderNoLimitHoldemTourFreeroll:
    hand_text = """
PokerStars Hand #152455023342: Tournament #1545783901, Freeroll Hold'em No Limit - Level I (10/20) - 2016/04/25 23:22:00 BRT [2016/04/25 22:22:00 ET]
"""  # noqa

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "152455023342"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "1545783901"),
            ("tournament_level", "I"),
            ("currency", Currency("USD")),
            ("buyin", Decimal("0")),
            ("rake", Decimal("0")),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(10)),
            ("bb", Decimal(20)),
            ("date", ET.localize(datetime(2016, 4, 25, 22, 22, 0))),
            ("extra", {"money_type": MoneyType.REAL}),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value


class TestHandHeaderNoLimitHoldemTourPlayMoney:
    hand_text = """
PokerStars Hand #152504147861: Tournament #1545751329, 870+130 Hold'em No Limit - Level I (10/20) - 2016/04/27 1:17:16 BRT [2016/04/27 0:17:16 ET]
"""  # noqa

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "152504147861"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "1545751329"),
            ("tournament_level", "I"),
            ("currency", None),
            ("buyin", Decimal("870")),
            ("rake", Decimal("130")),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(10)),
            ("bb", Decimal(20)),
            ("date", ET.localize(datetime(2016, 4, 27, 00, 17, 16))),
            ("extra", {"money_type": MoneyType.PLAY}),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value


class TestHandHeaderLimitHoldemCashPlayMoney:
    hand_text = """
PokerStars Hand #153769972916: Hold'em Limit (10/20) - 2016/05/24 8:52:39 BRT [2016/05/24 7:52:39 ET]
"""  # noqa

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "153769972916"),
            ("game_type", GameType.CASH),
            ("tournament_ident", None),
            ("tournament_level", None),
            ("currency", None),
            ("buyin", None),
            ("rake", None),
            ("game", Game.HOLDEM),
            ("limit", Limit.FL),
            ("sb", Decimal(10)),
            ("bb", Decimal(20)),
            ("date", ET.localize(datetime(2016, 5, 24, 7, 52, 39))),
            ("extra", {"money_type": MoneyType.PLAY}),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value


class TestHandHeaderNoLimitHoldemTourStarcoin:
    hand_text = """
PokerStars Hand #153719873192: Tournament #1573768726, 184 SC Hold'em No Limit - Level I (25/50) - 2016/05/23 6:48:22 BRT [2016/05/23 5:48:22 ET]
"""  # noqa

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "153719873192"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "1573768726"),
            ("tournament_level", "I"),
            ("currency", Currency.STARS_COIN),
            ("buyin", Decimal(184)),
            ("rake", Decimal(0)),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(25)),
            ("bb", Decimal(50)),
            ("date", ET.localize(datetime(2016, 5, 23, 5, 48, 22))),
            ("extra", {"money_type": MoneyType.REAL}),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value


class TestHandHeaderPotLimitOmahaCash:
    hand_text = """
PokerStars Hand #107030112846: Omaha Pot Limit ($0.01/$0.02 USD) - 2013/11/15 9:03:10 AWST [2013/11/14 20:03:10 ET]
"""  # noqa

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "107030112846"),
            ("game_type", GameType.CASH),
            ("tournament_ident", None),
            ("tournament_level", None),
            ("currency", Currency.USD),
            ("buyin", None),
            ("rake", None),
            ("game", Game.OMAHA),
            ("limit", Limit.PL),
            ("sb", Decimal("0.01")),
            ("bb", Decimal("0.02")),
            ("date", ET.localize(datetime(2013, 11, 14, 20, 3, 10))),
            ("extra", {"money_type": MoneyType.REAL}),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value


class TestHandWithFlopOnly:
    hand_text = stars_hands.HAND1

    # in py.test 2.4 it is recommended to use string like "attribute,expected",
    # but with tuple, it works in both 2.3.5 and 2.4
    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "105024000105"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "797469411"),
            ("tournament_level", "I"),
            ("currency", Currency.USD),
            ("buyin", Decimal("3.19")),
            ("rake", Decimal("0.31")),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(10)),
            ("bb", Decimal(20)),
            ("date", ET.localize(datetime(2013, 10, 4, 13, 53, 27))),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("table_name", "797469411 15"),
            ("max_players", 9),
            ("button", _Player(name="flettl2", stack=1500, seat=1, combo=None)),
            ("hero", _Player(name="W2lkm2n", stack=3000, seat=5, combo=Combo("AcJh"))),
            (
                "players",
                [
                    _Player(name="flettl2", stack=1500, seat=1, combo=None),
                    _Player(name="santy312", stack=3000, seat=2, combo=None),
                    _Player(name="flavio766", stack=3000, seat=3, combo=None),
                    _Player(name="strongi82", stack=3000, seat=4, combo=None),
                    _Player(name="W2lkm2n", stack=3000, seat=5, combo=Combo("AcJh")),
                    _Player(name="MISTRPerfect", stack=3000, seat=6, combo=None),
                    _Player(name="blak_douglas", stack=3000, seat=7, combo=None),
                    _Player(name="sinus91", stack=1500, seat=8, combo=None),
                    _Player(name="STBIJUJA", stack=1500, seat=9, combo=None),
                ],
            ),
            ("turn", None),
            ("river", None),
            ("board", (Card("2s"), Card("6d"), Card("6h"))),
            (
                "preflop_actions",
                (
                    "strongi82: folds",
                    "W2lkm2n: raises 40 to 60",
                    "MISTRPerfect: calls 60",
                    "blak_douglas: folds",
                    "sinus91: folds",
                    "STBIJUJA: folds",
                    "flettl2: folds",
                    "santy312: folds",
                    "flavio766: folds",
                ),
            ),
            ("turn_actions", None),
            ("river_actions", None),
            ("total_pot", Decimal(150)),
            ("show_down", False),
            ("winners", ("W2lkm2n",)),
        ],
    )
    def test_body(self, hand, attribute, expected_value):
        assert getattr(hand, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            (
                "actions",
                (
                    _PlayerAction("W2lkm2n", Action.BET, Decimal(80)),
                    _PlayerAction("MISTRPerfect", Action.FOLD, None),
                    _PlayerAction("W2lkm2n", Action.RETURN, Decimal(80)),
                    _PlayerAction("W2lkm2n", Action.WIN, Decimal(150)),
                    _PlayerAction("W2lkm2n", Action.MUCK, None),
                ),
            ),
            ("cards", (Card("2s"), Card("6d"), Card("6h"))),
            ("is_rainbow", True),
            ("is_monotone", False),
            ("is_triplet", False),
            # TODO: http://www.pokerology.com/lessons/flop-texture/
            # assert flop.is_dry
            ("has_pair", True),
            ("has_straightdraw", False),
            ("has_gutshot", True),
            ("has_flushdraw", False),
            ("players", ("W2lkm2n", "MISTRPerfect")),
            ("pot", Decimal(150)),
        ],
    )
    def test_flop_attributes(self, hand, attribute, expected_value):
        assert getattr(hand.flop, attribute) == expected_value

    def test_flop(self, hand):
        assert isinstance(hand.flop, _Street)


class TestAllinPreflopHand:
    hand_text = stars_hands.HAND2

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "105034215446"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "797536898"),
            ("tournament_level", "XI"),
            ("currency", Currency.USD),
            ("buyin", Decimal("3.19")),
            ("rake", Decimal("0.31")),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(400)),
            ("bb", Decimal(800)),
            ("date", ET.localize(datetime(2013, 10, 4, 17, 22, 20))),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("table_name", "797536898 9"),
            ("max_players", 9),
            ("button", _Player(name="W2lkm2n", stack=11815, seat=2, combo=Combo("JdJs"))),
            ("hero", _Player(name="W2lkm2n", stack=11815, seat=2, combo=Combo("JdJs"))),
            (
                "players",
                [
                    _Player(name="RichFatWhale", stack=12910, seat=1, combo=None),
                    _Player(name="W2lkm2n", stack=11815, seat=2, combo=Combo("JdJs")),
                    _Player(name="Labahra", stack=7395, seat=3, combo=None),
                    _Player(name="Lean Abadia", stack=7765, seat=4, combo=None),
                    _Player(name="lkenny44", stack=10080, seat=5, combo=None),
                    _Player(name="Newfie_187", stack=1030, seat=6, combo=None),
                    _Player(name="Hokolix", stack=13175, seat=7, combo=None),
                    _Player(name="pmmr", stack=2415, seat=8, combo=None),
                    _Player(name="costamar", stack=13070, seat=9, combo=None),
                ],
            ),
            ("turn", Card("8d")),
            ("river", Card("Ks")),
            ("board", (Card("3c"), Card("6s"), Card("9d"), Card("8d"), Card("Ks"))),
            (
                "preflop_actions",
                (
                    "lkenny44: folds",
                    "Newfie_187: raises 155 to 955 and is all-in",
                    "Hokolix: folds",
                    "pmmr: folds",
                    "costamar: raises 12040 to 12995 and is all-in",
                    "RichFatWhale: folds",
                    "W2lkm2n: calls 11740 and is all-in",
                    "Labahra: folds",
                    "Lean Abadia: folds",
                    "Uncalled bet (1255) returned to costamar",
                ),
            ),
            ("turn_actions", None),
            ("river_actions", None),
            ("total_pot", Decimal(26310)),
            ("show_down", True),
            ("winners", ("costamar",)),
        ],
    )
    def test_body(self, hand, attribute, expected_value):
        assert getattr(hand, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("actions", None),
            ("cards", (Card("3c"), Card("6s"), Card("9d"))),
            ("is_rainbow", True),
            ("is_monotone", False),
            ("is_triplet", False),
            # TODO: http://www.pokerology.com/lessons/flop-texture/
            # assert flop.is_dry
            ("has_pair", False),
            ("has_straightdraw", True),
            ("has_gutshot", True),
            ("has_flushdraw", False),
            ("players", None),
        ],
    )
    def test_flop_attributes(self, hand, attribute, expected_value):
        assert getattr(hand.flop, attribute) == expected_value

    def test_flop(self, hand):
        assert isinstance(hand.flop, _Street)

    @pytest.mark.xfail
    def test_flop_pot(self, hand):
        assert hand.flop.pot == Decimal(26310)


class TestBodyMissingPlayerNoBoard:
    hand_text = stars_hands.HAND3

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "105026771696"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "797469411"),
            ("tournament_level", "X"),
            ("currency", Currency.USD),
            ("buyin", Decimal("3.19")),
            ("rake", Decimal("0.31")),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(300)),
            ("bb", Decimal(600)),
            ("date", ET.localize(datetime(2013, 10, 4, 14, 50, 56))),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("table_name", "797469411 11"),
            ("max_players", 9),
            ("button", _Player(name="W2lkm2n", stack=10714, seat=8, combo=Combo("6d8d"))),
            ("hero", _Player(name="W2lkm2n", stack=10714, seat=8, combo=Combo("6d8d"))),
            (
                "players",
                [
                    _Player(name="Empty Seat 1", stack=0, seat=1, combo=None),
                    _Player(name="snelle_jel", stack=4295, seat=2, combo=None),
                    _Player(name="EuSh0wTelm0", stack=11501, seat=3, combo=None),
                    _Player(name="panost3", stack=7014, seat=4, combo=None),
                    _Player(name="Samovlyblen", stack=7620, seat=5, combo=None),
                    _Player(name="Theralion", stack=4378, seat=6, combo=None),
                    _Player(name="wrsport1015", stack=9880, seat=7, combo=None),
                    _Player(name="W2lkm2n", stack=10714, seat=8, combo=Combo("6d8d")),
                    _Player(name="fischero68", stack=8724, seat=9, combo=None),
                ],
            ),
            ("turn", None),
            ("river", None),
            ("board", None),
            (
                "preflop_actions",
                (
                    "EuSh0wTelm0: folds",
                    "panost3: folds",
                    "Samovlyblen: folds",
                    "Theralion: raises 600 to 1200",
                    "wrsport1015: folds",
                    "W2lkm2n: folds",
                    "fischero68: folds",
                    "snelle_jel: folds",
                    "Uncalled bet (600) returned to Theralion",
                    "Theralion collected 1900 from pot",
                    "Theralion: doesn't show hand",
                ),
            ),
            ("turn_actions", None),
            ("river_actions", None),
            ("total_pot", Decimal(1900)),
            ("show_down", False),
            ("winners", ("Theralion",)),
        ],
    )
    def test_body(self, hand, attribute, expected_value):
        assert getattr(hand, attribute) == expected_value

    def test_flop(self, hand):
        assert hand.flop is None


class TestBodyEveryStreet:
    hand_text = stars_hands.HAND4

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("ident", "105025168298"),
            ("game_type", GameType.TOUR),
            ("tournament_ident", "797469411"),
            ("tournament_level", "IV"),
            ("currency", Currency.USD),
            ("buyin", Decimal("3.19")),
            ("rake", Decimal("0.31")),
            ("game", Game.HOLDEM),
            ("limit", Limit.NL),
            ("sb", Decimal(50)),
            ("bb", Decimal(100)),
            ("date", ET.localize(datetime(2013, 10, 4, 14, 19, 17))),
        ],
    )
    def test_values_after_header_parsed(self, hand_header, attribute, expected_value):
        assert getattr(hand_header, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            ("table_name", "797469411 15"),
            ("max_players", 9),
            ("button", _Player(name="W2lkm2n", stack=5145, seat=5, combo=Combo("Jc5c"))),
            ("hero", _Player(name="W2lkm2n", stack=5145, seat=5, combo=Combo("Jc5c"))),
            (
                "players",
                [
                    _Player(name="flettl2", stack=3000, seat=1, combo=None),
                    _Player(name="santy312", stack=5890, seat=2, combo=None),
                    _Player(name="flavio766", stack=11010, seat=3, combo=None),
                    _Player(name="strongi82", stack=2855, seat=4, combo=None),
                    _Player(name="W2lkm2n", stack=5145, seat=5, combo=Combo("Jc5c")),
                    _Player(name="MISTRPerfect", stack=2395, seat=6, combo=None),
                    _Player(name="blak_douglas", stack=3000, seat=7, combo=None),
                    _Player(name="sinus91", stack=3000, seat=8, combo=None),
                    _Player(name="STBIJUJA", stack=1205, seat=9, combo=None),
                ],
            ),
            ("turn", Card("8c")),
            ("river", Card("Kd")),
            ("board", (Card("6s"), Card("4d"), Card("3s"), Card("8c"), Card("Kd"))),
            (
                "preflop_actions",
                (
                    "sinus91: folds",
                    "STBIJUJA: folds",
                    "flettl2: raises 125 to 225",
                    "santy312: folds",
                    "flavio766: folds",
                    "strongi82: folds",
                    "W2lkm2n: folds",
                    "MISTRPerfect: folds",
                    "blak_douglas: calls 125",
                ),
            ),
            (
                "turn_actions",
                (
                    "blak_douglas: checks",
                    "flettl2: bets 250",
                    "blak_douglas: calls 250",
                ),
            ),
            (
                "river_actions",
                (
                    "blak_douglas: checks",
                    "flettl2: bets 1300",
                    "blak_douglas: folds",
                    "Uncalled bet (1300) returned to flettl2",
                    "flettl2 collected 1300 from pot",
                    "flettl2: doesn't show hand",
                ),
            ),
            ("total_pot", Decimal(1300)),
            ("show_down", False),
            ("winners", ("flettl2",)),
        ],
    )
    def test_body(self, hand, attribute, expected_value):
        assert getattr(hand, attribute) == expected_value

    @pytest.mark.parametrize(
        ("attribute", "expected_value"),
        [
            (
                "actions",
                (
                    _PlayerAction("blak_douglas", Action.CHECK, None),
                    _PlayerAction("flettl2", Action.BET, Decimal(150)),
                    _PlayerAction("blak_douglas", Action.CALL, Decimal(150)),
                ),
            ),
            ("cards", (Card("6s"), Card("4d"), Card("3s"))),
            ("is_rainbow", False),
            ("is_monotone", False),
            ("is_triplet", False),
            # TODO: http://www.pokerology.com/lessons/flop-texture/
            # assert flop.is_dry
            ("has_pair", False),
            ("has_straightdraw", True),
            ("has_gutshot", True),
            ("has_flushdraw", True),
            ("players", ("blak_douglas", "flettl2")),
        ],
    )
    def test_flop_attributes(self, hand, attribute, expected_value):
        assert getattr(hand.flop, attribute) == expected_value

    def test_flop(self, hand):
        assert isinstance(hand.flop, _Street)

    @pytest.mark.xfail
    def test_flop_pot(self, hand):
        assert hand.flop.pot == Decimal(800)


class TestClassRepresentation:
    hand_text = stars_hands.HAND1

    def test_unicode(self, hand_header):
        assert str(hand_header) == "<PokerStarsHandHistory: #105024000105>"

    def test_str(self, hand_header):
        assert str(hand_header) == "<PokerStarsHandHistory: #105024000105>"


class TestPlayerNameWithDot:
    hand_text = stars_hands.HAND5

    def test_player_is_in_player_list(self, hand):
        assert ".prestige.U$" in [p.name for p in hand.players]

    def test_player_stack(self, hand):
        player_names = [p.name for p in hand.players]
        player_index = player_names.index(".prestige.U$")
        assert hand.players[player_index].stack == 3000
pokerregion/poker
tests/handhistory/test_stars.py
Python
mit
23,153
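# Illustrative sketch (not part of the original file): the `hand` and
# `hand_header` fixtures used throughout the tests above are defined outside
# this module, typically in a conftest.py that parses the class attribute
# `hand_text`. One plausible shape for them — the import path, parse_header()
# and parse() calls are assumptions about the poker library, not confirmed API:
import pytest


@pytest.fixture
def hand_header(request):
    """Parse only the header of the owning class's hand_text."""
    from poker.room.pokerstars import PokerStarsHandHistory
    hh = PokerStarsHandHistory(request.instance.hand_text)
    hh.parse_header()
    return hh


@pytest.fixture
def hand(request):
    """Fully parse the owning class's hand_text."""
    from poker.room.pokerstars import PokerStarsHandHistory
    hh = PokerStarsHandHistory(request.instance.hand_text)
    hh.parse()
    return hh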
""" Tests for the HistoryNode module """ import unittest from unittest.mock import patch from src import historynode from test import helper class TestHistoryNode(unittest.TestCase): """ Tests for the historynode module, containing the HistoryNode class """ # pylint: disable=too-many-public-methods # Never too many tests def test_print_board(self): """Check that print_board works""" with helper.captured_output() as out: hn_obj = historynode.HistoryNode() hn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("0 0 0 \n" " 0 0 0 \n" "0 0 0 0 0 0 0\n" "0 0 0 0 0 0 0\n" "0 0 0 0 0 0 0\n" " 0 0 0 \n" " 0 0 0") self.assertEqual(actual_print, expected_print) def test_pretty_print_board(self): """Check that pretty_print_board works""" with helper.captured_output() as out: hn_obj = historynode.HistoryNode() hn_obj.pretty_print_board() actual_print = out.getvalue().strip() expected_print = ("7 . - . - .\n" " | \\ | / |\n" "6 . - . - .\n" " | / | \\ |\n" "5 . - . - . - . - . - . - .\n" " | \\ | / | \\ | / | \\ | / |\n" "4 . - . - . - . - . - . - .\n" " | / | \\ | / | \\ | / | \\ |\n" "3 . - . - . - . - . - . - .\n" " | \\ | / |\n" "2 . - . - .\n" " | / | \\ |\n" "1 . - . - .\n" " 1 2 3 4 5 6 7") self.assertEqual(actual_print, expected_print) @patch('src.historynode.ALPHABETNOTATION', True) def test_pretty_print_board_alphabet_notation(self): """ Check that pretty_print_board works with alphabet notation """ with helper.captured_output() as out: hn_obj = historynode.HistoryNode() hn_obj.pretty_print_board() actual_print = out.getvalue().strip() expected_print = ("7 . - . - .\n" " | \\ | / |\n" "6 . - . - .\n" " | / | \\ |\n" "5 . - . - . - . - . - . - .\n" " | \\ | / | \\ | / | \\ | / |\n" "4 . - . - . - . - . - . - .\n" " | / | \\ | / | \\ | / | \\ |\n" "3 . - . - . - . - . - . - .\n" " | \\ | / |\n" "2 . - . - .\n" " | / | \\ |\n" "1 . - . 
- .\n" " A B C D E F G") self.assertEqual(actual_print, expected_print) def test_constructor(self): """ Check that HistoryNode object is initialized correctly """ hn_obj = historynode.HistoryNode() hn_obj.constructor() self.assertEqual(hn_obj.winningState, False) self.assertEqual(hn_obj.rootP, True) self.assertEqual(hn_obj.gameState[1][1], -1) self.assertEqual(hn_obj.gameState[6][0], -1) with helper.captured_output() as out: hn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("1 1 1 \n" " 1 1 1 \n" "1 1 1 1 1 1 1\n" "1 1 1 1 1 1 1\n" "1 1 0 0 0 1 1\n" " 0 0 0 \n" " 2 0 2") self.assertEqual(actual_print, expected_print) self.assertEqual(hn_obj.gameState[3][1], 0) self.assertEqual(hn_obj.gameState[4][0], 2) def test_geeseWinP_good(self): """ Check that geeseWinP can detect a clear win state """ hn_obj = historynode.HistoryNode() hn_obj.constructor() hn_obj.gameState[2][0] = 1 hn_obj.gameState[2][1] = 3 hn_obj.gameState[2][2] = 1 hn_obj.gameState[3][0] = 1 hn_obj.gameState[3][1] = 1 hn_obj.gameState[3][2] = 3 hn_obj.gameState[4][0] = 1 hn_obj.gameState[4][1] = 3 hn_obj.gameState[4][2] = 1 expected_result = True actual_result = hn_obj.geeseWinP() self.assertEqual(actual_result, expected_result) def test_geeseWinP_not_quite(self): """ Check that geeseWinP detects that an almost win isn't a win""" hn_obj = historynode.HistoryNode() hn_obj.constructor() hn_obj.gameState[2][0] = 1 hn_obj.gameState[2][2] = 1 hn_obj.gameState[3][0] = 1 hn_obj.gameState[3][1] = 1 hn_obj.gameState[3][2] = 3 hn_obj.gameState[4][0] = 1 hn_obj.gameState[4][1] = 3 hn_obj.gameState[4][2] = 1 expected_result = False actual_result = hn_obj.geeseWinP() self.assertEqual(actual_result, expected_result) def test_geeseWinP_not_at_all(self): """ Check that geeseWinP detects that starting position isn't a win""" hn_obj = historynode.HistoryNode() hn_obj.constructor() expected_result = False actual_result = hn_obj.geeseWinP() self.assertEqual(actual_result, expected_result) def test_foxesWinP_good(self): """ Check that foxesWinP can detect a clear win state """ hn_obj = historynode.HistoryNode() # Don't run the constructor since it populates Geese pieces hn_obj.gameState[2][0] = 1 hn_obj.gameState[2][2] = 1 hn_obj.gameState[3][0] = 1 hn_obj.gameState[3][1] = 1 hn_obj.gameState[3][2] = 3 hn_obj.gameState[4][0] = 1 hn_obj.gameState[4][1] = 3 hn_obj.gameState[4][2] = 1 expected_result = True actual_result = hn_obj.foxesWinP() self.assertEqual(actual_result, expected_result) def test_foxesWinP_not_quite(self): """ Check that foxesWinP detects that an almost win isn't a win""" hn_obj = historynode.HistoryNode() # Don't run the constructor since it populates Geese pieces hn_obj.gameState[1][2] = 1 hn_obj.gameState[2][2] = 3 hn_obj.gameState[3][2] = 1 hn_obj.gameState[4][6] = 1 hn_obj.gameState[4][5] = 1 hn_obj.gameState[4][4] = 3 hn_obj.gameState[2][0] = 3 hn_obj.gameState[3][0] = 3 hn_obj.gameState[4][0] = 3 hn_obj.gameState[4][1] = 3 expected_result = False actual_result = hn_obj.foxesWinP() self.assertEqual(actual_result, expected_result) def test_foxesWinP_not_at_all(self): """ Check that foxesWinP detects that starting position isn't a win""" hn_obj = historynode.HistoryNode() hn_obj.constructor() expected_result = False actual_result = hn_obj.foxesWinP() self.assertEqual(actual_result, expected_result) def test_determineWinningState_true(self): """ Check if the game state is winning """ hn_obj = historynode.HistoryNode() hn_obj.determineWinningState() self.assertEqual(hn_obj.winningState, True) 
@patch.object(historynode.HistoryNode, "foxesWinP") def test_determineWinningState_false(self, mock_foxesWinP): """ Check if the game state is not winning """ mock_foxesWinP.return_value = False hn_obj = historynode.HistoryNode() hn_obj.determineWinningState() self.assertEqual(hn_obj.winningState, False) def test_eq_same(self): """ Check equality function compares boards as equal """ hn_obj_1 = historynode.HistoryNode() hn_obj_2 = historynode.HistoryNode() self.assertTrue(hn_obj_1 == hn_obj_2) def test_eq_not_same(self): """ Check equality function compares boards as not equal """ hn_obj_1 = historynode.HistoryNode() hn_obj_1.gameState[4][1] = 3 hn_obj_2 = historynode.HistoryNode() self.assertTrue(hn_obj_1 != hn_obj_2)
blairck/jaeger
test/test_historynode.py
Python
mit
8,660
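# Illustrative sketch (not part of the original repo): the tests above lean on
# helper.captured_output() to inspect what print_board() writes. A minimal
# version of such a helper, assuming it only needs to expose captured stdout
# (the real test/helper.py may differ):
import io
import sys
from contextlib import contextmanager


@contextmanager
def captured_output():
    """Temporarily redirect sys.stdout to a StringIO and yield it."""
    new_out, old_out = io.StringIO(), sys.stdout
    try:
        sys.stdout = new_out
        yield new_out          # callers read it via out.getvalue()
    finally:
        sys.stdout = old_out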
default_app_config = 'stories.apps.StoriesConfig'
kendricktan/laice
stories/__init__.py
Python
mit
49
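# Illustrative sketch (not part of the original repo): default_app_config
# points Django at a concrete AppConfig subclass. The stories/apps.py it
# implies would look roughly like this — the verbose_name and any ready()
# hook are assumptions:
from django.apps import AppConfig


class StoriesConfig(AppConfig):
    name = 'stories'
    verbose_name = 'Stories'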
class Solution(object):
    def canWinNim(self, n):
        """
        :type n: int
        :rtype: bool
        """
        # Each player removes 1-3 stones. Multiples of 4 are losing
        # positions: any move leaves the opponent a non-multiple of 4,
        # and the opponent can always restore the multiple-of-4 invariant.
        return n % 4 != 0


def test():
    s = Solution()
    for i in range(20):
        print('%d %s' % (i, s.canWinNim(i)))


if __name__ == '__main__':
    test()
mistwave/leetcode
Python3/no292_Nim_Game.py
Python
mit
305
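# Illustrative sketch (not part of the original solution): the modulo-4 rule
# can be verified against a direct memoized game search — a position is
# winning iff some move (take 1, 2, or 3 stones) leaves the opponent losing:
from functools import lru_cache


@lru_cache(maxsize=None)
def can_win(n):
    """True iff the player to move wins with n stones left."""
    # With 0 stones the previous player took the last stone, so the
    # player to move has already lost (any() over no moves is False).
    return any(not can_win(n - k) for k in (1, 2, 3) if n - k >= 0)


if __name__ == '__main__':
    assert all(can_win(n) == (n % 4 != 0) for n in range(1, 200))
    print('brute force agrees with the modulo-4 rule')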
import tensorflow as tf


def multilayer_perceptron(x, weights, biases):
    """ This function takes in the input placeholder, weights and biases and
    returns the output tensor of a network with two hidden ReLU layers, and an
    output layer with linear activation.

    :param tf.placeholder x: Placeholder for input
    :param dict weights: Dictionary containing Variables describing weights of each layer
    :param dict biases: Dictionary containing Variables describing biases of each layer
    :return: The activations of the output layer
    """
    # Hidden layer with ReLU activation
    layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])
    layer_1 = tf.nn.relu(layer_1)
    # Hidden layer with ReLU activation
    layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])
    layer_2 = tf.nn.relu(layer_2)
    # Output layer with linear activation
    out_layer = tf.matmul(layer_2, weights['out']) + biases['out']
    return out_layer


def run_training_cycle(sess, x, y, images, labels, n_training_epochs, batch_size,
                       optimizer, cost, display_step=1):
    """ Runs a training cycle for an already opened Tensorflow session, with a
    user-defined optimizer and cost function. The function loops over a
    user-defined number of epochs, and splits the data set up in batches of
    size batch_size.

    :param tf.Session sess: The Tensorflow session under which to run this training
    :param tf.placeholder x: Placeholder for the input images
    :param tf.placeholder y: Placeholder for the labels
    :param images: images to classify
    :param labels: labels belonging to each image
    :param int n_training_epochs: The number of training epochs.
    :param int batch_size: Size of each batch
    :param tf.Operation optimizer: Choose the optimizer to use during training
    :param tf.Tensor cost: Specify which cost function to use
    :param int display_step: Display progress every display_step epochs. Default: 1 (display all epochs)
    """
    for epoch in range(n_training_epochs):
        avg_cost = 0.
        # Split the data set into batches of size batch_size (the last batch
        # may be smaller when len(labels) is not a multiple of batch_size)
        batches = [(images[k: k + batch_size], labels[k: k + batch_size])
                   for k in range(0, len(labels), batch_size)]
        for batch in batches:
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c = sess.run([optimizer, cost], feed_dict={x: batch[0], y: batch[1]})
            # Compute average loss over the actual number of batches
            avg_cost += c / len(batches)
        # Display logs per epoch step
        if epoch % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
    print("Optimization finished")


def test_model(x, y, output, label, test_images, test_labels):
    """ Evaluates a trained model on a held-out test set and prints its accuracy.

    :param tf.placeholder x: Placeholder for the input images
    :param tf.placeholder y: Placeholder for the labels
    :param tf.Tensor output: Output tensor of the network
    :param tf.Tensor label: Tensor holding the one-hot labels to compare against
    :param test_images: images to evaluate on
    :param test_labels: labels belonging to each test image
    :return: The accuracy score on the test set
    """
    correct_prediction = tf.equal(tf.argmax(output, 1), tf.argmax(label, 1))
    print("prediction:", correct_prediction.eval({x: test_images, y: test_labels}))
    # Calculate accuracy as the mean of correct predictions
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    score = accuracy.eval({x: test_images, y: test_labels})
    print("Accuracy:", score)
    return score
jessegeerts/neural-nets
old_files/network.py
Python
mit
3,125
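# Illustrative sketch (not part of the original module): how the pieces above
# might be wired together, assuming TensorFlow 1.x (tf.placeholder and
# friends) and the module above on the path. Layer sizes, learning rate, and
# epoch/batch counts are placeholders, not values from the original project:
import tensorflow as tf


def build_and_train(train_images, train_labels,
                    n_input=784, n_hidden=256, n_classes=10):
    x = tf.placeholder(tf.float32, [None, n_input])
    y = tf.placeholder(tf.float32, [None, n_classes])
    weights = {
        'h1': tf.Variable(tf.random_normal([n_input, n_hidden])),
        'h2': tf.Variable(tf.random_normal([n_hidden, n_hidden])),
        'out': tf.Variable(tf.random_normal([n_hidden, n_classes])),
    }
    biases = {
        'b1': tf.Variable(tf.random_normal([n_hidden])),
        'b2': tf.Variable(tf.random_normal([n_hidden])),
        'out': tf.Variable(tf.random_normal([n_classes])),
    }
    output = multilayer_perceptron(x, weights, biases)
    cost = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=output, labels=y))
    optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        run_training_cycle(sess, x, y, train_images, train_labels,
                           n_training_epochs=15, batch_size=100,
                           optimizer=optimizer, cost=cost)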
""" ``python-future``: pure Python implementation of Python 3 round(). """ from __future__ import division from future.utils import PYPY, PY26, bind_method # Use the decimal module for simplicity of implementation (and # hopefully correctness). from decimal import Decimal, ROUND_HALF_EVEN def newround(number, ndigits=None): """ See Python 3 documentation: uses Banker's Rounding. Delegates to the __round__ method if for some reason this exists. If not, rounds a number to a given precision in decimal digits (default 0 digits). This returns an int when called with one argument, otherwise the same type as the number. ndigits may be negative. See the test_round method in future/tests/test_builtins.py for examples. """ return_int = False if ndigits is None: return_int = True ndigits = 0 if hasattr(number, '__round__'): return number.__round__(ndigits) exponent = Decimal('10') ** (-ndigits) # Work around issue #24: round() breaks on PyPy with NumPy's types # Also breaks on CPython with NumPy's specialized int types like uint64 if 'numpy' in repr(type(number)): number = float(number) if isinstance(number, Decimal): d = number else: if not PY26: d = Decimal.from_float(number) else: d = from_float_26(number) if ndigits < 0: result = newround(d / exponent) * exponent else: result = d.quantize(exponent, rounding=ROUND_HALF_EVEN) if return_int: return int(result) else: return float(result) ### From Python 2.7's decimal.py. Only needed to support Py2.6: def from_float_26(f): """Converts a float to a decimal number, exactly. Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). Since 0.1 is not exactly representable in binary floating point, the value is stored as the nearest representable value which is 0x1.999999999999ap-4. The exact equivalent of the value in decimal is 0.1000000000000000055511151231257827021181583404541015625. >>> Decimal.from_float(0.1) Decimal('0.1000000000000000055511151231257827021181583404541015625') >>> Decimal.from_float(float('nan')) Decimal('NaN') >>> Decimal.from_float(float('inf')) Decimal('Infinity') >>> Decimal.from_float(-float('inf')) Decimal('-Infinity') >>> Decimal.from_float(-0.0) Decimal('-0') """ import math as _math from decimal import _dec_from_triple # only available on Py2.6 and Py2.7 (not 3.3) if isinstance(f, (int, long)): # handle integer inputs return Decimal(f) if _math.isinf(f) or _math.isnan(f): # raises TypeError if not a float return Decimal(repr(f)) if _math.copysign(1.0, f) == 1.0: sign = 0 else: sign = 1 n, d = abs(f).as_integer_ratio() # int.bit_length() method doesn't exist on Py2.6: def bit_length(d): if d != 0: return len(bin(abs(d))) - 2 else: return 0 k = bit_length(d) - 1 result = _dec_from_triple(sign, str(n*5**k), -k) return result __all__ = ['newround']
PythonCharmers/python-future
src/future/builtins/newround.py
Python
mit
3,190
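# Illustrative sketch (not part of the original module): banker's rounding
# (ROUND_HALF_EVEN) resolves exact .5 ties toward the even neighbour, whereas
# Python 2's round() rounded halves away from zero. A quick demonstration of
# the behaviour newround() provides:
from future.builtins.newround import newround

if __name__ == '__main__':
    for value in (0.5, 1.5, 2.5):
        # ties go to the even neighbour: 0, 2, 2
        print(value, '->', newround(value))
    # 2.675 is stored as a binary float slightly below 2.675, so rounding
    # to two digits gives 2.67, not 2.68:
    print(newround(2.675, 2))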
# -*- coding: utf-8 -*- import logging logger = logging.getLogger(__name__) import ldap from django_auth_ldap.config import ActiveDirectoryGroupType from django_auth_ldap.backend import LDAPSettings class LiULDAPSettings(LDAPSettings): """ Defines common settings for all LDAP connections to LiU Active Directory. Overrides *default* settings instead of defining them hard. This makes it possible to override them per project. """ def __init__(self, *args, **kwargs): self.defaults.update({ # Server and connection settings 'SERVER_URI': 'ldap://ad.liu.se', 'START_TLS': True, 'CONNECTION_OPTIONS': { ldap.OPT_X_TLS_REQUIRE_CERT: ldap.OPT_X_TLS_NEVER, # Don't require certificate }, 'BIND_AS_AUTHENTICATING_USER': True, 'GROUP_TYPE': ActiveDirectoryGroupType(), 'USER_ATTR_MAP': { 'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail' }, }) super(LiULDAPSettings, self).__init__(*args, **kwargs) class LiUStudentLDAPSettings(LiULDAPSettings): """ Settings specifically for authenticating students. """ def __init__(self, *args, **kwargs): self.defaults.update({ 'USER_DN_TEMPLATE': 'CN=%(user)s,OU=Students,OU=Accounts,DC=ad,DC=liu,DC=se', }) super(LiUStudentLDAPSettings, self).__init__(*args, **kwargs) class LiUEmployeeLDAPSettings(LiULDAPSettings): """ Settings specifically for authenticating employees. """ def __init__(self, *args, **kwargs): self.defaults.update({ 'USER_DN_TEMPLATE': 'CN=%(user)s,OU=Employees,OU=Accounts,DC=ad,DC=liu,DC=se', }) super(LiUEmployeeLDAPSettings, self).__init__(*args, **kwargs)
ovidner/python-liu
liu/django/settings.py
Python
mit
1,861
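# Illustrative sketch (not part of the original module): one way these
# settings classes might be wired into django-auth-ldap is to subclass
# LDAPBackend and construct the settings object ourselves. Overriding the
# `settings` property like this relies on django-auth-ldap internals
# (_settings, settings_prefix), so treat it as an assumption rather than the
# library's documented extension point:
from django_auth_ldap.backend import LDAPBackend


class LiUStudentLDAPBackend(LDAPBackend):
    settings_prefix = 'AUTH_LDAP_STUDENT_'

    @property
    def settings(self):
        # Build from the LiU defaults instead of the stock LDAPSettings
        if self._settings is None:
            self._settings = LiUStudentLDAPSettings(self.settings_prefix)
        return self._settings

    @settings.setter
    def settings(self, settings):
        self._settings = settings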
""" tests.pretty_print ~~~~~~~~~~~~~~~~~~ :synopsis: Test utility functions. :copyright: (c) 2017, Tommy Ip. :license: MIT """ from tabled.utils import (columns_width, max_width, rotate_table, normalize_list) class TestMaxWidth: def test_normal(self) -> None: column = ['Some text', 'Some more text', 'short', 'def test():', 'This is a very long line of text', 'More!!'] assert max_width(column) == 32 def test_single_element(self) -> None: assert max_width(['Wow this is long']) == 16 class TestRotateTable: table = [['a1', 'b1', 'c1'], ['a2', 'b2', 'c2'], ['a3', 'b3', 'c3']] def test_normal(self) -> None: expected = [['a1', 'a2', 'a3'], ['b1', 'b2', 'b3'], ['c1', 'c2', 'c3']] assert rotate_table(TestRotateTable.table) == expected def test_reversible(self) -> None: rotate_twice = rotate_table(rotate_table(TestRotateTable.table)) assert rotate_twice == TestRotateTable.table def test_asymmetric_list(self) -> None: asymmetric_table = [['a', 'b', 'c', 'd', 'e'], ['1', '2', '3', '4', '5']] expected = [['a', '1'], ['b', '2'], ['c', '3'], ['d', '4'], ['e', '5']] assert rotate_table(asymmetric_table) == expected def test_single_column(self) -> None: column = [['a'], ['b'], ['c'], ['d']] row = [['a', 'b', 'c', 'd']] assert rotate_table(column) == row class TestColumnsWidth: def test_normal(self) -> None: table = [['some text', 'Some more very long text', '123'], ['Another example', 'This is short', 'aaaaaa'], ['fn (x) -> i32 \{\}', ' ', 'tableD'], ['Spammmmmmm', 'More spammmmmmmmmmmmmmmmmmmmm', '']] assert columns_width(table) == [18, 29, 6] def test_empty_cells(self) -> None: table = [['', '', '', '', ''], ['', '', '', '', '']] assert columns_width(table) == [0, 0, 0, 0, 0] class TestNormalizeList: def test_normal(self) -> None: list_raw = ['a', 'b', 'c', 'd'] list_expected = ['a', 'b', 'c', 'd', '', '', ''] assert normalize_list(list_raw, 7) == list_expected def test_same_length(self) -> None: list_raw = ['1', '2', '3', '4', '5', '6', '7'] assert normalize_list(list_raw, 7) == list_raw def test_shorter(self) -> None: list_raw = ['Cell 1', 'Cell 2', 'Cell 3', 'Cell 4', 'Cell 5'] list_expected = ['Cell 1', 'Cell 2', 'Cell 3', 'Cell 4'] assert normalize_list(list_raw, 4) == list_expected
tommyip/tabled
tests/test_utils.py
Python
mit
2,778
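# Illustrative sketch (not part of the original test module): minimal
# reference implementations of the helpers exercised above, assuming tables
# are lists of rows of strings. The real tabled.utils may differ in details,
# but these satisfy the expectations in the tests:


def max_width(column):
    """Width of the widest cell in a column."""
    return max(len(cell) for cell in column)


def rotate_table(table):
    """Swap rows and columns of a list-of-lists table."""
    return [list(column) for column in zip(*table)]


def columns_width(table):
    """Widest cell per column, for a table given as rows."""
    return [max_width(column) for column in rotate_table(table)]


def normalize_list(cells, length):
    """Pad with '' (or truncate) so the list has exactly `length` items."""
    return (cells + [''] * length)[:length]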
# Copyright © 2016-2022 Jakub Wilk <jwilk@jwilk.net> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the “Software”), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import contextlib import errno import io import os import sys import tempfile import unittest.mock from tests.tools import ( assert_equal, assert_is_instance, assert_not_equal, isolation, testcase, ) @contextlib.contextmanager def tmpcwd(): with tempfile.TemporaryDirectory(prefix='anorack.tests.') as tmpdir: orig_cwd = os.getcwd() os.chdir(tmpdir) try: yield finally: os.chdir(orig_cwd) def TextIO(s=None, *, name): fp = io.BytesIO(s) fp.name = name return io.TextIOWrapper(fp, encoding='UTF-8') class CompletedProcess(): def __init__(self, rc, stdout, stderr): self.rc = rc self.stdout = stdout self.stderr = stderr def __run_main(argv, stdin): sys.argv = argv if stdin is not None: if isinstance(stdin, str): stdin = stdin.encode('UTF-8') sys.stdin = mock_stdin = TextIO(stdin, name=sys.__stdin__.name) else: mock_stdin = None sys.stdout = mock_stdout = TextIO(name=sys.__stdout__.name) sys.stderr = mock_stderr = TextIO(name=sys.__stderr__.name) import lib.cli # pylint: disable=bad-option-value,import-outside-toplevel rc = 0 try: lib.cli.main() except SystemExit as exc: rc = exc.code except OSError as exc: rc = exc yield rc for fp in (sys.stdout, sys.stderr): fp.flush() s = fp.buffer.getvalue() # pylint: disable=no-member yield s.decode('UTF-8') del mock_stdin, mock_stdout, mock_stderr def _run_main(argv, stdin): # abuse mock to save&restore sys.argv, sys.stdin, etc.: with unittest.mock.patch.multiple(sys, argv=None, stdin=None, stdout=None, stderr=None): return CompletedProcess(*__run_main(argv, stdin)) run_main = isolation(_run_main) def t(*, stdin=None, files=None, stdout, stdout_ipa=None, stderr='', stderr_ipa=None): if stdout_ipa is None: stdout_ipa = stdout if stderr_ipa is None: stderr_ipa = stderr argv = ['anorack'] if files is not None: for (name, content) in files: with open(name, 'wt', encoding='UTF-8') as file: file.write(content) argv += [name] actual = run_main(argv, stdin) if '-@' in stdout: stdout = stdout.replace('-@', '@') actual.stdout = actual.stdout.replace('-@', '@') assert_equal(stdout, actual.stdout) assert_equal(stderr, actual.stderr) assert_equal(actual.rc, 0) argv += ['--ipa'] actual = run_main(argv, stdin) actual.stderr = actual.stderr.replace('t͡ʃ', 'tʃ') assert_equal(stdout_ipa, actual.stdout) assert_equal(stderr_ipa, actual.stderr) assert_equal(actual.rc, 0) @testcase def test_stdin(): t( stdin=( 'It could be carried by an African swallow!\n' 'Oh, yeah, a African swallow maybe, 
but not an\n' 'European swallow.\n' ), stdout=( "<stdin>:2: a African -> an African /'afrIk@n/\n" "<stdin>:3: an European -> a European /j,U@r-@p'i@n/\n" ), stdout_ipa=( "<stdin>:2: a African -> an African /ˈafɹɪkən/\n" "<stdin>:3: an European -> a European /jˌʊəɹəpˈiən/\n" ), ) @testcase @tmpcwd() def test_files(): t( files=( ('holy', 'It could be carried by a African swallow!'), ('grail', 'Oh, yeah, an African swallow maybe, but not an European swallow.'), ), stdout=( "holy:1: a African -> an African /'afrIk@n/\n" "grail:1: an European -> a European /j,U@r-@p'i@n/\n" ), stdout_ipa=( "holy:1: a African -> an African /ˈafɹɪkən/\n" "grail:1: an European -> a European /jˌʊəɹəpˈiən/\n" ), ) @testcase def test_warning(): def dummy_choose_art(phon): # pylint: disable=unused-argument return NotImplemented with unittest.mock.patch('lib.cli.choose_art', dummy_choose_art): t( stdin='A scratch?!', stdout='', stderr="anorack: warning: can't determine correct article for 'scratch' /skr'atS/\n", stderr_ipa="anorack: warning: can't determine correct article for 'scratch' /skɹˈatʃ/\n", ) @testcase def test_bad_io(): argv = ['anorack', '/nonexistent', '-'] actual = run_main(argv, 'a African') assert_equal('<stdin>:', actual.stdout[:8]) err = os.strerror(errno.ENOENT) stderr = f'{argv[0]}: {argv[1]}: {err}\n' assert_equal(stderr, actual.stderr) assert_equal(actual.rc, 1) argv[1:1] = ['--traceback'] actual = run_main(argv, 'a African') assert_equal('', actual.stdout) assert_equal('', actual.stderr) assert_is_instance(actual.rc, OSError) assert_equal(actual.rc.errno, errno.ENOENT) @testcase def test_changelog(): argv = ['anorack', 'doc/changelog'] actual = run_main(argv, None) assert_equal('', actual.stdout) assert_equal('', actual.stderr) assert_equal(actual.rc, 0) @testcase def test_version(): argv = ['anorack', '--version'] actual = run_main(argv, None) assert_not_equal('', actual.stdout) assert_equal('', actual.stderr) assert_equal(actual.rc, 0) del testcase # vim:ts=4 sts=4 sw=4 et
jwilk/anorack
tests/test_cli.py
Python
mit
6,465
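# Illustrative sketch (not part of the original test suite): the TextIO helper
# above uses a handy pattern — wrap a BytesIO in a TextIOWrapper so code that
# writes text to a "file" can be captured and inspected as bytes afterwards.
# The same pattern in isolation ('captured' is a hypothetical name, mirroring
# the TextIO(name=...) argument):
import io

buf = io.BytesIO()
buf.name = 'captured'          # BytesIO accepts arbitrary attributes
fp = io.TextIOWrapper(buf, encoding='UTF-8')
fp.write('héllo\n')
fp.flush()
# TextIOWrapper.buffer is the underlying BytesIO, and .name delegates to it
assert fp.buffer.getvalue() == 'héllo\n'.encode('UTF-8')
print(fp.name)                 # -> 'captured'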