text stringlengths 4 1.02M | meta dict |
|---|---|
from django import forms
from django.utils.translation import ugettext_lazy as _
class ReversionForm(forms.Form):
    """Form that lets a user pick the version snapshot to revert to."""

    def __init__(self, blog, *args, **kwargs):
        """Build the version-selection form for *blog*.

        Arguments:
        blog -- a Blog instance

        """
        super(ReversionForm, self).__init__(*args, **kwargs)
        # Offer every snapshot recorded for the blog, newest first.
        snapshots = blog.version_snapshots.order_by('-created')
        self.fields['snapshot'] = forms.ModelChoiceField(
            queryset=snapshots,
            label=_("version"),
            error_messages={'required': _('You must select a version snapshot.')})
| {
"content_hash": "7683f7d931abdad366555903f2ea081b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 73,
"avg_line_length": 32.8,
"alnum_prop": 0.7027439024390244,
"repo_name": "oberlin/pressgang",
"id": "704c97fb238289c7a19c46ef33c30935fd3ad323",
"size": "657",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pressgang/actions/revert/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "8867"
},
{
"name": "PHP",
"bytes": "17602"
},
{
"name": "Python",
"bytes": "190495"
}
],
"symlink_target": ""
} |
# Jython script for the Android monkeyrunner tool: performs a single
# leftward swipe on the currently connected device/emulator.
print 'from relay_left'
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
# Connects to the current device, returning a MonkeyDevice object
device = MonkeyRunner.waitForConnection()
# Wake the screen so the gesture is actually delivered.
device.wake()
# drag (tuple start, tuple end, float duration, integer steps)
# Left drag
device.drag((400,400),(100,400),0.15,5)
| {
"content_hash": "de4836bd324694d3926eb7f06a176d91",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 65,
"avg_line_length": 25.384615384615383,
"alnum_prop": 0.7696969696969697,
"repo_name": "stanzheng/android-leapmotion",
"id": "6eb716c8c9c45f3f5d6c834fe5befe19fbed2de2",
"size": "386",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "leapgestures/relay_left.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "161246"
}
],
"symlink_target": ""
} |
'''
Take picture
============
.. author:: Mathieu Virbel <mat@kivy.org>
Little example to demonstrate how to start an Intent, and get the result.
When you use the Android.startActivityForResult(), the result will be dispatched
into onActivityResult. You can catch the event with the android.activity API
from python-for-android project.
If you want to compile it, don't forget to add the CAMERA permission::
./build.py --name 'TakePicture' --package org.test.takepicture \
--permission CAMERA --version 1 \
--private ~/code/kivy/examples/android/takepicture \
debug installd
'''
__version__ = '0.1'
from kivy.app import App
from os.path import exists
from jnius import autoclass, cast
from android import activity
from functools import partial
from kivy.clock import Clock
from kivy.uix.scatter import Scatter
from kivy.properties import StringProperty
from PIL import Image
# Java classes bridged through pyjnius, used to build and fire the
# camera capture Intent.
Intent = autoclass('android.content.Intent')
PythonActivity = autoclass('org.renpy.android.PythonActivity')
MediaStore = autoclass('android.provider.MediaStore')
Uri = autoclass('android.net.Uri')
Environment = autoclass('android.os.Environment')
class Picture(Scatter):
    # Filesystem path of the image this widget displays.
    source = StringProperty(None)
class TakePictureApp(App):
    """Kivy app that launches the Android camera via an Intent and shows
    each captured photo as a draggable Picture widget."""

    def build(self):
        self.index = 0
        activity.bind(on_activity_result=self.on_activity_result)

    def get_filename(self):
        """Return the first takepicture<N>.jpg path that does not exist yet."""
        while True:
            self.index += 1
            candidate = Environment.getExternalStorageDirectory().getPath()+'/takepicture{}.jpg'.format(self.index)
            if not exists(candidate):
                return candidate

    def take_picture(self):
        """Ask the camera app to capture into a fresh file via EXTRA_OUTPUT."""
        intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE)
        self.last_fn = self.get_filename()
        parsed = Uri.parse('file://' + self.last_fn)
        self.uri = cast('android.os.Parcelable', parsed)
        intent.putExtra(MediaStore.EXTRA_OUTPUT, self.uri)
        PythonActivity.mActivity.startActivityForResult(intent, 0x123)

    def on_activity_result(self, requestCode, resultCode, intent):
        # Only react to our own request code; defer to the UI thread.
        if requestCode == 0x123:
            Clock.schedule_once(partial(self.add_picture, self.last_fn), 0)

    def add_picture(self, fn, *args):
        """Downscale the captured photo in place and display it."""
        img = Image.open(fn)
        w, h = img.size
        img.thumbnail( (w/4,h/4) , Image.ANTIALIAS)
        img.save(fn,quality=95)
        self.root.add_widget(Picture(source=fn, center=self.root.center))

    def on_pause(self):
        # Stay alive while the camera activity is in the foreground.
        return True
TakePictureApp().run()
| {
"content_hash": "f0f6206eedbf4317e93a1a442220543e",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 108,
"avg_line_length": 31.098765432098766,
"alnum_prop": 0.6776498610559746,
"repo_name": "Zen-CODE/kivy",
"id": "ecfbc55d2347d67cda2f02ab1e7c8c84378af789",
"size": "2519",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/android/takepicture/main.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from behave import *
import urllib2
import urllib
import httplib
import json
import jsonpath_rw
###
### Helpers.
###
## The basic and critical remote collector.
## It defines:
## context.code
## context.content_type
## context.content
## context.content_length
def get_and_process(context, url, data):
    """Fetch *url* (POST when *data* is non-empty, GET otherwise) and stash
    the response on the behave context.

    Sets:
    context.code           -- HTTP status code
    context.content_type   -- response MIME type
    context.content        -- response body
    context.content_length -- body length in bytes (0 when empty)

    Fails the scenario (AssertionError) when the request cannot complete.
    """
    ## Build request.
    if data:
        req_data = urllib.urlencode(data)
        req = urllib2.Request(url, req_data)
    else:
        req = urllib2.Request(url)
    ## Make the attempt, or chatty fail.
    #httplib.HTTPConnection.debuglevel = 1
    response = None
    try:
        response = urllib2.urlopen(req)
    except urllib2.URLError as e:
        print('Tried: ', url)
        if hasattr(e, 'reason'):
            print('Failed to reach server: ', e.reason)
        if hasattr(e, 'code'):
            print('Server error, code: ', e.code)
        # Read the body at most once: a second read() on a urllib2
        # response returns an empty string, so the original double
        # read() here could never print the actual body.
        if response:
            body = response.read()
            if body:
                print('Response: ', body)
        assert True is False
    ## Parcel out what we have for downstream checking.
    context.code = response.code
    ## https://docs.python.org/2/library/mimetools.html#mimetools.Message
    context.content_type = response.info().gettype()
    context.content = response.read()
    context.content_length = 0
    if context.content:
        context.content_length = len(context.content)
###
### Definitions.
###
## Collector for internal path.
@given('I collect data at path "{path}"')
def step_impl(context, path):
    """Fetch *path* relative to the configured target host."""
    destination = context.target + path
    get_and_process(context, destination, {})
## Collector for remote resource.
@given('I collect data at URL "{url}"')
def step_impl(context, url):
    """Fetch an absolute *url* and stash the response on the context."""
    get_and_process(context, url, {})
@then('the content type should be "{ctype}"')
def step_impl(context, ctype):
    """Check that the collected response carried content type *ctype*."""
    if context.content_type:
        assert context.content_type == ctype
    else:
        ## Apparently no content type at all...
        assert True is False
@then('the content should contain "{text}"')
def step_impl(context, text):
    """Check that *text* occurs somewhere in the collected content."""
    if context.content:
        assert text in context.content
    else:
        ## Apparently no text at all...
        assert True is False
## Adds:
## context.content_json
@when('the content is converted to JSON')
def step_impl(context):
    """Parse the collected content, storing it as context.content_json."""
    if context.content:
        context.content_json = json.loads(context.content)
    else:
        ## Apparently no text at all...
        assert True is False
@then('the JSON should have the top-level property "{prop}"')
def step_impl(context, prop):
    """Assert that the parsed JSON object has *prop* as a top-level key."""
    if not context.content_json:
        ## Apparently no JSON at all...
        assert True is False
    else:
        # Membership test rather than .get(): a property whose value is
        # falsy (0, "", null, false, []) must still count as present.
        assert prop in context.content_json
@then('the JSON should have the JSONPath "{jsonpath}"')
def step_impl(context, jsonpath):
    """Check that *jsonpath* matches at least one node in the JSON."""
    if context.content_json:
        matches = jsonpath_rw.parse(jsonpath).find(context.content_json)
        assert matches
    else:
        ## Apparently no JSON at all...
        assert True is False
@then('the JSON should have JSONPath "{jsonpath}" equal to "{thing}" "{value}"')
def step_impl(context, jsonpath, thing, value):
    """Assert the first JSONPath match equals *value*, coerced per *thing*
    ("string", "integer" or "float")."""
    if not context.content_json:
        ## Apparently no JSON at all...
        assert True is False
    else:
        jsonpath_expr = jsonpath_rw.parse(jsonpath)
        res = jsonpath_expr.find(context.content_json)
        # Guard on the match list itself: the original res[0] would raise
        # IndexError (not a clean assertion failure) when nothing matched.
        if not res:
            assert True is False
        else:
            if thing == "string":
                assert res[0].value == value
            elif thing == "integer":
                assert res[0].value == int(value)
            elif thing == "float":
                assert res[0].value == float(value)
            else:
                ## Not a thing we know how to deal with yet.
                assert True is False
| {
"content_hash": "df1c66aa7b0c3245a090f985e78fb03b",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 80,
"avg_line_length": 29.253731343283583,
"alnum_prop": 0.6099489795918367,
"repo_name": "kshefchek/monarch-app",
"id": "413f98e662cf7e1a896fe51f0e9667d5d23750b4",
"size": "3990",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/behave/steps/data-basic.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "57960"
},
{
"name": "Gherkin",
"bytes": "17312"
},
{
"name": "HTML",
"bytes": "375931"
},
{
"name": "JavaScript",
"bytes": "1149626"
},
{
"name": "Makefile",
"bytes": "4074"
},
{
"name": "Perl",
"bytes": "16419"
},
{
"name": "Python",
"bytes": "30843"
},
{
"name": "Shell",
"bytes": "3875"
},
{
"name": "Vue",
"bytes": "124230"
}
],
"symlink_target": ""
} |
"""Test the wallet."""
from test_framework.test_framework import SarielsazTestFramework
from test_framework.util import *
class WalletTest(SarielsazTestFramework):
    """Exercise basic wallet RPC behaviour on a small node network."""

    def set_test_params(self):
        # Four nodes on a fresh chain; only the first three are started by
        # setup_network — node 3 is brought up later by run_test.
        self.num_nodes = 4
        self.setup_clean_chain = True
def setup_network(self):
self.add_nodes(4)
self.start_node(0)
self.start_node(1)
self.start_node(2)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.sync_all([self.nodes[0:3]])
def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
"""Return curr_balance after asserting the fee was in range"""
fee = balance_with_fee - curr_balance
assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
return curr_balance
    def run_test(self):
        """Walk the wallet RPCs through balance tracking, output locking,
        (non-)broadcast behaviour, imports, unicode labels, maintenance
        restarts and long-mempool-chain limits on a three-node network
        (node 3 joins only for the rebroadcast test)."""
        # Check that there's no UTXO on none of the nodes
        assert_equal(len(self.nodes[0].listunspent()), 0)
        assert_equal(len(self.nodes[1].listunspent()), 0)
        assert_equal(len(self.nodes[2].listunspent()), 0)
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 50)
        assert_equal(walletinfo['balance'], 0)
        self.sync_all([self.nodes[0:3]])
        self.nodes[1].generate(101)
        self.sync_all([self.nodes[0:3]])
        assert_equal(self.nodes[0].getbalance(), 50)
        assert_equal(self.nodes[1].getbalance(), 50)
        assert_equal(self.nodes[2].getbalance(), 0)
        # Check that only first and second nodes have UTXOs
        utxos = self.nodes[0].listunspent()
        assert_equal(len(utxos), 1)
        assert_equal(len(self.nodes[1].listunspent()), 1)
        assert_equal(len(self.nodes[2].listunspent()), 0)
        self.log.info("test gettxout")
        confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
        # First, outputs that are unspent both in the chain and in the
        # mempool should appear with or without include_mempool
        txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
        assert_equal(txout['value'], 50)
        txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
        assert_equal(txout['value'], 50)
        # Send 21 BTC from 0 to 2 using sendtoaddress call.
        # Locked memory should use at least 32 bytes to sign each transaction
        self.log.info("test getmemoryinfo")
        memory_before = self.nodes[0].getmemoryinfo()
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
        mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
        memory_after = self.nodes[0].getmemoryinfo()
        assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used'])
        self.log.info("test gettxout (second part)")
        # utxo spent in mempool should be visible if you exclude mempool
        # but invisible if you include mempool
        txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
        assert_equal(txout['value'], 50)
        txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
        assert txout is None
        # new utxo from mempool should be invisible if you exclude mempool
        # but visible if you include mempool
        txout = self.nodes[0].gettxout(mempool_txid, 0, False)
        assert txout is None
        txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
        txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
        # note the mempool tx will have randomly assigned indices
        # but 10 will go to node2 and the rest will go to node0
        balance = self.nodes[0].getbalance()
        assert_equal(set([txout1['value'], txout2['value']]), set([10, balance]))
        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 0)
        # Have node0 mine a block, thus it will collect its own fee.
        self.nodes[0].generate(1)
        self.sync_all([self.nodes[0:3]])
        # Exercise locking of unspent outputs
        unspent_0 = self.nodes[2].listunspent()[0]
        unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
        self.nodes[2].lockunspent(False, [unspent_0])
        assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
        assert_equal([unspent_0], self.nodes[2].listlockunspent())
        self.nodes[2].lockunspent(True, [unspent_0])
        assert_equal(len(self.nodes[2].listlockunspent()), 0)
        # Have node1 generate 100 blocks (so node0 can recover the fee)
        self.nodes[1].generate(100)
        self.sync_all([self.nodes[0:3]])
        # node0 should end up with 100 btc in block rewards plus fees, but
        # minus the 21 plus fees sent to node2
        assert_equal(self.nodes[0].getbalance(), 100-21)
        assert_equal(self.nodes[2].getbalance(), 21)
        # Node0 should have two unspent outputs.
        # Create a couple of transactions to send them to node2, submit them through
        # node1, and make sure both node0 and node2 pick them up properly:
        node0utxos = self.nodes[0].listunspent(1)
        assert_equal(len(node0utxos), 2)
        # create both transactions
        txns_to_send = []
        for utxo in node0utxos:
            inputs = []
            outputs = {}
            inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
            outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] - 3
            raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
            txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))
        # Have node 1 (miner) send the transactions
        self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
        self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
        # Have node1 mine a block to confirm transactions:
        self.nodes[1].generate(1)
        self.sync_all([self.nodes[0:3]])
        assert_equal(self.nodes[0].getbalance(), 0)
        assert_equal(self.nodes[2].getbalance(), 94)
        assert_equal(self.nodes[2].getbalance("from1"), 94-21)
        # Send 10 BTC normal
        address = self.nodes[0].getnewaddress("test")
        fee_per_byte = Decimal('0.001') / 1000
        self.nodes[2].settxfee(fee_per_byte * 1000)
        txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
        assert_equal(self.nodes[0].getbalance(), Decimal('10'))
        # Send 10 BTC with subtract fee from amount
        txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal -= Decimal('10')
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
        # Sendmany 10 BTC
        txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [])
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_0_bal += Decimal('10')
        node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
        assert_equal(self.nodes[0].getbalance(), node_0_bal)
        # Sendmany 10 BTC with subtract fee from amount
        txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [address])
        self.nodes[2].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal -= Decimal('10')
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
        # Test ResendWalletTransactions:
        # Create a couple of transactions, then start up a fourth
        # node (nodes[3]) and ask nodes[0] to rebroadcast.
        # EXPECT: nodes[3] should have those transactions in its mempool.
        txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
        txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        sync_mempools(self.nodes[0:2])
        self.start_node(3)
        connect_nodes_bi(self.nodes, 0, 3)
        sync_blocks(self.nodes)
        relayed = self.nodes[0].resendwallettransactions()
        assert_equal(set(relayed), {txid1, txid2})
        sync_mempools(self.nodes)
        assert(txid1 in self.nodes[3].getrawmempool())
        # Exercise balance rpcs
        assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
        assert_equal(self.nodes[0].getunconfirmedbalance(), 1)
        #check if we can list zero value tx as available coins
        #1. create rawtx
        #2. hex-changed one output to 0.0
        #3. sign and send
        #4. check if recipient (node0) can list the zero value tx
        usp = self.nodes[1].listunspent()
        inputs = [{"txid":usp[0]['txid'], "vout":usp[0]['vout']}]
        outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}
        rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") #replace 11.11 with 0.0 (int32)
        decRawTx = self.nodes[1].decoderawtransaction(rawTx)
        signedRawTx = self.nodes[1].signrawtransaction(rawTx)
        decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
        zeroValueTxid= decRawTx['txid']
        self.nodes[1].sendrawtransaction(signedRawTx['hex'])
        self.sync_all()
        self.nodes[1].generate(1) #mine a block
        self.sync_all()
        unspentTxs = self.nodes[0].listunspent() #zero value tx must be in listunspents output
        found = False
        for uTx in unspentTxs:
            if uTx['txid'] == zeroValueTxid:
                found = True
                assert_equal(uTx['amount'], Decimal('0'))
        assert(found)
        #do some -walletbroadcast tests
        self.stop_nodes()
        self.start_node(0, ["-walletbroadcast=0"])
        self.start_node(1, ["-walletbroadcast=0"])
        self.start_node(2, ["-walletbroadcast=0"])
        connect_nodes_bi(self.nodes,0,1)
        connect_nodes_bi(self.nodes,1,2)
        connect_nodes_bi(self.nodes,0,2)
        self.sync_all([self.nodes[0:3]])
        txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
        txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
        self.nodes[1].generate(1) #mine a block, tx should not be in there
        self.sync_all([self.nodes[0:3]])
        assert_equal(self.nodes[2].getbalance(), node_2_bal) #should not be changed because tx was not broadcasted
        #now broadcast from another node, mine a block, sync, and check the balance
        self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
        self.nodes[1].generate(1)
        self.sync_all([self.nodes[0:3]])
        node_2_bal += 2
        txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        #create another tx
        txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
        #restart the nodes with -walletbroadcast=1
        self.stop_nodes()
        self.start_node(0)
        self.start_node(1)
        self.start_node(2)
        connect_nodes_bi(self.nodes,0,1)
        connect_nodes_bi(self.nodes,1,2)
        connect_nodes_bi(self.nodes,0,2)
        sync_blocks(self.nodes[0:3])
        self.nodes[0].generate(1)
        sync_blocks(self.nodes[0:3])
        node_2_bal += 2
        #tx should be added to balance because after restarting the nodes tx should be broadcastet
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        #send a tx with value in a string (PR#6380 +)
        txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
        txObj = self.nodes[0].gettransaction(txId)
        assert_equal(txObj['amount'], Decimal('-2'))
        txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
        txObj = self.nodes[0].gettransaction(txId)
        assert_equal(txObj['amount'], Decimal('-0.0001'))
        #check if JSON parser can handle scientific notation in strings
        txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
        txObj = self.nodes[0].gettransaction(txId)
        assert_equal(txObj['amount'], Decimal('-0.0001'))
        # This will raise an exception because the amount type is wrong
        assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
        # This will raise an exception since generate does not accept a string
        assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
        # Import address and private key to check correct behavior of spendable unspents
        # 1. Send some coins to generate new UTXO
        address_to_import = self.nodes[2].getnewaddress()
        txid = self.nodes[0].sendtoaddress(address_to_import, 1)
        self.nodes[0].generate(1)
        self.sync_all([self.nodes[0:3]])
        # 2. Import address from node2 to node1
        self.nodes[1].importaddress(address_to_import)
        # 3. Validate that the imported address is watch-only on node1
        assert(self.nodes[1].validateaddress(address_to_import)["iswatchonly"])
        # 4. Check that the unspents after import are not spendable
        assert_array_result(self.nodes[1].listunspent(),
                            {"address": address_to_import},
                            {"spendable": False})
        # 5. Import private key of the previously imported address on node1
        priv_key = self.nodes[2].dumpprivkey(address_to_import)
        self.nodes[1].importprivkey(priv_key)
        # 6. Check that the unspents are now spendable on node1
        assert_array_result(self.nodes[1].listunspent(),
                            {"address": address_to_import},
                            {"spendable": True})
        # Mine a block from node0 to an address from node1
        cbAddr = self.nodes[1].getnewaddress()
        blkHash = self.nodes[0].generatetoaddress(1, cbAddr)[0]
        cbTxId = self.nodes[0].getblock(blkHash)['tx'][0]
        self.sync_all([self.nodes[0:3]])
        # Check that the txid and balance is found by node1
        self.nodes[1].gettransaction(cbTxId)
        # check if wallet or blockchain maintenance changes the balance
        self.sync_all([self.nodes[0:3]])
        blocks = self.nodes[0].generate(2)
        self.sync_all([self.nodes[0:3]])
        balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
        block_count = self.nodes[0].getblockcount()
        # Check modes:
        # - True: unicode escaped as \u....
        # - False: unicode directly as UTF-8
        for mode in [True, False]:
            self.nodes[0].ensure_ascii = mode
            # unicode check: Basic Multilingual Plane, Supplementary Plane respectively
            for s in [u'рыба', u'𝅘𝅥𝅯']:
                addr = self.nodes[0].getaccountaddress(s)
                label = self.nodes[0].getaccount(addr)
                assert_equal(label, s)
                assert(s in self.nodes[0].listaccounts().keys())
        self.nodes[0].ensure_ascii = True # restore to default
        # maintenance tests
        maintenance = [
            '-rescan',
            '-reindex',
            '-zapwallettxes=1',
            '-zapwallettxes=2',
            # disabled until issue is fixed: https://github.com/sarielsaz/sarielsaz/issues/7463
            # '-salvagewallet',
        ]
        chainlimit = 6
        for m in maintenance:
            self.log.info("check " + m)
            self.stop_nodes()
            # set lower ancestor limit for later
            self.start_node(0, [m, "-limitancestorcount="+str(chainlimit)])
            self.start_node(1, [m, "-limitancestorcount="+str(chainlimit)])
            self.start_node(2, [m, "-limitancestorcount="+str(chainlimit)])
            while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]:
                # reindex will leave rpc warm up "early"; Wait for it to finish
                time.sleep(0.1)
            # Balances must survive every maintenance restart unchanged.
            assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
        # Exercise listsinceblock with the last two blocks
        coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
        assert_equal(coinbase_tx_1["lastblock"], blocks[1])
        assert_equal(len(coinbase_tx_1["transactions"]), 1)
        assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
        assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
        # ==Check that wallet prefers to use coins that don't exceed mempool limits =====
        # Get all non-zero utxos together
        chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()]
        singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
        self.nodes[0].generate(1)
        node0_balance = self.nodes[0].getbalance()
        # Split into two chains
        rawtx = self.nodes[0].createrawtransaction([{"txid":singletxid, "vout":0}], {chain_addrs[0]:node0_balance/2-Decimal('0.01'), chain_addrs[1]:node0_balance/2-Decimal('0.01')})
        signedtx = self.nodes[0].signrawtransaction(rawtx)
        singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"])
        self.nodes[0].generate(1)
        # Make a long chain of unconfirmed payments without hitting mempool limit
        # Each tx we make leaves only one output of change on a chain 1 longer
        # Since the amount to send is always much less than the outputs, we only ever need one output
        # So we should be able to generate exactly chainlimit txs for each original output
        sending_addr = self.nodes[1].getnewaddress()
        txid_list = []
        for i in range(chainlimit*2):
            txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')))
        assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit*2)
        assert_equal(len(txid_list), chainlimit*2)
        # Without walletrejectlongchains, we will still generate a txid
        # The tx will be stored in the wallet but not accepted to the mempool
        extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))
        assert(extra_txid not in self.nodes[0].getrawmempool())
        assert(extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()])
        self.nodes[0].abandontransaction(extra_txid)
        total_txs = len(self.nodes[0].listtransactions("*",99999))
        # Try with walletrejectlongchains
        # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
        self.stop_node(0)
        self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount="+str(2*chainlimit)])
        # wait for loadmempool
        timeout = 10
        while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit*2):
            time.sleep(0.5)
            timeout -= 0.5
        assert_equal(len(self.nodes[0].getrawmempool()), chainlimit*2)
        node0_balance = self.nodes[0].getbalance()
        # With walletrejectlongchains we will not create the tx and store it in our wallet.
        assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))
        # Verify nothing new in wallet
        assert_equal(total_txs, len(self.nodes[0].listtransactions("*",99999)))
# Standard functional-test entry point.
if __name__ == '__main__':
    WalletTest().main()
| {
"content_hash": "316fbbae70ff0109926589619b1e5225",
"timestamp": "",
"source": "github",
"line_count": 427,
"max_line_length": 181,
"avg_line_length": 47.594847775175644,
"alnum_prop": 0.625695025340747,
"repo_name": "sarielsaz/sarielsaz",
"id": "5884517968865ea94d9cdabb7e9638cd00472d93",
"size": "20546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/wallet.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "728890"
},
{
"name": "C++",
"bytes": "5278228"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30306"
},
{
"name": "M4",
"bytes": "193646"
},
{
"name": "Makefile",
"bytes": "114515"
},
{
"name": "Objective-C",
"bytes": "141134"
},
{
"name": "Objective-C++",
"bytes": "6771"
},
{
"name": "Python",
"bytes": "1245032"
},
{
"name": "QMake",
"bytes": "758"
},
{
"name": "Shell",
"bytes": "60997"
}
],
"symlink_target": ""
} |
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
from urllib.parse import parse_qs, urljoin, urlparse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request
T = TypeVar("T")
# Callback signature accepted via the `cls` kwarg: receives the pipeline
# response, the deserialized body, and the response headers.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Shared serializer; validation is done server-side for this API version.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(*, filter: Optional[str] = None, **kwargs: Any) -> HttpRequest:
    """Build the GET request that lists access review schedule definitions,
    optionally constrained by an OData *filter* expression."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop("api_version", params.pop("api-version", "2021-03-01-preview"))  # type: str
    accept = headers.pop("Accept", "application/json")

    # URL
    url = kwargs.pop("template_url", "/providers/Microsoft.Authorization/accessReviewScheduleDefinitions")

    # Query parameters
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if filter is not None:
        params["$filter"] = _SERIALIZER.query("filter", filter, "str", skip_quote=True)

    # Headers
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
class AccessReviewScheduleDefinitionsAssignedForMyApprovalOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.authorization.v2021_03_01_preview.AuthorizationManagementClient`'s
:attr:`access_review_schedule_definitions_assigned_for_my_approval` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(self, filter: Optional[str] = None, **kwargs: Any) -> Iterable["_models.AccessReviewScheduleDefinition"]:
"""Get access review instances assigned for my approval.
:param filter: The filter to apply on the operation. Other than standard filters, one custom
filter option is supported : 'assignedToMeToReview()'. When one specified
$filter=assignedToMeToReview(), only items that are assigned to the calling user to review are
returned. Default value is None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AccessReviewScheduleDefinition or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.authorization.v2021_03_01_preview.models.AccessReviewScheduleDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-03-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessReviewScheduleDefinitionListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
filter=filter,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
    """Deserialize one page and return ``(next_link_or_None, iterator_of_items)``."""
    deserialized = self._deserialize("AccessReviewScheduleDefinitionListResult", pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        # Apply the caller-supplied transformation to the raw element list.
        list_of_elem = cls(list_of_elem)
    return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
    """Fetch one page, raising :class:`HttpResponseError` on a non-200 status."""
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorDefinition, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
    return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/providers/Microsoft.Authorization/accessReviewScheduleDefinitions"} # type: ignore
| {
"content_hash": "229c632161dd3a224ad2767a634f3142",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 122,
"avg_line_length": 44.83892617449664,
"alnum_prop": 0.6591827570722946,
"repo_name": "Azure/azure-sdk-for-python",
"id": "bcc73df997534fd8aeeb5036e7970fe6abe2e166",
"size": "7181",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/authorization/azure-mgmt-authorization/azure/mgmt/authorization/v2021_03_01_preview/operations/_access_review_schedule_definitions_assigned_for_my_approval_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import json
from vnpy.trader.vtConstant import EMPTY_STRING
from vnpy.rpc import RpcServer
from vnpy.trader.vtFunction import getJsonPath
########################################################################
class RsEngine(object):
    """RPC service engine.

    Exposes the main engine's API over an RPC server (REP socket) and
    republishes every event from the event engine to subscribers (PUB
    socket).
    """

    # Name of the JSON settings file holding the REP/PUB addresses.
    settingFileName = 'RS_setting.json'
    settingFilePath = getJsonPath(settingFileName, __file__)

    name = u'RPC服务'

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine):
        """Constructor.

        :param mainEngine: application main engine whose API is exposed
        :param eventEngine: event engine whose events are republished
        """
        self.mainEngine = mainEngine
        self.eventEngine = eventEngine

        self.server = None              # RPC server object
        self.repAddress = EMPTY_STRING  # REP socket address
        self.pubAddress = EMPTY_STRING  # PUB socket address

        self.functionDict = {}          # cache of previously resolved callables

        self.loadSetting()
        self.registerEvent()

    #----------------------------------------------------------------------
    def loadSetting(self):
        """Load the REP/PUB addresses from the settings file and start the RPC server."""
        with open(self.settingFilePath) as f:
            d = json.load(f)
            self.repAddress = d['repAddress']
            self.pubAddress = d['pubAddress']

        self.server = RpcServer(self.repAddress, self.pubAddress)
        self.server.usePickle()
        self.server.register(self.call)
        self.server.start()

    #----------------------------------------------------------------------
    def registerEvent(self):
        """Register a general handler so every event type is forwarded."""
        self.eventEngine.registerGeneralHandler(self.processEvent)

    #----------------------------------------------------------------------
    def call(self, d):
        """Resolve and invoke a main-engine function requested over RPC.

        :param d: dict with keys 'nameList' (attribute path starting at the
            main engine), 'args' and 'kwargs' (call arguments)
        :return: whatever the resolved callable returns
        """
        nameTuple = tuple(d['nameList'])    # attribute path; tuple so it is hashable
        args = d['args']
        kwargs = d['kwargs']

        # Use the cached callable if this path has been resolved before.
        function = self.functionDict.get(nameTuple)
        if function is None:
            # Walk the attribute path starting from the main engine.
            obj = self.mainEngine
            for name in nameTuple:
                obj = getattr(obj, name)
            # Cache the resolved callable for subsequent calls.
            self.functionDict[nameTuple] = obj
            function = obj

        return function(*args, **kwargs)

    #----------------------------------------------------------------------
    def processEvent(self, event):
        """Publish an event to all RPC subscribers."""
        self.server.publish('', event)

    #----------------------------------------------------------------------
    def stop(self):
        """Stop the RPC server."""
        self.server.stop()
| {
"content_hash": "a2fa56878dd8e1a8c1e48d5bb3b88d2e",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 75,
"avg_line_length": 31.439560439560438,
"alnum_prop": 0.41698706745893044,
"repo_name": "rrrrrr8/vnpy",
"id": "04faf346fcf22f6ad336f3e0aa66e8bf114b802f",
"size": "3098",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vnpy/trader/app/rpcService/rsEngine.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1222"
},
{
"name": "C",
"bytes": "3151559"
},
{
"name": "C++",
"bytes": "8839816"
},
{
"name": "CMake",
"bytes": "30774"
},
{
"name": "Dockerfile",
"bytes": "5928"
},
{
"name": "HTML",
"bytes": "807"
},
{
"name": "Makefile",
"bytes": "87252"
},
{
"name": "Objective-C",
"bytes": "22505"
},
{
"name": "PHP",
"bytes": "4107"
},
{
"name": "Python",
"bytes": "5715025"
},
{
"name": "Shell",
"bytes": "3862"
}
],
"symlink_target": ""
} |
"""Defines the product data file input type contained within job data"""
from __future__ import unicode_literals
import os
from django.db import transaction
from django.utils.text import get_valid_filename
from django.utils.timezone import now
from job.configuration.data.data_file import AbstractDataFileStore
from job.execution.container import SCALE_JOB_EXE_OUTPUT_PATH
from product.models import FileAncestryLink, ProductFile
from recipe.models import Recipe, RecipeTypeRevision
from storage.models import Workspace
class ProductDataFileStore(AbstractDataFileStore):
    """Implements the data file store class to provide a way to validate product file output configuration and store
    product data files.
    """

    def get_workspaces(self, workspace_ids):
        """See :meth:`job.configuration.data.data_file.AbstractDataFileStore.get_workspaces`
        """
        workspaces = Workspace.objects.filter(id__in=workspace_ids)

        # Map each workspace ID to whether that workspace is active
        results = {}
        for workspace in workspaces:
            results[workspace.id] = workspace.is_active
        return results

    def store_files(self, data_files, input_file_ids, job_exe):
        """See :meth:`job.configuration.data.data_file.AbstractDataFileStore.store_files`
        """
        workspace_ids = data_files.keys()
        workspaces = Workspace.objects.filter(id__in=workspace_ids)

        results = {}  # Maps each full local path to its stored product file ID
        remote_path = self._calculate_remote_path(job_exe, input_file_ids)
        with transaction.atomic():
            for workspace in workspaces:
                file_list = data_files[workspace.id]
                files_to_store = []
                for file_metadata in file_list:
                    local_path = file_metadata.local_path
                    # Preserve the directory structure the job created under its
                    # output dir; fall back to just the file name otherwise
                    if local_path.startswith(SCALE_JOB_EXE_OUTPUT_PATH):
                        rel_local_path = os.path.relpath(local_path, SCALE_JOB_EXE_OUTPUT_PATH)
                    else:
                        rel_local_path = os.path.basename(local_path)
                    file_metadata.remote_path = os.path.join(remote_path, rel_local_path)
                    files_to_store.append(file_metadata)

                product_files = ProductFile.objects.upload_files(files_to_store, input_file_ids, job_exe, workspace)

                for i in range(len(product_files)):
                    full_local_path = file_list[i].local_path
                    product_file = product_files[i]
                    results[full_local_path] = product_file.id

            FileAncestryLink.objects.create_file_ancestry_links(input_file_ids, set(results.values()), job_exe.job,
                                                                job_exe.id)

        return results

    def _calculate_remote_path(self, job_exe, input_file_ids):
        """Returns the remote path for storing the products

        :param job_exe: The job execution model (with related job and job_type fields) that is storing the files
        :type job_exe: :class:`job.models.JobExecution`
        :param input_file_ids: Set of input file IDs
        :type input_file_ids: set of int
        :returns: The remote path for storing the products
        :rtype: str
        """

        remote_path = ''

        # Prefix with the recipe type/revision when the job belongs to a recipe
        job_recipe = Recipe.objects.get_recipe_for_job(job_exe.job_id)
        if job_recipe:
            recipe = job_recipe.recipe
            recipe_type_path = get_valid_filename(recipe.recipe_type.name)
            recipe_version_path = get_valid_filename('revision_%i' % recipe.recipe_type.revision_num)
            remote_path = os.path.join(remote_path, 'recipes', recipe_type_path, recipe_version_path)

        job_type_path = get_valid_filename(job_exe.job.job_type.name)
        job_version_path = get_valid_filename(job_exe.job.job_type.version)
        remote_path = os.path.join(remote_path, 'jobs', job_type_path, job_version_path)

        # Try to use source start time from the job
        the_date = job_exe.job.source_started

        if not the_date:
            # Try to grab source started the old way through the source ancestor file
            for source_file in FileAncestryLink.objects.get_source_ancestors(list(input_file_ids)):
                if source_file.data_started:
                    if not the_date or source_file.data_started < the_date:
                        the_date = source_file.data_started

        # No data start time populated, use current time
        if not the_date:
            remote_path = os.path.join(remote_path, 'unknown_source_data_time')
            the_date = now()

        year_dir = str(the_date.year)
        month_dir = '%02d' % the_date.month
        day_dir = '%02d' % the_date.day
        return os.path.join(remote_path, year_dir, month_dir, day_dir, 'job_exe_%i' % job_exe.id)
| {
"content_hash": "9729d4cf0b9e073264be9ae7e05123a3",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 140,
"avg_line_length": 44.6,
"alnum_prop": 0.6328984916428863,
"repo_name": "ngageoint/scale",
"id": "bdb2820861f68c89de2c37bfe4a6e593d8bbda5c",
"size": "4906",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scale/product/configuration/product_data_file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7219"
},
{
"name": "CSS",
"bytes": "12193"
},
{
"name": "Dockerfile",
"bytes": "14853"
},
{
"name": "HCL",
"bytes": "301"
},
{
"name": "HTML",
"bytes": "48818"
},
{
"name": "JavaScript",
"bytes": "503"
},
{
"name": "Makefile",
"bytes": "5852"
},
{
"name": "Python",
"bytes": "5295677"
},
{
"name": "Shell",
"bytes": "26650"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
import re
from stripe import six
from stripe.multipart_data_generator import MultipartDataGenerator
class TestMultipartDataGenerator(object):
    """Tests for MultipartDataGenerator's encoding of params and files."""

    def run_test_multipart_data_with_file(self, test_file):
        """Encode a mixed param set plus ``test_file`` and verify the body.

        Checks that every key appears as a form-data part, that the file
        part carries a filename and an octet-stream content type, and that
        the file's contents are embedded verbatim in the generated body.
        """
        params = {
            "key1": b"ASCII value",
            "key2": u"Üñìçôdé value",
            "key3": test_file,
            "key4": {
                "string": "Hello!",
                "int": 234,
                "float": 3.14159,
                "bool": True,
                "dict": {"foo": "bar"},
            },
        }
        generator = MultipartDataGenerator()
        generator.add_params(params)
        http_body = generator.get_post_data()
        if six.PY3:
            # The generator returns bytes on Python 3; decode so the regex
            # matching below works on text.
            http_body = http_body.decode("utf-8")

        assert re.search(
            r"Content-Disposition: form-data; name=\"key1\"", http_body
        )
        assert re.search(r"ASCII value", http_body)
        assert re.search(
            r"Content-Disposition: form-data; name=\"key2\"", http_body
        )
        assert re.search(r"Üñìçôdé value", http_body)
        # The file part must include a filename and a binary content type.
        assert re.search(
            r"Content-Disposition: form-data; name=\"key3\"; "
            r"filename=\".+\"",
            http_body,
        )
        assert re.search(r"Content-Type: application/octet-stream", http_body)
        # Nested dict values are flattened into key4[...] form fields.
        assert re.search(
            r"Content-Disposition: form-data; name=\"key4\[string\]\"",
            http_body,
        )
        assert re.search(r"Hello!", http_body)
        assert re.search(
            r"Content-Disposition: form-data; name=\"key4\[int\]\"", http_body
        )
        assert re.search(r"234", http_body)
        assert re.search(
            r"Content-Disposition: form-data; name=\"key4\[float\]\"",
            http_body,
        )
        assert re.search(r"3.14159", http_body)
        assert re.search(
            r"Content-Disposition: form-data; name=\"key4\[bool\]\"", http_body
        )
        assert re.search(r"True", http_body)
        assert re.search(
            r"Content-Disposition: form-data; name=\"key4\[dict\]\[foo\]\"",
            http_body,
        )
        assert re.search(r"bar", http_body)

        # The raw file contents must appear verbatim in the body.
        test_file.seek(0)
        file_contents = test_file.read()
        if six.PY3 and isinstance(file_contents, bytes):
            file_contents = file_contents.decode("utf-8")
        assert http_body.find(file_contents) != -1

    def test_multipart_data_file_text(self):
        # Use this source file itself as a text-mode upload fixture.
        with open(__file__, mode="r") as test_file:
            self.run_test_multipart_data_with_file(test_file)

    def test_multipart_data_file_binary(self):
        # Same fixture, opened in binary mode.
        with open(__file__, mode="rb") as test_file:
            self.run_test_multipart_data_with_file(test_file)

    def test_multipart_data_stringio(self):
        # File-like objects without a real filesystem path must also work.
        string = six.StringIO("foo")
        self.run_test_multipart_data_with_file(string)

    def test_multipart_data_unicode_file_name(self):
        # Non-ASCII filenames must survive encoding.
        string = six.StringIO("foo")
        string.name = u"паспорт.png"
        self.run_test_multipart_data_with_file(string)
| {
"content_hash": "cb467dc1ac446dedbd4979a318b71baf",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 79,
"avg_line_length": 34.065934065934066,
"alnum_prop": 0.5593548387096774,
"repo_name": "stripe/stripe-python",
"id": "bdb6da830787a94e9871b387302b6bc9b9c20b29",
"size": "3144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_multipart_data_generator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1198"
},
{
"name": "Python",
"bytes": "748390"
}
],
"symlink_target": ""
} |
import sys
import os

# Make the local "_ext" directory importable so custom Sphinx extensions
# can be loaded from it.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))

# -- General Sphinx configuration ---------------------------------------

extensions = []             # no extra Sphinx extensions enabled
source_suffix = '.txt'      # documentation sources use the .txt extension
master_doc = 'contents'     # root document of the docs tree

# Project metadata.
project = 'Wilson'
copyright = '2010 Chris Dickinson'
version = '0.0.1'           # short X.Y version
release = '0.0.1'           # full release string
today_fmt = '%B %d, %Y'

exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = False
show_authors = False
pygments_style = 'trac'     # syntax-highlighting style
exclude_dirnames = ['.svn','.git']

# -- HTML output options ------------------------------------------------

html_theme_path = ['_theme']
html_theme = 'default'
html_static_path = ['_static']
html_last_updated_fmt = '%b %d, %Y'
html_use_smartypants = True
html_additional_pages = {}
htmlhelp_basename = 'Wilsondoc'

# Common prefix stripped from module names in the module index.
modindex_common_prefix = ['wilson.']
| {
"content_hash": "7f882bce62ecf6848402f4e918f257cf",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 81,
"avg_line_length": 26.576923076923077,
"alnum_prop": 0.683068017366136,
"repo_name": "chrisdickinson/wilson",
"id": "5a8f2e9b92d98a756ef30c3b185f11ce816559a6",
"size": "691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "76952"
},
{
"name": "Python",
"bytes": "691"
}
],
"symlink_target": ""
} |
"""Config flow to configure Agent devices."""
import logging
from agent import AgentConnectionError, AgentError
from agent.a import Agent
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN, SERVER_URL # pylint:disable=unused-import
from .helpers import generate_url
DEFAULT_PORT = 8090
_LOGGER = logging.getLogger(__name__)
class AgentFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle an Agent config flow."""

    def __init__(self):
        """Initialize the Agent config flow."""
        self.device_config = {}

    async def async_step_user(self, user_input=None):
        """Handle an Agent config flow.

        Probes the device at the user-supplied host/port; on success a
        config entry is created, otherwise the form is shown again with a
        'device_unavailable' error.
        """
        errors = {}

        if user_input is not None:
            host = user_input[CONF_HOST]
            port = user_input[CONF_PORT]
            server_origin = generate_url(host, port)
            agent_client = Agent(server_origin, async_get_clientsession(self.hass))
            try:
                await agent_client.update()
            except (AgentConnectionError, AgentError):
                # Probe failures are surfaced below via agent_client.is_available.
                pass
            await agent_client.close()

            if agent_client.is_available:
                # Use the device's unique ID to prevent duplicate entries,
                # refreshing stored connection details if already configured.
                await self.async_set_unique_id(agent_client.unique)
                self._abort_if_unique_id_configured(
                    updates={
                        CONF_HOST: user_input[CONF_HOST],
                        CONF_PORT: user_input[CONF_PORT],
                        SERVER_URL: server_origin,
                    }
                )
                self.device_config = {
                    CONF_HOST: host,
                    CONF_PORT: port,
                    SERVER_URL: server_origin,
                }
                return await self._create_entry(agent_client.name)

            errors["base"] = "device_unavailable"

        data = {
            vol.Required(CONF_HOST): str,
            vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
        }

        return self.async_show_form(
            step_id="user",
            description_placeholders=self.device_config,
            data_schema=vol.Schema(data),
            errors=errors,
        )

    async def _create_entry(self, server_name):
        """Create entry for device."""
        return self.async_create_entry(title=server_name, data=self.device_config)
| {
"content_hash": "955dd1ed2b6d2d5211dc353c5194e806",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 83,
"avg_line_length": 30.925925925925927,
"alnum_prop": 0.5736526946107784,
"repo_name": "tchellomello/home-assistant",
"id": "cc1d6355f3f97edd0ac76a3df510521bf0161a4c",
"size": "2505",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/components/agent_dvr/config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "26713364"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
} |
import os
import sys
import math
import time
import random
import string
import bintrees
sys.path.extend(['..', '../..'])
import banyan
def random_ints(num_items):
    """Return up to `num_items` distinct random ints in [-999999, 999999].

    Duplicates drawn by random.randint are collapsed through a set, so the
    result may be shorter than `num_items`; ordering is arbitrary.
    """
    return list({random.randint(-999999, 999999) for _ in range(num_items)})
def random_strings(num_items, len_):
    """Return up to `num_items` distinct random ASCII-letter strings of length `len_`.

    Uses string.ascii_letters (available on both Python 2 and 3) instead of
    the Python-2-only, locale-dependent string.letters. Duplicates are
    collapsed through a set, so the result may be shorter than `num_items`.
    """
    return list({''.join(random.choice(string.ascii_letters) for _ in range(len_))
                 for _ in range(num_items)})
| {
"content_hash": "362e55111356ee23725741357024b126",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 109,
"avg_line_length": 21.42105263157895,
"alnum_prop": 0.6805896805896806,
"repo_name": "cpcloud/banyan",
"id": "f17e89f36036bf195b80c8c46a16a325151afff6",
"size": "407",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "performance_tests/_src.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "324481"
},
{
"name": "CSS",
"bytes": "16435"
},
{
"name": "JavaScript",
"bytes": "90259"
},
{
"name": "Python",
"bytes": "185384"
}
],
"symlink_target": ""
} |
from datetime import datetime
import math
import unittest
from src.pyglow import IRI
from src.pyglow import LocationTime
from src.pyglow.constants import DIR_FILE as pyglow_file
print("pyglow file: {}".format(pyglow_file))
class TestIri(unittest.TestCase):
    """Exercises the IRI wrapper under its various run() switches."""

    def setUp(self):
        self.iri = IRI()
        # LocationTime instance shared by every test case.
        self.location_time = LocationTime(
            datetime(2010, 3, 23, 15, 30),  # dn
            30,    # lat
            -80,   # lon
            250,   # alt
        )

    def tearDown(self):
        pass

    def test_iri_run(self):
        """ Simple IRI run """
        self.iri.run(self.location_time)
        self.assert_iri_result(self.iri)

    def test_iri_versions(self):
        """ Versions of IRI """
        for version in (2012, 2016):
            self.iri.run(self.location_time, version=version)
            self.assert_iri_result(self.iri)

    def test_iri_nmf2(self):
        """ Input NmF2 of IRI """
        self.iri.run(self.location_time, NmF2=467145.0)
        self.assert_iri_result(self.iri)

    def test_iri_hmf2(self):
        """ Input hmF2 of IRI """
        self.iri.run(self.location_time, hmF2=300.0)
        self.assert_iri_result(self.iri)

    def test_iri_compute_ne(self):
        """ compute_Ne switch """
        self.iri.run(self.location_time, compute_Ne=False)
        # With the switch off, Ne must stay unset (NaN).
        self.assertTrue(math.isnan(self.iri.ne))

    def test_iri_compute_te_ti(self):
        """ compute_Te_Ti switch """
        self.iri.run(self.location_time, compute_Te_Ti=False)
        # Neither temperature should have been computed.
        for temperature in (self.iri.Te, self.iri.Ti):
            self.assertTrue(math.isnan(temperature))

    def test_iri_compute_ni(self):
        """ compute_Ni switch """
        self.iri.run(self.location_time, compute_Ni=False)
        # Every ion density should remain NaN.
        for ion in self.iri.ni:
            self.assertTrue(math.isnan(self.iri.ni[ion]))

    def assert_iri_result(self, iri):
        """ Ensures that we have an IRI result """
        self.assertFalse(math.isnan(iri.ne))
        for density in iri.ni.values():
            self.assertFalse(math.isnan(density))
        for attr in ('Ti', 'Te', 'Tn', 'NmF2', 'hmF2'):
            self.assertFalse(math.isnan(getattr(iri, attr)))
| {
"content_hash": "553c9b789b0d69bc563b73c4bc9de76b",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 61,
"avg_line_length": 26.102803738317757,
"alnum_prop": 0.5850340136054422,
"repo_name": "timduly4/pyglow",
"id": "caba890da4a2d904e713208a39f947ece44e31c7",
"size": "2793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_iri.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "608"
},
{
"name": "Fortran",
"bytes": "404999"
},
{
"name": "Makefile",
"bytes": "9515"
},
{
"name": "Python",
"bytes": "105544"
}
],
"symlink_target": ""
} |
import numpy as np
from scipy.linalg import lstsq
from scipy._lib._util import float_factorial
from scipy.ndimage import convolve1d
from ._arraytools import axis_slice
def savgol_coeffs(window_length, polyorder, deriv=0, delta=1.0, pos=None,
                  use="conv"):
    """Compute the coefficients for a 1-D Savitzky-Golay FIR filter.

    Parameters
    ----------
    window_length : int
        The length of the filter window (i.e., the number of coefficients).
    polyorder : int
        The order of the polynomial used to fit the samples.
        `polyorder` must be less than `window_length`.
    deriv : int, optional
        The order of the derivative to compute. This must be a
        nonnegative integer. The default is 0, which means to filter
        the data without differentiating.
    delta : float, optional
        The spacing of the samples to which the filter will be applied.
        This is only used if deriv > 0.
    pos : int or None, optional
        If pos is not None, it specifies evaluation position within the
        window. The default is the middle of the window.
    use : str, optional
        Either 'conv' or 'dot'. This argument chooses the order of the
        coefficients. The default is 'conv', which means that the
        coefficients are ordered to be used in a convolution. With
        use='dot', the order is reversed, so the filter is applied by
        dotting the coefficients with the data set.

    Returns
    -------
    coeffs : 1-D ndarray
        The filter coefficients.

    References
    ----------
    A. Savitzky, M. J. E. Golay, Smoothing and Differentiation of Data by
    Simplified Least Squares Procedures. Analytical Chemistry, 1964, 36 (8),
    pp 1627-1639.
    Jianwen Luo, Kui Ying, and Jing Bai. 2005. Savitzky-Golay smoothing and
    differentiation filter for even number data. Signal Process.
    85, 7 (July 2005), 1429-1434.

    See Also
    --------
    savgol_filter

    Notes
    -----
    .. versionadded:: 0.14.0

    Examples
    --------
    >>> from scipy.signal import savgol_coeffs
    >>> savgol_coeffs(5, 2)
    array([-0.08571429,  0.34285714,  0.48571429,  0.34285714, -0.08571429])
    >>> savgol_coeffs(5, 2, deriv=1)
    array([ 2.00000000e-01,  1.00000000e-01,  2.07548111e-16, -1.00000000e-01,
           -2.00000000e-01])

    Note that use='dot' simply reverses the coefficients.

    >>> savgol_coeffs(5, 2, pos=3)
    array([ 0.25714286,  0.37142857,  0.34285714,  0.17142857, -0.14285714])
    >>> savgol_coeffs(5, 2, pos=3, use='dot')
    array([-0.14285714,  0.17142857,  0.34285714,  0.37142857,  0.25714286])
    >>> savgol_coeffs(4, 2, pos=3, deriv=1, use='dot')
    array([0.45, -0.85, -0.65, 1.05])

    `x` contains data from the parabola x = t**2, sampled at
    t = -1, 0, 1, 2, 3. `c` holds the coefficients that will compute the
    derivative at the last position. When dotted with `x` the result should
    be 6.

    >>> x = np.array([1, 0, 1, 4, 9])
    >>> c = savgol_coeffs(5, 2, pos=4, deriv=1, use='dot')
    >>> c.dot(x)
    6.0
    """

    # An alternative method for finding the coefficients when deriv=0 is
    #    t = np.arange(window_length)
    #    unit = (t == pos).astype(int)
    #    coeffs = np.polyval(np.polyfit(t, unit, polyorder), t)
    # The method implemented here is faster.

    # To recreate the table of sample coefficients shown in the chapter on
    # the Savitzy-Golay filter in the Numerical Recipes book, use
    #    window_length = nL + nR + 1
    #    pos = nL + 1
    #    c = savgol_coeffs(window_length, M, pos=pos, use='dot')

    if polyorder >= window_length:
        raise ValueError("polyorder must be less than window_length.")

    halflen, rem = divmod(window_length, 2)

    if pos is None:
        if rem == 0:
            # Even-length window: evaluate halfway between the two center samples.
            pos = halflen - 0.5
        else:
            pos = halflen

    if not (0 <= pos < window_length):
        raise ValueError("pos must be nonnegative and less than "
                         "window_length.")

    if use not in ['conv', 'dot']:
        raise ValueError("`use` must be 'conv' or 'dot'")

    if deriv > polyorder:
        # Differentiating more times than the polynomial order gives zero.
        coeffs = np.zeros(window_length)
        return coeffs

    # Form the design matrix A. The columns of A are powers of the integers
    # from -pos to window_length - pos - 1. The powers (i.e., rows) range
    # from 0 to polyorder. (That is, A is a vandermonde matrix, but not
    # necessarily square.)
    x = np.arange(-pos, window_length - pos, dtype=float)

    if use == "conv":
        # Reverse so that result can be used in a convolution.
        x = x[::-1]

    order = np.arange(polyorder + 1).reshape(-1, 1)
    A = x ** order

    # y determines which order derivative is returned.
    y = np.zeros(polyorder + 1)
    # The coefficient assigned to y[deriv] scales the result to take into
    # account the order of the derivative and the sample spacing.
    y[deriv] = float_factorial(deriv) / (delta ** deriv)

    # Find the least-squares solution of A*c = y
    coeffs, _, _, _ = lstsq(A, y)

    return coeffs
def _polyder(p, m):
"""Differentiate polynomials represented with coefficients.
p must be a 1-D or 2-D array. In the 2-D case, each column gives
the coefficients of a polynomial; the first row holds the coefficients
associated with the highest power. m must be a nonnegative integer.
(numpy.polyder doesn't handle the 2-D case.)
"""
if m == 0:
result = p
else:
n = len(p)
if n <= m:
result = np.zeros_like(p[:1, ...])
else:
dp = p[:-m].copy()
for k in range(m):
rng = np.arange(n - k - 1, m - k - 1, -1)
dp *= rng.reshape((n - m,) + (1,) * (p.ndim - 1))
result = dp
return result
def _fit_edge(x, window_start, window_stop, interp_start, interp_stop,
              axis, polyorder, deriv, delta, y):
    """
    Given an N-d array `x` and the specification of a slice of `x` from
    `window_start` to `window_stop` along `axis`, create an interpolating
    polynomial of each 1-D slice, and evaluate that polynomial in the slice
    from `interp_start` to `interp_stop`. Put the result into the
    corresponding slice of `y`.
    """
    # Get the edge into a (window_length, -1) array.
    x_edge = axis_slice(x, start=window_start, stop=window_stop, axis=axis)
    if axis == 0 or axis == -x.ndim:
        xx_edge = x_edge
        swapped = False
    else:
        # Move `axis` to the front so each column is one 1-D slice to fit.
        xx_edge = x_edge.swapaxes(axis, 0)
        swapped = True
    xx_edge = xx_edge.reshape(xx_edge.shape[0], -1)

    # Fit the edges. poly_coeffs has shape (polyorder + 1, -1),
    # where '-1' is the same as in xx_edge.
    poly_coeffs = np.polyfit(np.arange(0, window_stop - window_start),
                             xx_edge, polyorder)

    if deriv > 0:
        poly_coeffs = _polyder(poly_coeffs, deriv)

    # Compute the interpolated values for the edge.
    i = np.arange(interp_start - window_start, interp_stop - window_start)
    values = np.polyval(poly_coeffs, i.reshape(-1, 1)) / (delta ** deriv)

    # Now put the values into the appropriate slice of y.
    # First reshape values to match y.
    shp = list(y.shape)
    shp[0], shp[axis] = shp[axis], shp[0]
    values = values.reshape(interp_stop - interp_start, *shp[1:])
    if swapped:
        values = values.swapaxes(0, axis)
    # Get a view of the data to be replaced by values.
    y_edge = axis_slice(y, start=interp_start, stop=interp_stop, axis=axis)
    y_edge[...] = values
def _fit_edges_polyfit(x, window_length, polyorder, deriv, delta, axis, y):
    """
    Use polynomial interpolation of x at the low and high ends of the axis
    to fill in the halflen values in y.

    This function just calls _fit_edge twice, once for each end of the axis.
    """
    half = window_length // 2
    n = x.shape[axis]
    # Left edge: fit the first `window_length` samples, fill the first half-window.
    _fit_edge(x, 0, window_length, 0, half, axis,
              polyorder, deriv, delta, y)
    # Right edge: fit the last `window_length` samples, fill the last half-window.
    _fit_edge(x, n - window_length, n, n - half, n, axis,
              polyorder, deriv, delta, y)
def savgol_filter(x, window_length, polyorder, deriv=0, delta=1.0,
                  axis=-1, mode='interp', cval=0.0):
    """ Apply a Savitzky-Golay filter to an array.

    This is a 1-D filter. If `x` has dimension greater than 1, `axis`
    determines the axis along which the filter is applied.

    Parameters
    ----------
    x : array_like
        The data to be filtered. If `x` is not a single or double precision
        floating point array, it will be converted to type ``numpy.float64``
        before filtering.
    window_length : int
        The length of the filter window (i.e., the number of coefficients).
        If `mode` is 'interp', `window_length` must be less than or equal
        to the size of `x`.
    polyorder : int
        The order of the polynomial used to fit the samples.
        `polyorder` must be less than `window_length`.
    deriv : int, optional
        The order of the derivative to compute. This must be a
        nonnegative integer. The default is 0, which means to filter
        the data without differentiating.
    delta : float, optional
        The spacing of the samples to which the filter will be applied.
        This is only used if deriv > 0. Default is 1.0.
    axis : int, optional
        The axis of the array `x` along which the filter is to be applied.
        Default is -1.
    mode : str, optional
        Must be 'mirror', 'constant', 'nearest', 'wrap' or 'interp'. This
        determines the type of extension to use for the padded signal to
        which the filter is applied. When `mode` is 'constant', the padding
        value is given by `cval`. See the Notes for more details on 'mirror',
        'constant', 'wrap', and 'nearest'.
        When the 'interp' mode is selected (the default), no extension
        is used. Instead, a degree `polyorder` polynomial is fit to the
        last `window_length` values of the edges, and this polynomial is
        used to evaluate the last `window_length // 2` output values.
    cval : scalar, optional
        Value to fill past the edges of the input if `mode` is 'constant'.
        Default is 0.0.

    Returns
    -------
    y : ndarray, same shape as `x`
        The filtered data.

    See Also
    --------
    savgol_coeffs

    Notes
    -----
    Details on the `mode` options:

        'mirror':
            Repeats the values at the edges in reverse order. The value
            closest to the edge is not included.
        'nearest':
            The extension contains the nearest input value.
        'constant':
            The extension contains the value given by the `cval` argument.
        'wrap':
            The extension contains the values from the other end of the array.

    For example, if the input is [1, 2, 3, 4, 5, 6, 7, 8], and
    `window_length` is 7, the following shows the extended data for
    the various `mode` options (assuming `cval` is 0)::

        mode       |   Ext   |         Input          |   Ext
        -----------+---------+------------------------+---------
        'mirror'   | 4  3  2 | 1  2  3  4  5  6  7  8 | 7  6  5
        'nearest'  | 1  1  1 | 1  2  3  4  5  6  7  8 | 8  8  8
        'constant' | 0  0  0 | 1  2  3  4  5  6  7  8 | 0  0  0
        'wrap'     | 6  7  8 | 1  2  3  4  5  6  7  8 | 1  2  3

    .. versionadded:: 0.14.0

    Examples
    --------
    >>> from scipy.signal import savgol_filter
    >>> np.set_printoptions(precision=2)  # For compact display.
    >>> x = np.array([2, 2, 5, 2, 1, 0, 1, 4, 9])

    Filter with a window length of 5 and a degree 2 polynomial. Use
    the defaults for all other parameters.

    >>> savgol_filter(x, 5, 2)
    array([1.66, 3.17, 3.54, 2.86, 0.66, 0.17, 1.  , 4.  , 9.  ])

    Note that the last five values in x are samples of a parabola, so
    when mode='interp' (the default) is used with polyorder=2, the last
    three values are unchanged. Compare that to, for example,
    `mode='nearest'`:

    >>> savgol_filter(x, 5, 2, mode='nearest')
    array([1.74, 3.03, 3.54, 2.86, 0.66, 0.17, 1.  , 4.6 , 7.97])

    """
    if mode not in ["mirror", "constant", "nearest", "interp", "wrap"]:
        raise ValueError("mode must be 'mirror', 'constant', 'nearest' "
                         "'wrap' or 'interp'.")

    x = np.asarray(x)
    # Ensure that x is either single or double precision floating point.
    if x.dtype != np.float64 and x.dtype != np.float32:
        x = x.astype(np.float64)

    coeffs = savgol_coeffs(window_length, polyorder, deriv=deriv, delta=delta)

    if mode == "interp":
        if window_length > x.shape[axis]:
            raise ValueError("If mode is 'interp', window_length must be less "
                             "than or equal to the size of x.")

        # Do not pad. Instead, for the elements within `window_length // 2`
        # of the ends of the sequence, use the polynomial that is fitted to
        # the last `window_length` elements.
        y = convolve1d(x, coeffs, axis=axis, mode="constant")
        _fit_edges_polyfit(x, window_length, polyorder, deriv, delta, axis, y)
    else:
        # Any mode other than 'interp' is passed on to ndimage.convolve1d.
        y = convolve1d(x, coeffs, axis=axis, mode=mode, cval=cval)

    return y
| {
"content_hash": "8ccf077e402c8f7cd7eec0d97667d3c1",
"timestamp": "",
"source": "github",
"line_count": 356,
"max_line_length": 79,
"avg_line_length": 37.53932584269663,
"alnum_prop": 0.6017659383418138,
"repo_name": "perimosocordiae/scipy",
"id": "b7beb9e1fe485be7da8d555963fb4d87ddaa7991",
"size": "13364",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "scipy/signal/_savitzky_golay.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4620237"
},
{
"name": "C++",
"bytes": "959068"
},
{
"name": "Cython",
"bytes": "1059810"
},
{
"name": "Dockerfile",
"bytes": "16894"
},
{
"name": "Fortran",
"bytes": "5211680"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "778"
},
{
"name": "Meson",
"bytes": "143727"
},
{
"name": "Python",
"bytes": "15434780"
},
{
"name": "R",
"bytes": "3059"
},
{
"name": "Shell",
"bytes": "18009"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import os
import paramiko
from django.utils import six
from reviewboard.ssh.client import SSHClient
from reviewboard.ssh.errors import (BadHostKeyError, SSHAuthenticationError,
SSHError, SSHInvalidPortError)
from reviewboard.ssh.policy import RaiseUnknownHostKeyPolicy
#: Default port used for SSH connections when the netloc has no explicit one.
SSH_PORT = 22
try:
    # Python 2: the URL-parsing helpers live in the urlparse module.
    import urlparse
    uses_netloc = urlparse.uses_netloc
    urllib_parse = urlparse.urlparse
except ImportError:
    # Python 3: they moved to urllib.parse.
    import urllib.parse
    uses_netloc = urllib.parse.uses_netloc
    urllib_parse = urllib.parse.urlparse
# A list of known SSH URL schemes.
ssh_uri_schemes = ["ssh", "sftp"]
# Teach the URL parser that these schemes carry a network location
# (host[:port]), so urlparse() splits them the way it splits http:// URLs.
uses_netloc.extend(ssh_uri_schemes)
def humanize_key(key):
    """Return a human-readable key fingerprint as colon-separated hex pairs.

    Args:
        key:
            The key whose fingerprint should be formatted. Only its
            ``get_fingerprint()`` method is used (on paramiko keys this
            returns the raw digest bytes).

    Returns:
        unicode:
        The fingerprint as lowercase two-digit hex values joined by ``:``
        (for example ``"01:ab:ff"``).
    """
    # On Python 2, iterating the fingerprint bytes yields 1-char strings
    # (needing ord()); on Python 3 it yields ints directly. Handle both so
    # this doesn't raise TypeError under Python 3.
    return ':'.join(
        '%02x' % (octet if isinstance(octet, int) else ord(octet))
        for octet in key.get_fingerprint()
    )
def is_ssh_uri(url):
    """Return whether the given URL represents an SSH connection."""
    scheme = urllib_parse(url)[0]
    return scheme in ssh_uri_schemes
def check_host(netloc, username=None, password=None, namespace=None):
    """
    Checks if we can connect to a host with a known key.

    This will raise an exception if we cannot connect to the host.

    Args:
        netloc (unicode):
            The ``host`` or ``host:port`` to connect to.

        username (unicode, optional):
            The username to authenticate with.

        password (unicode, optional):
            The password to authenticate with.

        namespace (unicode, optional):
            The SSH storage namespace passed to :py:class:`SSHClient`.

    Raises:
        SSHInvalidPortError:
            The port portion of ``netloc`` was not an integer.

        BadHostKeyError:
            The host presented a key that did not match the stored one.

        SSHAuthenticationError:
            Authentication with the host failed.

        SSHError:
            Any other SSH-level failure occurred.

    Note that an unknown host key may also raise, via the
    ``RaiseUnknownHostKeyPolicy`` installed below (see
    ``reviewboard.ssh.policy`` for the exact exception).
    """
    from django.conf import settings
    client = SSHClient(namespace=namespace)
    client.set_missing_host_key_policy(RaiseUnknownHostKeyPolicy())
    kwargs = {}
    if ':' in netloc:
        # NOTE(review): this split breaks for IPv6 netlocs like "[::1]:22",
        # which contain multiple colons -- confirm whether IPv6 hosts need
        # to be supported here.
        hostname, port = netloc.split(':')
        try:
            port = int(port)
        except ValueError:
            raise SSHInvalidPortError(port)
    else:
        hostname = netloc
        port = SSH_PORT
    # We normally want to notify on unknown host keys, but not when running
    # unit tests.
    if getattr(settings, 'RUNNING_TEST', False):
        client.set_missing_host_key_policy(paramiko.WarningPolicy())
        kwargs['allow_agent'] = False
    try:
        client.connect(hostname, port, username=username, password=password,
                       pkey=client.get_user_key(), **kwargs)
    except paramiko.BadHostKeyException as e:
        raise BadHostKeyError(e.hostname, e.key, e.expected_key)
    except paramiko.AuthenticationException as e:
        # Some AuthenticationException instances have allowed_types set,
        # and some don't.
        allowed_types = getattr(e, 'allowed_types', [])
        if 'publickey' in allowed_types:
            key = client.get_user_key()
        else:
            key = None
        raise SSHAuthenticationError(allowed_types=allowed_types, user_key=key)
    except paramiko.SSHException as e:
        msg = six.text_type(e)
        if msg == 'No authentication methods available':
            raise SSHAuthenticationError
        else:
            raise SSHError(msg)
def register_rbssh(envvar):
    """Registers rbssh in an environment variable.

    This is a convenience method for making sure that rbssh is set properly
    in the environment for different tools. In some cases, we need to
    specifically place it in the system environment using ``os.putenv``,
    while in others (Mercurial, Bazaar), we need to place it in ``os.environ``.

    Args:
        envvar (unicode):
            The name of the environment variable to set (e.g. ``GIT_SSH``).
    """
    # NOTE(review): encoding the variable name and storing bytes in
    # os.environ works on Python 2, but on Python 3 os.environ requires
    # str keys/values and this raises TypeError -- confirm this code path
    # is Python 2 only.
    envvar = envvar.encode('utf-8')
    os.putenv(envvar, b'rbssh')
    os.environ[envvar] = b'rbssh'
| {
"content_hash": "68c03c48ba28ca8cf52ae403441e824a",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 79,
"avg_line_length": 30.63302752293578,
"alnum_prop": 0.6678646301287811,
"repo_name": "davidt/reviewboard",
"id": "c53b6b1fcd9563f5bbc4e6eacb3a8663a4087126",
"size": "3339",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "reviewboard/ssh/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "206392"
},
{
"name": "HTML",
"bytes": "182334"
},
{
"name": "JavaScript",
"bytes": "1770499"
},
{
"name": "Python",
"bytes": "3842787"
},
{
"name": "Shell",
"bytes": "20225"
}
],
"symlink_target": ""
} |
"""Test homekit_controller stateless triggers."""
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.homekit_controller.const import DOMAIN
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.setup import async_setup_component
from tests.common import (
assert_lists_same,
async_get_device_automations,
async_mock_service,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
from tests.components.homekit_controller.common import setup_test_component
# pylint: disable=redefined-outer-name
@pytest.fixture
def calls(hass):
    """Collect the calls made to the mocked test.automation service."""
    captured_calls = async_mock_service(hass, "test", "automation")
    return captured_calls
def create_remote(accessory):
    """Add a four-button remote to the accessory.

    The buttons are stateless programmable switches grouped under a single
    service label; a battery service is attached as well.
    """
    service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL)

    namespace_char = service_label.add_char(
        CharacteristicsTypes.SERVICE_LABEL_NAMESPACE
    )
    namespace_char.value = 1

    for idx in range(4):
        button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH)
        button.linked.append(service_label)

        event_char = button.add_char(CharacteristicsTypes.INPUT_EVENT)
        event_char.value = 0
        event_char.perms = ["pw", "pr", "ev"]

        name_char = button.add_char(CharacteristicsTypes.NAME)
        name_char.value = f"Button {idx + 1}"

        index_char = button.add_char(CharacteristicsTypes.SERVICE_LABEL_INDEX)
        index_char.value = idx

    battery_service = accessory.add_service(ServicesTypes.BATTERY_SERVICE)
    battery_service.add_char(CharacteristicsTypes.BATTERY_LEVEL)
def create_button(accessory):
    """Add a single stand-alone stateless switch (plus battery) to the accessory."""
    switch = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH)

    event_char = switch.add_char(CharacteristicsTypes.INPUT_EVENT)
    event_char.value = 0
    event_char.perms = ["pw", "pr", "ev"]

    name_char = switch.add_char(CharacteristicsTypes.NAME)
    name_char.value = "Button 1"

    battery_service = accessory.add_service(ServicesTypes.BATTERY_SERVICE)
    battery_service.add_char(CharacteristicsTypes.BATTERY_LEVEL)
def create_doorbell(accessory):
    """Define a doorbell (a single stateless event source, not in a group)."""
    button = accessory.add_service(ServicesTypes.DOORBELL)
    char = button.add_char(CharacteristicsTypes.INPUT_EVENT)
    char.value = 0
    char.perms = ["pw", "pr", "ev"]
    char = button.add_char(CharacteristicsTypes.NAME)
    char.value = "Doorbell"
    battery = accessory.add_service(ServicesTypes.BATTERY_SERVICE)
    battery.add_char(CharacteristicsTypes.BATTERY_LEVEL)
async def test_enumerate_remote(hass, utcnow):
    """Test that remote is correctly enumerated."""
    await setup_test_component(hass, create_remote)

    entity_registry = er.async_get(hass)
    battery_entry = entity_registry.async_get("sensor.testdevice_battery")
    device = dr.async_get(hass).async_get(battery_entry.device_id)

    # The battery sensor trigger, followed by one trigger per button/press
    # combination.
    expected = [
        {
            "device_id": device.id,
            "domain": "sensor",
            "entity_id": "sensor.testdevice_battery",
            "platform": "device",
            "type": "battery_level",
        }
    ]
    expected += [
        {
            "device_id": device.id,
            "domain": "homekit_controller",
            "platform": "device",
            "type": button,
            "subtype": press_type,
        }
        for button in ("button1", "button2", "button3", "button4")
        for press_type in ("single_press", "double_press", "long_press")
    ]

    triggers = await async_get_device_automations(hass, "trigger", device.id)
    assert_lists_same(triggers, expected)
async def test_enumerate_button(hass, utcnow):
    """Test that a button is correctly enumerated."""
    await setup_test_component(hass, create_button)

    entity_registry = er.async_get(hass)
    battery_entry = entity_registry.async_get("sensor.testdevice_battery")
    device = dr.async_get(hass).async_get(battery_entry.device_id)

    # The battery sensor trigger, followed by one trigger per press type.
    expected = [
        {
            "device_id": device.id,
            "domain": "sensor",
            "entity_id": "sensor.testdevice_battery",
            "platform": "device",
            "type": "battery_level",
        }
    ]
    expected += [
        {
            "device_id": device.id,
            "domain": "homekit_controller",
            "platform": "device",
            "type": "button1",
            "subtype": press_type,
        }
        for press_type in ("single_press", "double_press", "long_press")
    ]

    triggers = await async_get_device_automations(hass, "trigger", device.id)
    assert_lists_same(triggers, expected)
async def test_enumerate_doorbell(hass, utcnow):
    """Test that a doorbell is correctly enumerated."""
    await setup_test_component(hass, create_doorbell)

    entity_registry = er.async_get(hass)
    battery_entry = entity_registry.async_get("sensor.testdevice_battery")
    device = dr.async_get(hass).async_get(battery_entry.device_id)

    # The battery sensor trigger, followed by one trigger per press type.
    expected = [
        {
            "device_id": device.id,
            "domain": "sensor",
            "entity_id": "sensor.testdevice_battery",
            "platform": "device",
            "type": "battery_level",
        }
    ]
    expected += [
        {
            "device_id": device.id,
            "domain": "homekit_controller",
            "platform": "device",
            "type": "doorbell",
            "subtype": press_type,
        }
        for press_type in ("single_press", "double_press", "long_press")
    ]

    triggers = await async_get_device_automations(hass, "trigger", device.id)
    assert_lists_same(triggers, expected)
async def test_handle_events(hass, utcnow, calls):
    """Test that events are handled.

    Sets up two automations (button1/single_press and button2/long_press)
    and verifies only the matching INPUT_EVENT values fire them, and that
    disabled automations no longer fire.
    """
    helper = await setup_test_component(hass, create_remote)
    entity_registry = er.async_get(hass)
    entry = entity_registry.async_get("sensor.testdevice_battery")
    device_registry = dr.async_get(hass)
    device = device_registry.async_get(entry.device_id)
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "alias": "single_press",
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": device.id,
                        "type": "button1",
                        "subtype": "single_press",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": (
                                "{{ trigger.platform}} - "
                                "{{ trigger.type }} - {{ trigger.subtype }} - "
                                "{{ trigger.id }}"
                            )
                        },
                    },
                },
                {
                    "alias": "long_press",
                    "trigger": {
                        "platform": "device",
                        "domain": DOMAIN,
                        "device_id": device.id,
                        "type": "button2",
                        "subtype": "long_press",
                    },
                    "action": {
                        "service": "test.automation",
                        "data_template": {
                            "some": (
                                "{{ trigger.platform}} - "
                                "{{ trigger.type }} - {{ trigger.subtype }} - "
                                "{{ trigger.id }}"
                            )
                        },
                    },
                },
            ]
        },
    )
    # Make sure first automation (only) fires for single press (INPUT_EVENT 0)
    helper.pairing.testing.update_named_service(
        "Button 1", {CharacteristicsTypes.INPUT_EVENT: 0}
    )
    await hass.async_block_till_done()
    assert len(calls) == 1
    assert calls[0].data["some"] == "device - button1 - single_press - 0"
    # Make sure automation doesn't trigger for a double press (INPUT_EVENT 1);
    # Button 1 only has a single-press automation.
    helper.pairing.testing.update_named_service(
        "Button 1", {CharacteristicsTypes.INPUT_EVENT: 1}
    )
    await hass.async_block_till_done()
    assert len(calls) == 1
    # Make sure automation doesn't trigger for a long press (INPUT_EVENT 2)
    # on Button 1 either.
    helper.pairing.testing.update_named_service(
        "Button 1", {CharacteristicsTypes.INPUT_EVENT: 2}
    )
    await hass.async_block_till_done()
    assert len(calls) == 1
    # Make sure second automation fires for long press (INPUT_EVENT 2) on
    # Button 2.
    helper.pairing.testing.update_named_service(
        "Button 2", {CharacteristicsTypes.INPUT_EVENT: 2}
    )
    await hass.async_block_till_done()
    assert len(calls) == 2
    assert calls[1].data["some"] == "device - button2 - long_press - 0"
    # Turn the automations off
    await hass.services.async_call(
        "automation",
        "turn_off",
        {"entity_id": "automation.long_press"},
        blocking=True,
    )
    await hass.services.async_call(
        "automation",
        "turn_off",
        {"entity_id": "automation.single_press"},
        blocking=True,
    )
    # Make sure event no longer fires
    helper.pairing.testing.update_named_service(
        "Button 2", {CharacteristicsTypes.INPUT_EVENT: 2}
    )
    await hass.async_block_till_done()
    assert len(calls) == 2
| {
"content_hash": "862949e4dd9f0fdb7bea80a3ee377d4c",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 85,
"avg_line_length": 32.32119205298013,
"alnum_prop": 0.5663354164532323,
"repo_name": "kennedyshead/home-assistant",
"id": "7c02c1a6456eb1ca0f3afdd3683201a761909441",
"size": "9761",
"binary": false,
"copies": "8",
"ref": "refs/heads/dev",
"path": "tests/components/homekit_controller/test_device_trigger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "33970989"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
} |
import numpy as np
import pytest
from pyspark.ml import Pipeline
from pyspark.mllib.evaluation import MulticlassMetrics, RegressionMetrics
from pyspark.sql.types import DoubleType
from tensorflow.keras import optimizers
from tensorflow.keras.activations import relu
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential
from elephas.ml.adapter import to_data_frame
from elephas.ml_model import ElephasEstimator, load_ml_estimator, ElephasTransformer, load_ml_transformer
from elephas.utils.model_utils import ModelType, argmax
def test_serialization_transformer(classification_model):
    """Round-trip an ElephasTransformer through save/load and compare configs."""
    model_json = classification_model.to_json()
    transformer = ElephasTransformer()
    transformer.set_keras_model_config(model_json)
    transformer.save("test.h5")
    restored = load_ml_transformer("test.h5")
    assert restored.get_model().to_json() == model_json
def test_serialization_estimator(classification_model):
    """Round-trip an ElephasEstimator through save/load and compare configs."""
    model_json = classification_model.to_json()
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(model_json)
    estimator.set_loss("categorical_crossentropy")
    estimator.save("test.h5")
    restored = load_ml_estimator("test.h5")
    assert restored.get_model().to_json() == model_json
def test_serialization_transformer_and_predict(spark_context, classification_model, mnist_data):
    """Saved/loaded transformers must still be able to run inference."""
    _, _, x_test, y_test = mnist_data
    test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)

    transformer = ElephasTransformer(
        weights=classification_model.get_weights(),
        model_type=ModelType.CLASSIFICATION,
    )
    transformer.set_keras_model_config(classification_model.to_json())
    transformer.save("test.h5")

    restored = load_ml_transformer("test.h5")
    restored.transform(test_df)
def test_spark_ml_model_classification(spark_context, classification_model, mnist_data):
    """Fit an ElephasEstimator inside a Spark ML Pipeline on a MNIST subset
    and evaluate the fitted transformer with MulticlassMetrics.
    """
    batch_size = 64
    nb_classes = 10
    epochs = 1
    x_train, y_train, x_test, y_test = mnist_data
    # Train only on a small slice to keep the test fast.
    x_train = x_train[:1000]
    y_train = y_train[:1000]
    df = to_data_frame(spark_context, x_train, y_train, categorical=True)
    test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)
    sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    sgd_conf = optimizers.serialize(sgd)
    # Initialize Spark ML Estimator
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(classification_model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("categorical_crossentropy")
    estimator.set_metrics(['acc'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.1)
    estimator.set_categorical_labels(True)
    estimator.set_nb_classes(nb_classes)
    # Fitting a model returns a Transformer
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)
    # Evaluate Spark model by evaluating the underlying model
    prediction = fitted_pipeline.transform(test_df)
    pnl = prediction.select("label", "prediction")
    pnl.show(100)
    # since prediction in a multiclass classification problem is a vector, we need to compute argmax
    # the casting to a double is just necessary for using MulticlassMetrics
    pnl = pnl.select('label', argmax('prediction').astype(DoubleType()).alias('prediction'))
    prediction_and_label = pnl.rdd.map(lambda row: (row.label, row.prediction))
    metrics = MulticlassMetrics(prediction_and_label)
    print(metrics.accuracy)
def test_functional_model(spark_context, classification_model_functional, mnist_data):
    """Same end-to-end classification flow as above, but driving a Keras
    functional (non-Sequential) model through the estimator.
    """
    batch_size = 64
    epochs = 1
    x_train, y_train, x_test, y_test = mnist_data
    # Train only on a small slice to keep the test fast.
    x_train = x_train[:1000]
    y_train = y_train[:1000]
    df = to_data_frame(spark_context, x_train, y_train, categorical=True)
    test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)
    sgd = optimizers.SGD()
    sgd_conf = optimizers.serialize(sgd)
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(classification_model_functional.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("categorical_crossentropy")
    estimator.set_metrics(['acc'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.1)
    estimator.set_categorical_labels(True)
    estimator.set_nb_classes(10)
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)
    prediction = fitted_pipeline.transform(test_df)
    pnl = prediction.select("label", "prediction")
    # Predictions are probability vectors; reduce to the argmax class and
    # cast to double for MulticlassMetrics.
    pnl = pnl.select('label', argmax('prediction').astype(DoubleType()).alias('prediction'))
    pnl.show(100)
    prediction_and_label = pnl.rdd.map(lambda row: (row.label, row.prediction))
    metrics = MulticlassMetrics(prediction_and_label)
    print(metrics.accuracy)
def test_regression_model(spark_context, regression_model, boston_housing_dataset):
    """Train a regression model on the Boston housing data through the
    estimator and evaluate it with RegressionMetrics (r2).
    """
    batch_size = 64
    epochs = 10
    x_train, y_train, x_test, y_test = boston_housing_dataset
    df = to_data_frame(spark_context, x_train, y_train)
    test_df = to_data_frame(spark_context, x_test, y_test)
    sgd = optimizers.SGD(lr=0.00001)
    sgd_conf = optimizers.serialize(sgd)
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(regression_model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("mae")
    estimator.set_metrics(['mae'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.01)
    # Regression: labels are continuous, not one-hot categories.
    estimator.set_categorical_labels(False)
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)
    prediction = fitted_pipeline.transform(test_df)
    pnl = prediction.select("label", "prediction")
    pnl.show(100)
    prediction_and_observations = pnl.rdd.map(lambda row: (row.label, row.prediction))
    metrics = RegressionMetrics(prediction_and_observations)
    print(metrics.r2)
def test_set_cols_deprecated(spark_context, regression_model, boston_housing_dataset):
    """The setter-style column configuration (setFeaturesCol / setOutputCol /
    setLabelCol) must still work, but is expected to emit a
    DeprecationWarning (hence pytest.deprecated_call).
    """
    with pytest.deprecated_call():
        batch_size = 64
        epochs = 10
        x_train, y_train, x_test, y_test = boston_housing_dataset
        df = to_data_frame(spark_context, x_train, y_train)
        # Rename the default columns to prove the custom names are honored.
        df = df.withColumnRenamed('features', 'scaled_features')
        df = df.withColumnRenamed('label', 'ground_truth')
        test_df = to_data_frame(spark_context, x_test, y_test)
        test_df = test_df.withColumnRenamed('features', 'scaled_features')
        test_df = test_df.withColumnRenamed('label', 'ground_truth')
        sgd = optimizers.SGD(lr=0.00001)
        sgd_conf = optimizers.serialize(sgd)
        estimator = ElephasEstimator()
        estimator.set_keras_model_config(regression_model.to_json())
        estimator.set_optimizer_config(sgd_conf)
        # Deprecated setters under test.
        estimator.setFeaturesCol('scaled_features')
        estimator.setOutputCol('output')
        estimator.setLabelCol('ground_truth')
        estimator.set_mode("synchronous")
        estimator.set_loss("mae")
        estimator.set_metrics(['mae'])
        estimator.set_epochs(epochs)
        estimator.set_batch_size(batch_size)
        estimator.set_validation_split(0.01)
        estimator.set_categorical_labels(False)
        pipeline = Pipeline(stages=[estimator])
        fitted_pipeline = pipeline.fit(df)
        prediction = fitted_pipeline.transform(test_df)
        pnl = prediction.select("ground_truth", "output")
        pnl.show(100)
        prediction_and_observations = pnl.rdd.map(lambda row: (row['ground_truth'], row['output']))
        metrics = RegressionMetrics(prediction_and_observations)
        print(metrics.r2)
def test_set_cols(spark_context, regression_model, boston_housing_dataset):
    """Custom feature/label/output column names passed as constructor
    keyword arguments must be honored end-to-end.
    """
    batch_size = 64
    epochs = 10
    x_train, y_train, x_test, y_test = boston_housing_dataset
    df = to_data_frame(spark_context, x_train, y_train)
    # Rename the default columns to prove the custom names are honored.
    df = df.withColumnRenamed('features', 'scaled_features')
    df = df.withColumnRenamed('label', 'ground_truth')
    test_df = to_data_frame(spark_context, x_test, y_test)
    test_df = test_df.withColumnRenamed('features', 'scaled_features')
    test_df = test_df.withColumnRenamed('label', 'ground_truth')
    sgd = optimizers.SGD(lr=0.00001)
    sgd_conf = optimizers.serialize(sgd)
    # Column names configured via constructor kwargs (the non-deprecated way).
    estimator = ElephasEstimator(labelCol='ground_truth', outputCol='output', featuresCol='scaled_features')
    estimator.set_keras_model_config(regression_model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("mae")
    estimator.set_metrics(['mae'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.01)
    estimator.set_categorical_labels(False)
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)
    prediction = fitted_pipeline.transform(test_df)
    pnl = prediction.select("ground_truth", "output")
    pnl.show(100)
    prediction_and_observations = pnl.rdd.map(lambda row: (row['ground_truth'], row['output']))
    metrics = RegressionMetrics(prediction_and_observations)
    print(metrics.r2)
def test_custom_objects(spark_context, boston_housing_dataset):
    """A custom activation function can be shipped to workers by registering
    it through set_custom_objects().
    """
    def custom_activation(x):
        return 2 * relu(x)
    model = Sequential()
    model.add(Dense(64, input_shape=(13,)))
    model.add(Dense(64, activation=custom_activation))
    model.add(Dense(1, activation='linear'))
    x_train, y_train, x_test, y_test = boston_housing_dataset
    df = to_data_frame(spark_context, x_train, y_train)
    test_df = to_data_frame(spark_context, x_test, y_test)
    sgd = optimizers.SGD(lr=0.00001)
    sgd_conf = optimizers.serialize(sgd)
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("mae")
    estimator.set_metrics(['mae'])
    estimator.set_epochs(10)
    estimator.set_batch_size(32)
    estimator.set_validation_split(0.01)
    estimator.set_categorical_labels(False)
    # Register the custom activation so deserialization on workers succeeds.
    estimator.set_custom_objects({'custom_activation': custom_activation})
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)
    # No explicit assertion: the test passes if fit + transform run cleanly.
    prediction = fitted_pipeline.transform(test_df)
def test_predict_classes_probability(spark_context, classification_model, mnist_data):
    """The prediction column must carry the full per-class probability
    vector (length nb_classes), not a single class label.
    """
    batch_size = 64
    nb_classes = 10
    epochs = 1
    x_train, y_train, x_test, y_test = mnist_data
    # Train only on a small slice to keep the test fast.
    x_train = x_train[:1000]
    y_train = y_train[:1000]
    df = to_data_frame(spark_context, x_train, y_train, categorical=True)
    test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)
    sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    sgd_conf = optimizers.serialize(sgd)
    # Initialize Spark ML Estimator
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(classification_model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("categorical_crossentropy")
    estimator.set_metrics(['acc'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.1)
    estimator.set_categorical_labels(True)
    estimator.set_nb_classes(nb_classes)
    # Fitting a model returns a Transformer
    pipeline = Pipeline(stages=[estimator])
    fitted_pipeline = pipeline.fit(df)
    results = fitted_pipeline.transform(test_df)
    # we should have an array of 10 elements in the prediction column, since we have 10 classes
    # and therefore 10 probabilities
    assert len(results.take(1)[0].prediction) == 10
def test_batch_predict_classes_probability(spark_context, classification_model, mnist_data):
    """Batched inference (inference_batch_size set) must produce exactly the
    same probability vectors as the default unbatched transform.
    """
    batch_size = 64
    nb_classes = 10
    epochs = 1
    x_train, y_train, x_test, y_test = mnist_data
    # Train only on a small slice to keep the test fast.
    x_train = x_train[:1000]
    y_train = y_train[:1000]
    df = to_data_frame(spark_context, x_train, y_train, categorical=True)
    test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)
    sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    sgd_conf = optimizers.serialize(sgd)
    # Initialize Spark ML Estimator
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(classification_model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("categorical_crossentropy")
    estimator.set_metrics(['acc'])
    estimator.set_epochs(epochs)
    estimator.set_batch_size(batch_size)
    estimator.set_validation_split(0.1)
    estimator.set_categorical_labels(True)
    estimator.set_nb_classes(nb_classes)
    # Fitting a model returns a Transformer
    fitted_pipeline = estimator.fit(df)
    results = fitted_pipeline.transform(test_df)
    # Set inference batch size and do transform again on the same test_df
    inference_batch_size = int(len(y_test) / 10)
    fitted_pipeline.set_params(inference_batch_size=inference_batch_size)
    fitted_pipeline.set_params(outputCol="prediction_via_batch_inference")
    results_with_batch_prediction = fitted_pipeline.transform(results)
    # we should have an array of 10 elements in the prediction column, since we have 10 classes
    # and therefore 10 probabilities
    results_np = results_with_batch_prediction.take(1)[0]
    assert len(results_np.prediction) == 10
    assert len(results_np.prediction_via_batch_inference) == 10
    assert np.array_equal(results_np.prediction, results_np.prediction_via_batch_inference)
def test_save_pipeline(spark_context, classification_model):
    """An unfitted Pipeline containing an ElephasEstimator must be
    persistable via Pipeline.save().
    """
    sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
    sgd_conf = optimizers.serialize(sgd)
    # Initialize Spark ML Estimator
    estimator = ElephasEstimator()
    estimator.set_keras_model_config(classification_model.to_json())
    estimator.set_optimizer_config(sgd_conf)
    estimator.set_mode("synchronous")
    estimator.set_loss("categorical_crossentropy")
    estimator.set_metrics(['acc'])
    estimator.set_epochs(10)
    estimator.set_batch_size(10)
    estimator.set_validation_split(0.1)
    estimator.set_categorical_labels(True)
    estimator.set_nb_classes(10)
    # Fitting a model returns a Transformer
    pipeline = Pipeline(stages=[estimator])
    pipeline.save('tmp')
| {
"content_hash": "17f79f84784b17883b2587c90f8c0109",
"timestamp": "",
"source": "github",
"line_count": 363,
"max_line_length": 117,
"avg_line_length": 40.07713498622589,
"alnum_prop": 0.7105444047291724,
"repo_name": "maxpumperla/elephas",
"id": "d7a8cb32905e5c1d6835fc67329c13b23fb2dedb",
"size": "14548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_ml_model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1781"
},
{
"name": "Python",
"bytes": "102076"
},
{
"name": "Shell",
"bytes": "232"
}
],
"symlink_target": ""
} |
from time import sleep
#--CORBA STUBS-----------------------------------------------------------------
import contLogTest__POA
import contLogTest
#--ACS Imports-----------------------------------------------------------------
from Acspy.Servants.ContainerServices import ContainerServices
from Acspy.Servants.ComponentLifecycle import ComponentLifecycle
from Acspy.Servants.ACSComponent import ACSComponent
from Acspy.Common.Log import getLevelName
#--GLOBALS---------------------------------------------------------------------
#------------------------------------------------------------------------------
class TestLogLevelsComp(contLogTest__POA.TestLogLevelsComp, #CORBA stubs for IDL interface
                        ACSComponent, #Base IDL interface
                        ContainerServices, #Developer niceties
                        ComponentLifecycle): #HLA stuff
    '''
    Simple component implementation provided as a reference for developers.

    Exposes the container's log levels and can emit dummy log records at
    requested levels so test clients can verify log filtering.
    '''
    def __init__(self):
        '''
        Just call superclass constructors here.
        '''
        ACSComponent.__init__(self)
        ContainerServices.__init__(self)
        return
    #------------------------------------------------------------------------------
    #--Override ComponentLifecycle methods-----------------------------------------
    #------------------------------------------------------------------------------
    def initialize(self):
        '''
        Override this method inherited from ComponentLifecycle
        '''
        self.getLogger().logTrace("TestLogLevelsComp.TestLogLevelsComp")
    #------------------------------------------------------------------------------
    def cleanUp(self):
        '''
        Override this method inherited from ComponentLifecycle
        '''
        self.getLogger().logTrace("destroying " + self.name + " ...")
    #------------------------------------------------------------------------------
    #--Implementation of IDL methods-----------------------------------------------
    #------------------------------------------------------------------------------
    def getLevels(self):
        '''
        Python implementation of IDL method.
        LongSeq getLevels();

        Returns five integers: two constant 2s, then the effective minimum
        (the lower of remote/local), the remote minimum, and the local
        minimum log level.
        '''
        mylogger = self.getLogger()
        mylogger.logTrace("called ...")
        levels = mylogger.getLevels()
        # NOTE(review): the two leading hardcoded 2s appear to be values the
        # test client expects -- confirm their meaning against the IDL spec.
        return [2, 2, min(levels.minLogLevel, levels.minLogLevelLocal), levels.minLogLevel, levels.minLogLevelLocal]
    #------------------------------------------------------------------------------
    def logDummyMessages(self, levels):
        '''
        Python implementation of IDL method.
        void logDummyMessages(in LongSeq levels);

        Emits one dummy log record per requested level, then a final
        "===last log message===" sentinel at the last level so the client
        knows the sequence is complete.
        '''
        mylogger = self.getLogger()
        # Next is to avoid that Python sends logs in packets of 10 logs.
        mylogger.acshandler.capacity = 0
        # give client time to get ready for logs
        sleep(0.250)
        for l in levels:
            try:
                mylogger.logAtLevel(l, "dummy log message for core level %d/%s" % (l, getLevelName(l)))
            except:
                # Best-effort: skip levels the logger rejects so the
                # remaining levels still get logged.
                pass
        mylogger.logAtLevel(levels[-1], "===last log message===")
        # Python seems to sends logs in packets of 10 logs, so add 9 messages to
        # ensure all the above logs get sent across right now.
        ##for i in range(1,10):
        ##    mylogger.logAtLevel(levels[-2], "===packet fill-up message===")
#------------------------------------------------------------------------------
#--Main defined only for generic testing---------------------------------------
#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Smoke test: just instantiate the component.
    # NOTE: Python 2 print statements -- this module targets Python 2.
    print "Creating an object"
    g = TestLogLevelsComp()
    print "Done..."
#
# ___oOo___
| {
"content_hash": "55b5ccde3d3cc06ddf346b6b9484c18e",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 116,
"avg_line_length": 42.62222222222222,
"alnum_prop": 0.44629822732012514,
"repo_name": "csrg-utfsm/acscb",
"id": "d18be9587167be785dd4c8723632140a854c392b",
"size": "5045",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "LGPL/CommonSoftware/containerTests/contLogTest/src/pyContLogTest/TestLogLevelsComp.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "633"
},
{
"name": "Batchfile",
"bytes": "2346"
},
{
"name": "C",
"bytes": "751150"
},
{
"name": "C++",
"bytes": "7892598"
},
{
"name": "CSS",
"bytes": "21364"
},
{
"name": "Elixir",
"bytes": "906"
},
{
"name": "Emacs Lisp",
"bytes": "1990066"
},
{
"name": "FreeMarker",
"bytes": "7369"
},
{
"name": "GAP",
"bytes": "14867"
},
{
"name": "Gnuplot",
"bytes": "437"
},
{
"name": "HTML",
"bytes": "1857062"
},
{
"name": "Haskell",
"bytes": "764"
},
{
"name": "Java",
"bytes": "13573740"
},
{
"name": "JavaScript",
"bytes": "19058"
},
{
"name": "Lex",
"bytes": "5101"
},
{
"name": "Makefile",
"bytes": "1624406"
},
{
"name": "Module Management System",
"bytes": "4925"
},
{
"name": "Objective-C",
"bytes": "3223"
},
{
"name": "PLSQL",
"bytes": "9496"
},
{
"name": "Perl",
"bytes": "120411"
},
{
"name": "Python",
"bytes": "4191000"
},
{
"name": "Roff",
"bytes": "9920"
},
{
"name": "Shell",
"bytes": "1198375"
},
{
"name": "Smarty",
"bytes": "21615"
},
{
"name": "Tcl",
"bytes": "227078"
},
{
"name": "XSLT",
"bytes": "100454"
},
{
"name": "Yacc",
"bytes": "5006"
}
],
"symlink_target": ""
} |
import operator
import re
import numpy as np
import pytest
import pandas._testing as tm
from pandas.core.api import (
DataFrame,
Index,
Series,
)
from pandas.core.computation import expressions as expr
# Module-level fixtures shared by the tests below. The 10001-row frames are
# "large" inputs (presumably sized to exceed expr._MIN_ELEMENTS so numexpr
# actually kicks in -- confirm against pandas' threshold), while the ~100-row
# variants are "small" inputs.
_frame = DataFrame(np.random.randn(10001, 4), columns=list("ABCD"), dtype="float64")
_frame2 = DataFrame(np.random.randn(100, 4), columns=list("ABCD"), dtype="float64")
# Mixed-dtype variants of the frames above (float64/float32/int64/int32).
_mixed = DataFrame(
    {
        "A": _frame["A"].copy(),
        "B": _frame["B"].astype("float32"),
        "C": _frame["C"].astype("int64"),
        "D": _frame["D"].astype("int32"),
    }
)
_mixed2 = DataFrame(
    {
        "A": _frame2["A"].copy(),
        "B": _frame2["B"].astype("float32"),
        "C": _frame2["C"].astype("int64"),
        "D": _frame2["D"].astype("int32"),
    }
)
# Integer-only frames (values in [1, 100), so no zeros by default).
_integer = DataFrame(
    np.random.randint(1, 100, size=(10001, 4)), columns=list("ABCD"), dtype="int64"
)
_integer2 = DataFrame(
    np.random.randint(1, 100, size=(101, 4)), columns=list("ABCD"), dtype="int64"
)
# Raw ndarray views of single columns, for array-vs-frame op tests.
_array = _frame["A"].values.copy()
_array2 = _frame2["A"].values.copy()
_array_mixed = _mixed["D"].values.copy()
_array_mixed2 = _mixed2["D"].values.copy()
@pytest.mark.skipif(not expr.USE_NUMEXPR, reason="not using numexpr")
class TestExpressions:
    def setup_method(self, method):
        """Give each test fresh copies of the module-level frames and
        remember numexpr's element threshold so teardown can restore it."""
        self.frame = _frame.copy()
        self.frame2 = _frame2.copy()
        self.mixed = _mixed.copy()
        self.mixed2 = _mixed2.copy()
        self._MIN_ELEMENTS = expr._MIN_ELEMENTS
    def teardown_method(self, method):
        """Restore the numexpr minimum-elements threshold mutated by tests."""
        expr._MIN_ELEMENTS = self._MIN_ELEMENTS
@staticmethod
def call_op(df, other, flex: bool, opname: str):
if flex:
op = lambda x, y: getattr(x, opname)(y)
op.__name__ = opname
else:
op = getattr(operator, opname)
expr.set_use_numexpr(False)
expected = op(df, other)
expr.set_use_numexpr(True)
expr.get_test_result()
result = op(df, other)
return result, expected
def run_arithmetic(self, df, other, flex: bool):
expr._MIN_ELEMENTS = 0
operations = ["add", "sub", "mul", "mod", "truediv", "floordiv"]
for arith in operations:
result, expected = self.call_op(df, other, flex, arith)
if arith == "truediv":
if expected.ndim == 1:
assert expected.dtype.kind == "f"
else:
assert all(x.kind == "f" for x in expected.dtypes.values)
tm.assert_equal(expected, result)
def run_binary(self, df, other, flex: bool):
"""
tests solely that the result is the same whether or not numexpr is
enabled. Need to test whether the function does the correct thing
elsewhere.
"""
expr._MIN_ELEMENTS = 0
expr.set_test_mode(True)
operations = ["gt", "lt", "ge", "le", "eq", "ne"]
for arith in operations:
result, expected = self.call_op(df, other, flex, arith)
used_numexpr = expr.get_test_result()
assert used_numexpr, "Did not use numexpr as expected."
tm.assert_equal(expected, result)
def run_frame(self, df, other, flex: bool):
self.run_arithmetic(df, other, flex)
expr.set_use_numexpr(False)
binary_comp = other + 1
expr.set_use_numexpr(True)
self.run_binary(df, binary_comp, flex)
for i in range(len(df.columns)):
self.run_arithmetic(df.iloc[:, i], other.iloc[:, i], flex)
# FIXME: dont leave commented-out
# series doesn't uses vec_compare instead of numexpr...
# binary_comp = other.iloc[:, i] + 1
# self.run_binary(df.iloc[:, i], binary_comp, flex)
@pytest.mark.parametrize(
"df",
[
_integer,
_integer2,
# randint to get a case with zeros
_integer * np.random.randint(0, 2, size=np.shape(_integer)),
_frame,
_frame2,
_mixed,
_mixed2,
],
)
@pytest.mark.parametrize("flex", [True, False])
def test_arithmetic(self, df, flex):
self.run_frame(df, df, flex)
def test_invalid(self):
array = np.random.randn(1_000_001)
array2 = np.random.randn(100)
# no op
result = expr._can_use_numexpr(operator.add, None, array, array, "evaluate")
assert not result
# min elements
result = expr._can_use_numexpr(operator.add, "+", array2, array2, "evaluate")
assert not result
# ok, we only check on first part of expression
result = expr._can_use_numexpr(operator.add, "+", array, array2, "evaluate")
assert result
@pytest.mark.parametrize(
"opname,op_str",
[("add", "+"), ("sub", "-"), ("mul", "*"), ("truediv", "/"), ("pow", "**")],
)
@pytest.mark.parametrize(
"left,right", [(_array, _array2), (_array_mixed, _array_mixed2)]
)
def test_binary_ops(self, opname, op_str, left, right):
def testit():
if opname == "pow":
# TODO: get this working
return
op = getattr(operator, opname)
result = expr.evaluate(op, left, left, use_numexpr=True)
expected = expr.evaluate(op, left, left, use_numexpr=False)
tm.assert_numpy_array_equal(result, expected)
result = expr._can_use_numexpr(op, op_str, right, right, "evaluate")
assert not result
expr.set_use_numexpr(False)
testit()
expr.set_use_numexpr(True)
expr.set_numexpr_threads(1)
testit()
expr.set_numexpr_threads()
testit()
@pytest.mark.parametrize(
"opname,op_str",
[
("gt", ">"),
("lt", "<"),
("ge", ">="),
("le", "<="),
("eq", "=="),
("ne", "!="),
],
)
@pytest.mark.parametrize(
"left,right", [(_array, _array2), (_array_mixed, _array_mixed2)]
)
def test_comparison_ops(self, opname, op_str, left, right):
def testit():
f12 = left + 1
f22 = right + 1
op = getattr(operator, opname)
result = expr.evaluate(op, left, f12, use_numexpr=True)
expected = expr.evaluate(op, left, f12, use_numexpr=False)
tm.assert_numpy_array_equal(result, expected)
result = expr._can_use_numexpr(op, op_str, right, f22, "evaluate")
assert not result
expr.set_use_numexpr(False)
testit()
expr.set_use_numexpr(True)
expr.set_numexpr_threads(1)
testit()
expr.set_numexpr_threads()
testit()
@pytest.mark.parametrize("cond", [True, False])
@pytest.mark.parametrize("df", [_frame, _frame2, _mixed, _mixed2])
def test_where(self, cond, df):
def testit():
c = np.empty(df.shape, dtype=np.bool_)
c.fill(cond)
result = expr.where(c, df.values, df.values + 1)
expected = np.where(c, df.values, df.values + 1)
tm.assert_numpy_array_equal(result, expected)
expr.set_use_numexpr(False)
testit()
expr.set_use_numexpr(True)
expr.set_numexpr_threads(1)
testit()
expr.set_numexpr_threads()
testit()
@pytest.mark.parametrize(
"op_str,opname", [("/", "truediv"), ("//", "floordiv"), ("**", "pow")]
)
def test_bool_ops_raise_on_arithmetic(self, op_str, opname):
df = DataFrame({"a": np.random.rand(10) > 0.5, "b": np.random.rand(10) > 0.5})
msg = f"operator '{opname}' not implemented for bool dtypes"
f = getattr(operator, opname)
err_msg = re.escape(msg)
with pytest.raises(NotImplementedError, match=err_msg):
f(df, df)
with pytest.raises(NotImplementedError, match=err_msg):
f(df.a, df.b)
with pytest.raises(NotImplementedError, match=err_msg):
f(df.a, True)
with pytest.raises(NotImplementedError, match=err_msg):
f(False, df.a)
with pytest.raises(NotImplementedError, match=err_msg):
f(False, df)
with pytest.raises(NotImplementedError, match=err_msg):
f(df, True)
@pytest.mark.parametrize(
"op_str,opname", [("+", "add"), ("*", "mul"), ("-", "sub")]
)
def test_bool_ops_warn_on_arithmetic(self, op_str, opname):
n = 10
df = DataFrame({"a": np.random.rand(n) > 0.5, "b": np.random.rand(n) > 0.5})
subs = {"+": "|", "*": "&", "-": "^"}
sub_funcs = {"|": "or_", "&": "and_", "^": "xor"}
f = getattr(operator, opname)
fe = getattr(operator, sub_funcs[subs[op_str]])
if op_str == "-":
# raises TypeError
return
with tm.use_numexpr(True, min_elements=5):
with tm.assert_produces_warning(check_stacklevel=False):
r = f(df, df)
e = fe(df, df)
tm.assert_frame_equal(r, e)
with tm.assert_produces_warning(check_stacklevel=False):
r = f(df.a, df.b)
e = fe(df.a, df.b)
tm.assert_series_equal(r, e)
with tm.assert_produces_warning(check_stacklevel=False):
r = f(df.a, True)
e = fe(df.a, True)
tm.assert_series_equal(r, e)
with tm.assert_produces_warning(check_stacklevel=False):
r = f(False, df.a)
e = fe(False, df.a)
tm.assert_series_equal(r, e)
with tm.assert_produces_warning(check_stacklevel=False):
r = f(False, df)
e = fe(False, df)
tm.assert_frame_equal(r, e)
with tm.assert_produces_warning(check_stacklevel=False):
r = f(df, True)
e = fe(df, True)
tm.assert_frame_equal(r, e)
@pytest.mark.parametrize(
"test_input,expected",
[
(
DataFrame(
[[0, 1, 2, "aa"], [0, 1, 2, "aa"]], columns=["a", "b", "c", "dtype"]
),
DataFrame([[False, False], [False, False]], columns=["a", "dtype"]),
),
(
DataFrame(
[[0, 3, 2, "aa"], [0, 4, 2, "aa"], [0, 1, 1, "bb"]],
columns=["a", "b", "c", "dtype"],
),
DataFrame(
[[False, False], [False, False], [False, False]],
columns=["a", "dtype"],
),
),
],
)
def test_bool_ops_column_name_dtype(self, test_input, expected):
# GH 22383 - .ne fails if columns containing column name 'dtype'
result = test_input.loc[:, ["a", "dtype"]].ne(test_input.loc[:, ["a", "dtype"]])
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"arith", ("add", "sub", "mul", "mod", "truediv", "floordiv")
)
@pytest.mark.parametrize("axis", (0, 1))
def test_frame_series_axis(self, axis, arith):
# GH#26736 Dataframe.floordiv(Series, axis=1) fails
df = self.frame
if axis == 1:
other = self.frame.iloc[0, :]
else:
other = self.frame.iloc[:, 0]
expr._MIN_ELEMENTS = 0
op_func = getattr(df, arith)
expr.set_use_numexpr(False)
expected = op_func(other, axis=axis)
expr.set_use_numexpr(True)
result = op_func(other, axis=axis)
tm.assert_frame_equal(expected, result)
@pytest.mark.parametrize(
"op",
[
"__mod__",
pytest.param("__rmod__", marks=pytest.mark.xfail(reason="GH-36552")),
"__floordiv__",
"__rfloordiv__",
],
)
@pytest.mark.parametrize("box", [DataFrame, Series, Index])
@pytest.mark.parametrize("scalar", [-5, 5])
def test_python_semantics_with_numexpr_installed(self, op, box, scalar):
# https://github.com/pandas-dev/pandas/issues/36047
expr._MIN_ELEMENTS = 0
data = np.arange(-50, 50)
obj = box(data)
method = getattr(obj, op)
result = method(scalar)
# compare result with numpy
expr.set_use_numexpr(False)
expected = method(scalar)
expr.set_use_numexpr(True)
tm.assert_equal(result, expected)
# compare result element-wise with Python
for i, elem in enumerate(data):
if box == DataFrame:
scalar_result = result.iloc[i, 0]
else:
scalar_result = result[i]
try:
expected = getattr(int(elem), op)(scalar)
except ZeroDivisionError:
pass
else:
assert scalar_result == expected
| {
"content_hash": "db851c65f7a5aefabaa9e8b18311ba7e",
"timestamp": "",
"source": "github",
"line_count": 401,
"max_line_length": 88,
"avg_line_length": 32.27431421446384,
"alnum_prop": 0.5222531293463143,
"repo_name": "gfyoung/pandas",
"id": "6ac85f9d36fdc252620e273db8a2a7fd0aee584e",
"size": "12942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/tests/test_expressions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4912"
},
{
"name": "C",
"bytes": "404689"
},
{
"name": "C++",
"bytes": "17194"
},
{
"name": "HTML",
"bytes": "551714"
},
{
"name": "Makefile",
"bytes": "574"
},
{
"name": "Python",
"bytes": "14336547"
},
{
"name": "Shell",
"bytes": "29174"
},
{
"name": "Smarty",
"bytes": "2069"
}
],
"symlink_target": ""
} |
import logging
import subprocess
import time
import unittest
import os
from skydive.graph import Node, Edge
from skydive.rest.client import RESTClient
from skydive.websocket.client import WSClient
from skydive.websocket.client import WSClientDebugProtocol
from skydive.websocket.client import WSMessage
from skydive.websocket.client import NodeAddedMsgType, EdgeAddedMsgType
class WSTestClient(WSClientDebugProtocol):
    """Debug WS protocol that runs a test callback once the connection opens."""

    def onOpen(self):
        """Invoke the configured test callback, then end the session."""
        callback = self.factory.kwargs["test"]
        callback(self)
        self.stop_when_complete()
class SkydiveWSTest(unittest.TestCase):
    """Integration tests for the Skydive REST and WebSocket client APIs.

    Requires a Skydive analyzer on localhost:8082; TLS, authentication and
    certificates are toggled via the SKYDIVE_PYTHON_TESTS_* env vars.
    """

    @classmethod
    def setUpClass(cls):
        logging.basicConfig(level=logging.DEBUG)
        cls.schemeWS = "ws"
        cls.schemeHTTP = "http"
        if "SKYDIVE_PYTHON_TESTS_TLS" in os.environ:
            cls.schemeWS = "wss"
            cls.schemeHTTP = "https"
        cls.username = ""
        cls.password = ""
        cls.auth = False
        cls.insecure = True
        cls.cafile = ""
        cls.certfile = ""
        cls.keyfile = ""
        if "SKYDIVE_PYTHON_TESTS_USERPASS" in os.environ:
            cls.auth = True
            userpass = os.environ["SKYDIVE_PYTHON_TESTS_USERPASS"]
            cls.username, cls.password = userpass.split(":")
        if "SKYDIVE_PYTHON_TESTS_CERTIFICATES" in os.environ:
            cls.insecure = False
            certificates = os.environ["SKYDIVE_PYTHON_TESTS_CERTIFICATES"]
            cls.cafile, cls.certfile, cls.keyfile = certificates.split(":")

    def new_rest_client(self):
        """Build a RESTClient configured from the class-level test settings."""
        return RESTClient("localhost:8082",
                          scheme=self.schemeHTTP,
                          username=self.username,
                          password=self.password,
                          insecure=self.insecure,
                          cafile=self.cafile,
                          certfile=self.certfile,
                          keyfile=self.keyfile)

    def new_ws_client(self, id, endpoint, test):
        """Build a WSClient whose onOpen runs *test* via WSTestClient."""
        return WSClient(id,
                        self.schemeWS +
                        "://localhost:8082/ws/"+endpoint,
                        protocol=WSTestClient, test=test,
                        username=self.username,
                        password=self.password,
                        insecure=self.insecure,
                        cafile=self.cafile,
                        certfile=self.certfile,
                        keyfile=self.keyfile)

    def test_connection(self):
        """The WS client connects, and login behaves correctly when auth is on."""
        self.connected = False

        def is_connected(protocol):
            self.connected = True

        self.wsclient = self.new_ws_client(id="host-test",
                                           endpoint="publisher",
                                           test=is_connected)
        self.wsclient.connect()
        if self.auth:
            # Wrong credentials must fail; correct ones must succeed.
            ret = self.wsclient.login("localhost:8082", "toto")
            self.assertEqual(ret, False, "login() should failed")
            ret = self.wsclient.login("localhost:8082", "admin", "pass")
            self.assertEqual(ret, True, "login() failed")
            ret = self.wsclient.login()
            self.assertEqual(ret, True, "login() failed")
        self.wsclient.start()
        self.assertEqual(self.connected, True, "failed to connect")

    def test_injection(self):
        """Publish nodes/edges over WS; valid ones appear, invalid are rejected."""

        def create_node(protocol):
            node = Node("TOR_TEST", "",
                        metadata={"Name": "Test TOR", "Type": "fabric"})
            msg = WSMessage("Graph", NodeAddedMsgType, node)
            protocol.sendWSMessage(msg)

            node = Node("PORT_TEST", "",
                        metadata={"Name": "Test port", "Type": "fabric"})
            msg = WSMessage("Graph", NodeAddedMsgType, node)
            protocol.sendWSMessage(msg)

            # Missing "Type" metadata: the analyzer should drop this node.
            node = Node("BAD_NODE", "",
                        metadata={"Name": "Bad node"})
            msg = WSMessage("Graph", NodeAddedMsgType, node)
            protocol.sendWSMessage(msg)

            # "netns" nodes are not allowed from a publisher: should be dropped.
            node = Node("BAD_NETNS", "",
                        metadata={"Name": "Bad netns", "Type": "netns"})
            msg = WSMessage("Graph", NodeAddedMsgType, node)
            protocol.sendWSMessage(msg)

            edge = Edge("TOR_L2LINK", "",
                        "TOR_TEST", "PORT_TEST",
                        metadata={"RelationType": "layer2"})
            msg = WSMessage("Graph", EdgeAddedMsgType, edge)
            protocol.sendWSMessage(msg)

            # Edge with empty endpoints: should be rejected.
            edge = Edge("BAD_LINK", "",
                        "", "",
                        metadata={"RelationType": "layer2"})
            msg = WSMessage("Graph", EdgeAddedMsgType, edge)
            protocol.sendWSMessage(msg)

        self.wsclient = self.new_ws_client(id="host-test2",
                                           endpoint="publisher",
                                           test=create_node)
        self.wsclient.connect()
        self.wsclient.start()

        # Give the analyzer a moment to ingest the published graph updates.
        time.sleep(1)

        restclient = self.new_rest_client()
        nodes = restclient.lookup_nodes("G.V().Has('Name', 'Test port')")
        self.assertEqual(len(nodes), 1, "should find one an only one node")
        tor_id = nodes[0].id
        # NOTE(review): this assertion is tautological (compares a value with
        # itself); presumably it was meant to check against 'PORT_TEST' — confirm.
        self.assertEqual(tor_id, nodes[0].id, "wrong id for node")

        nodes = restclient.lookup_nodes("G.V().Has('Name', 'Bad netns')")
        self.assertEqual(len(nodes), 0, "should find no 'Bad netns' node")

        nodes = restclient.lookup_nodes("G.V().Has('Name', 'Bad node')")
        self.assertEqual(len(nodes), 0, "should find no 'Bad node' node")

        edges = restclient.lookup_edges(
            "G.E().Has('RelationType', 'layer2')")
        self.assertEqual(len(edges), 1, "should find one an only one edge")

    def test_capture(self):
        """Create a capture, find it in the capture list, then delete it."""
        restclient = self.new_rest_client()
        capture1 = restclient.capture_create(
            "G.V().Has('Name', 'test', 'Type', 'netns')")

        captures = restclient.capture_list()
        self.assertGreaterEqual(len(captures), 1, "no capture found")

        # BUG FIX: 'found' was previously uninitialized, raising NameError
        # (instead of a clean assertion failure) when no capture matched.
        found = False
        for capture in captures:
            if (capture.uuid == capture1.uuid):
                found = True
                break
        self.assertTrue(found, "created capture not found")

        restclient.capture_delete(capture1.uuid)

    def test_alert(self):
        """Create an alert, find it in the alert list, then delete it."""
        restclient = self.new_rest_client()
        alert1 = restclient.alert_create(
            "https://localhost:8081",
            "G.V().Has('Name', 'alert-ns-webhook', 'Type', 'netns')")

        alerts = restclient.alert_list()
        self.assertGreaterEqual(len(alerts), 1, "no alerts found")

        # BUG FIX: same uninitialized-'found' issue as test_capture.
        found = False
        for alert in alerts:
            if (alert.uuid == alert1.uuid):
                found = True
                break
        self.assertTrue(found, "created alert not found")

        restclient.alert_delete(alert1.uuid)

    def test_topology_rules(self):
        """Node/edge rules create the expected topology and are listed back."""
        restclient = self.new_rest_client()
        noderule1 = restclient.noderule_create(
            "create", metadata={"Name": "node1", "Type": "fabric"})
        noderule2 = restclient.noderule_create(
            "create", metadata={"Name": "node2", "Type": "fabric"})
        # Let the rules take effect before linking / querying.
        time.sleep(1)
        edgerule1 = restclient.edgerule_create(
            "G.V().Has('Name', 'node1')", "G.V().Has('Name', 'node2')",
            {"RelationType": "layer2", "EdgeName": "my_edge"})
        time.sleep(1)

        node1 = restclient.lookup_nodes("G.V().Has('Name', 'node1')")
        self.assertEqual(len(node1), 1, "should find only one node as node1")
        node2 = restclient.lookup_nodes("G.V().Has('Name', 'node2')")
        self.assertEqual(len(node2), 1, "should find only one node as node2")
        edge = restclient.lookup_edges(
            "G.E().Has('RelationType', 'layer2', 'EdgeName', 'my_edge')")
        self.assertEqual(len(edge), 1, "should find only one edge")

        noderules = restclient.noderule_list()
        self.assertGreaterEqual(len(noderules), 2, "no noderules found")
        found = False
        for noderule in noderules:
            if (noderule.uuid == noderule1.uuid):
                found = True
                break
        self.assertTrue(found, "created noderule not found")

        edgerules = restclient.edgerule_list()
        self.assertGreaterEqual(len(edgerules), 1, "no edgerules found")
        found = False
        for edgerule in edgerules:
            if (edgerule.uuid == edgerule1.uuid):
                found = True
                break
        self.assertTrue(found, "created edgerule not found")

        restclient.edgerule_delete(edgerule1.uuid)
        restclient.noderule_delete(noderule1.uuid)
        restclient.noderule_delete(noderule2.uuid)

    def test_injections(self):
        """Packet injections can be created, listed and deleted."""
        restclient = self.new_rest_client()
        nodes = restclient.lookup("G.V().Has('Name', 'eth0')")
        testnode = nodes[0]["Metadata"]["TID"]
        query = "G.V().Has('TID', '" + testnode + "')"
        num_injections_before = len(restclient.injection_list())
        injection_response = restclient.injection_create(query, query,
                                                         count=1000)
        num_injections_after = len(restclient.injection_list())
        self.assertEqual(num_injections_after, num_injections_before + 1,
                         "injection creation didn't succeed")
        restclient.injection_delete(injection_response.uuid)
        num_injections_after_deletion = len(restclient.injection_list())
        self.assertEqual(num_injections_after_deletion, num_injections_before,
                         "injection deletion didn't succeed")
| {
"content_hash": "5fd1b596921117f6ec374cd34a5b5d09",
"timestamp": "",
"source": "github",
"line_count": 260,
"max_line_length": 78,
"avg_line_length": 36.83461538461538,
"alnum_prop": 0.5569593818523546,
"repo_name": "skydive-project/skydive",
"id": "776e387730ead73a3f05b08af31192001f1d0e5c",
"size": "9577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/python/api/tests/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "41318"
},
{
"name": "CSS",
"bytes": "61325"
},
{
"name": "Dockerfile",
"bytes": "1205"
},
{
"name": "Go",
"bytes": "2511767"
},
{
"name": "HTML",
"bytes": "7140"
},
{
"name": "JavaScript",
"bytes": "997192"
},
{
"name": "Jinja",
"bytes": "650"
},
{
"name": "Makefile",
"bytes": "29947"
},
{
"name": "Mustache",
"bytes": "5940"
},
{
"name": "Nix",
"bytes": "818"
},
{
"name": "Python",
"bytes": "119288"
},
{
"name": "Roff",
"bytes": "6623"
},
{
"name": "Shell",
"bytes": "103957"
},
{
"name": "TypeScript",
"bytes": "28054"
}
],
"symlink_target": ""
} |
import os.path
from setuptools import setup, find_packages
from sys import version_info, platform
# Python <= 2.5 has no bundled json module; fall back to simplejson there.
if version_info[:2] > (2, 5):
    install_requires = []
else:
    install_requires = ['simplejson >= 2.0.0']

# Python 2.6 and below requires argparse
if version_info[:2] < (2, 7):
    install_requires += ['argparse']

# Mac does not come default with readline, this is needed for autocomplete
# in the cmps shell
if platform == 'darwin':
    install_requires += ['readline']

# Optional PySocks support
extras_require = dict(Socks=['PySocks >= 1.5.0'])

# Resolve the package source directory relative to the current working dir,
# so the build works regardless of where setup.py is invoked from.
setup_dir = os.path.split(__file__)[0]
if setup_dir == '':
    setup_dir = '.'
base_dir = os.path.relpath(os.path.normpath(setup_dir), os.getcwd())
src_dir = os.path.normpath(os.path.join(base_dir, 'src'))

setup(
  name = 'cm_api',
  version = '16.0.0',      # Compatible with API v16 (CM 5.11)
  packages = find_packages(src_dir, exclude=['cm_api_tests']),
  package_dir = {'': src_dir },
  zip_safe = True,

  # Project uses simplejson, so ensure that it gets installed or upgraded
  # on the target machine
  install_requires = install_requires,
  extras_require = extras_require,

  author = 'Cloudera, Inc.',
  author_email = 'scm-users@cloudera.org',
  description = 'Cloudera Manager API client',
  # BUG FIX: setuptools expects 'long_description'; the previous 'long_desc'
  # keyword was silently ignored as an unknown distribution option.
  long_description = 'cm_api is a Python client to the Cloudera Manager REST API',
  license = 'Apache License 2.0',
  url = 'http://cloudera.github.com/cm_api/',
  classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
  ],

  entry_points = { 'console_scripts': [ 'cmps = cm_shell.cmps:main', ]}
)
| {
"content_hash": "6639dc828e3e8cd1b236d3b6b3ed53f7",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 75,
"avg_line_length": 31.836363636363636,
"alnum_prop": 0.6567675613934895,
"repo_name": "cloudera/cm_api",
"id": "df8447af9884f8f2e493b5c172a7aea71696e09a",
"size": "2543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19844"
},
{
"name": "Java",
"bytes": "1147385"
},
{
"name": "Makefile",
"bytes": "1429"
},
{
"name": "Python",
"bytes": "511302"
},
{
"name": "Shell",
"bytes": "8032"
}
],
"symlink_target": ""
} |
import logging
from concurrent import futures as cfutures
from ..errors import TimeoutError
from .scoring_system import ScoringSystem
logger = logging.getLogger(__name__)
class ProcessPool(ScoringSystem):
    """A ScoringSystem that fans scoring work out to a pool of worker processes."""
    def __init__(self, *args, workers=None, **kwargs):
        """
        :param workers: maximum number of worker processes; when None,
            ProcessPoolExecutor chooses its own default.
        """
        super().__init__(*args, **kwargs)
        self.workers = int(workers) if workers is not None else None
    def _process_missing_scores(self, request, missing_model_set_revs,
                                root_caches, inprogress_results=None):
        # Returns (rev_scores, errors): per-revision score results and the
        # exceptions/timeouts hit while computing them.
        # NOTE(review): inprogress_results is accepted but never used here —
        # presumably for interface parity with sibling scoring systems; confirm.
        rev_scores = {}
        errors = {}
        futures = {}
        with cfutures.ProcessPoolExecutor(max_workers=self.workers) as executor:
            for missing_models, rev_ids in missing_model_set_revs.items():
                for rev_id in rev_ids:
                    # Skip revisions we have no root cache for (nothing to score).
                    if rev_id not in root_caches:
                        continue
                    root_cache = root_caches[rev_id]
                    logger.debug("Submitting _process_score_map for {0}"
                                 .format(request.format(rev_id, missing_models)))
                    future = executor.submit(
                        self._process_score_map, request, rev_id, missing_models,
                        root_cache)
                    # NOTE(review): keyed by rev_id only — a rev appearing under
                    # several model sets would overwrite earlier futures; confirm
                    # that rev_ids are disjoint across missing_model_set_revs.
                    futures[rev_id] = future
            # Collect results inside the `with` so workers stay alive; each
            # future gets at most self.timeout seconds.
            for rev_id, future in futures.items():
                try:
                    rev_scores[rev_id] = future.result(timeout=self.timeout)
                except cfutures.TimeoutError:
                    errors[rev_id] = TimeoutError(
                        "Timed out after {0} seconds.".format(self.timeout))
                except Exception as error:
                    errors[rev_id] = error
        return rev_scores, errors
    @classmethod
    def from_config(cls, config, name, section_key="scoring_systems"):
        """Construct a ProcessPool from a config mapping.

        Reads the optional 'workers' key from config[section_key][name];
        shared kwargs come from the ScoringSystem base class.
        """
        logger.info("Loading ProcessPool '{0}' from config.".format(name))
        section = config[section_key][name]
        kwargs = cls._kwargs_from_config(
            config, name, section_key=section_key)
        workers = section.get('workers')
        return cls(workers=workers, **kwargs)
| {
"content_hash": "6630ae1b52de554a96edcc974ecdffbd",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 81,
"avg_line_length": 38.45454545454545,
"alnum_prop": 0.5612293144208038,
"repo_name": "wiki-ai/ores",
"id": "ebb7bac3536be586f2a4722d1828ef5990de075c",
"size": "2115",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ores/scoring_systems/process_pool.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "433"
},
{
"name": "Dockerfile",
"bytes": "481"
},
{
"name": "HTML",
"bytes": "9290"
},
{
"name": "JavaScript",
"bytes": "5003"
},
{
"name": "Jupyter Notebook",
"bytes": "44108"
},
{
"name": "Makefile",
"bytes": "276"
},
{
"name": "Python",
"bytes": "157474"
}
],
"symlink_target": ""
} |
import os
from typing import List, Type
from maya.api.OpenMaya import MFnPlugin, MSyntax
from pulse.plugins import PulseCmdBase, CmdArg, CmdFlag
from pulse.core import serialize_attr_value, deserialize_attr_value
# the list of all cmd classes in this plugin
_CMD_CLASSES: List[Type[PulseCmdBase]] = []
def maya_useNewAPI():
    """
    The presence of this function tells Maya that the plugin produces, and
    expects to be passed, objects created using the Maya Python API 2.0.
    """
    # Maya checks for this symbol by name only; the body is intentionally empty.
    pass
def initializePlugin(plugin):
    """Register every Pulse command when Maya loads this plugin."""
    plugin_fn = MFnPlugin(plugin)
    for command in _CMD_CLASSES:
        plugin_fn.registerCommand(command.get_name(), command.create_cmd, command.create_syntax)
def uninitializePlugin(plugin):
    """Deregister every Pulse command when Maya unloads this plugin."""
    plugin_fn = MFnPlugin(plugin)
    for command in _CMD_CLASSES:
        plugin_fn.deregisterCommand(command.get_name())
class PulseCreateStepCmd(PulseCmdBase):
    """
    Command to create a Pulse BuildStep.

    Args: parent step path, child index, serialized step data.
    Returns the full path of the newly created step.
    """
    # path of the parent step under which to create the new step
    step_path_arg = CmdArg(MSyntax.kString)
    # index among the parent's children at which to insert
    child_index_arg = CmdArg(MSyntax.kLong)
    # serialized step data (string form)
    str_data_arg = CmdArg(MSyntax.kString)
    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.step_path_arg, cls.child_index_arg, cls.str_data_arg]
    def parse_arguments(self, args):
        # Pull the three positional args into instance state for redo/undo.
        parser = self.get_arg_parser(args)
        self.step_path: str = self.get_arg(self.step_path_arg, parser)
        self.child_index: int = self.get_arg(self.child_index_arg, parser)
        self.str_data: str = self.get_arg(self.str_data_arg, parser)
    def doIt(self, args):
        self.parse_arguments(args)
        return self.redoIt()
    def redoIt(self):
        if self.blueprint_model:
            step_data = deserialize_attr_value(self.str_data)
            new_step = self.blueprint_model.create_step(self.step_path, self.child_index, step_data)
            if not new_step:
                raise RuntimeError("Failed to create BuildStep")
            # Remember the created path so undoIt can delete exactly this step,
            # and surface it as the command result for callers.
            self.new_step_path = new_step.get_full_path()
            self.clearResult()
            self.setResult(self.new_step_path)
    def undoIt(self):
        if self.blueprint_model and self.new_step_path:
            self.blueprint_model.delete_step(self.new_step_path)
_CMD_CLASSES.append(PulseCreateStepCmd)
class PulseDeleteStepCmd(PulseCmdBase):
    """
    Command to delete a Pulse BuildStep.

    Snapshots the step's serialized data and index before deletion so
    undoIt can recreate it in place.
    """
    # full path of the step to delete
    step_path_arg = CmdArg(MSyntax.kString)
    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.step_path_arg]
    def parse_arguments(self, args):
        parser = self.get_arg_parser(args)
        self.step_path = self.get_arg(self.step_path_arg, parser)
    def doIt(self, args):
        self.parse_arguments(args)
        self.redoIt()
    def redoIt(self):
        # save the serialized step data before deleting
        step = self.blueprint_model.get_step(self.step_path)
        if not step:
            raise RuntimeError(f"BuildStep not found: '{self.step_path}'")
        self.deleted_str_data = serialize_attr_value(step.serialize())
        self.deleted_child_index = step.index_in_parent()
        if not self.blueprint_model.delete_step(self.step_path):
            raise RuntimeError("Failed to delete BuildStep")
    def undoIt(self):
        if self.deleted_str_data:
            # Recreate the step under its original parent at its original index.
            deleted_data = deserialize_attr_value(self.deleted_str_data)
            parent_path = os.path.dirname(self.step_path)
            self.blueprint_model.create_step(parent_path, self.deleted_child_index, deleted_data)
_CMD_CLASSES.append(PulseDeleteStepCmd)
class PulseMoveStepCmd(PulseCmdBase):
    """
    Command to move or rename a Pulse BuildStep.

    Args: source step path, target step path.  The model may resolve the
    target path (e.g. to avoid collisions), so the resolved path is kept
    for undo.
    """
    # current path of the step
    source_path_arg = CmdArg(MSyntax.kString)
    # requested destination path
    target_path_arg = CmdArg(MSyntax.kString)
    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.source_path_arg, cls.target_path_arg]
    def parse_arguments(self, args):
        parser = self.get_arg_parser(args)
        self.source_path: str = self.get_arg(self.source_path_arg, parser)
        self.target_path: str = self.get_arg(self.target_path_arg, parser)
    def doIt(self, args):
        self.parse_arguments(args)
        self.redoIt()
    def redoIt(self):
        # save the resolved path after performing the move
        self.resolved_target_path = self.blueprint_model.move_step(self.source_path, self.target_path)
        if self.resolved_target_path is None:
            raise RuntimeError("Failed to move BuildStep")
    def undoIt(self):
        # Move back from the resolved location to the original source path.
        self.blueprint_model.move_step(self.resolved_target_path, self.source_path)
_CMD_CLASSES.append(PulseMoveStepCmd)
class PulseRenameStepCmd(PulseCmdBase):
    """
    Command to move or rename a Pulse BuildStep.

    Args: step path, new name.  The previous name is captured for undo.
    """
    # full path of the step to rename
    step_path_arg = CmdArg(MSyntax.kString)
    # new name for the step
    new_name_arg = CmdArg(MSyntax.kString)
    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.step_path_arg, cls.new_name_arg]
    def parse_arguments(self, args):
        parser = self.get_arg_parser(args)
        self.step_path: str = self.get_arg(self.step_path_arg, parser)
        self.new_name: str = self.get_arg(self.new_name_arg, parser)
    def doIt(self, args):
        self.parse_arguments(args)
        self.redoIt()
    def redoIt(self):
        # save the resolved path after performing the move
        step = self.blueprint_model.get_step(self.step_path)
        self.old_name = step.name if step else ""
        self.resolved_target_path = self.blueprint_model.rename_step(self.step_path, self.new_name)
        if self.resolved_target_path is None:
            raise RuntimeError("Failed to rename BuildStep")
    def undoIt(self):
        # Rename the step (at its resolved path) back to its original name.
        self.blueprint_model.rename_step(self.resolved_target_path, self.old_name)
_CMD_CLASSES.append(PulseRenameStepCmd)
class PulseSetActionAttrCmd(PulseCmdBase):
    """
    Command to modify the value of a Pulse BuildAction attribute.

    Values cross the command boundary as serialized strings so they are
    undo-safe; they are deserialized just before being applied.
    """
    # the full path to the attribute, e.g. 'My/Build/Step.myAttr'
    attr_path_arg = CmdArg(MSyntax.kString)
    # the serialized value of the attribute, e.g. '123'
    new_value_arg = CmdArg(MSyntax.kString)
    # the index of the variant to modify
    variant_index_flag = CmdFlag("-v", "-variant", MSyntax.kLong, -1)
    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.attr_path_arg, cls.new_value_arg]
    @classmethod
    def get_flags(cls) -> List[CmdFlag]:
        return [cls.variant_index_flag]
    def parse_arguments(self, args):
        parser = self.get_arg_parser(args)
        self.attr_path: str = self.get_arg(self.attr_path_arg, parser)
        self.new_value: str = self.get_arg(self.new_value_arg, parser)
        # -1 (the flag default) presumably means the non-variant value — confirm.
        self.variant_index: int = self.get_flag(self.variant_index_flag, parser)
    def doIt(self, args):
        self.parse_arguments(args)
        self.redoIt()
    def redoIt(self):
        # store old value as str
        self.old_str_value = serialize_attr_value(
            self.blueprint_model.get_action_attr(self.attr_path, self.variant_index)
        )
        # deserialize str value into objects
        value = deserialize_attr_value(self.new_value)
        self.blueprint_model.set_action_attr(self.attr_path, value, self.variant_index)
    def undoIt(self):
        # deserialize str value into objects
        value = deserialize_attr_value(self.old_str_value)
        self.blueprint_model.set_action_attr(self.attr_path, value, self.variant_index)
_CMD_CLASSES.append(PulseSetActionAttrCmd)
class PulseSetIsVariantAttrCmd(PulseCmdBase):
    """
    Command to change an attribute of a Pulse BuildAction attribute
    from being constant or variant.

    Undo restores a full snapshot of the action's serialized data, since
    toggling variance can reshape the action's value layout.
    """
    # the full path to the attribute, e.g. 'My/Build/Step.myAttr'
    attr_path_arg = CmdArg(MSyntax.kString)
    # whether the attribute should be variant
    new_value_arg = CmdArg(MSyntax.kBoolean)
    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.attr_path_arg, cls.new_value_arg]
    def parse_arguments(self, args):
        parser = self.get_arg_parser(args)
        self.attr_path: str = self.get_arg(self.attr_path_arg, parser)
        self.new_value: bool = self.get_arg(self.new_value_arg, parser)
        # Derive the owning step path from the attribute path ('Step.attr').
        self.step_path: str = self.attr_path.split(".")[0]
    def doIt(self, args):
        self.parse_arguments(args)
        self.redoIt()
    def redoIt(self):
        # TODO: fail if not changing anything
        # snapshot the whole action proxy, since it may change
        # significantly when modifying variant attrs
        self.old_str_data = serialize_attr_value(self.blueprint_model.get_action_data(self.step_path))
        self.blueprint_model.set_is_action_attr_variant(self.attr_path, self.new_value)
    def undoIt(self):
        old_data = deserialize_attr_value(self.old_str_data)
        self.blueprint_model.set_action_data(self.step_path, old_data)
_CMD_CLASSES.append(PulseSetIsVariantAttrCmd)
class PulseSetIsActionMirroredCmd(PulseCmdBase):
    """
    Command to change whether a Pulse BuildAction is mirrored or not.

    Args: step path, new mirrored flag.  The previous flag is captured in
    redoIt so undoIt can restore it.
    """

    # the full path to the step, e.g. 'My/Build/Step'
    attr_path_arg = CmdArg(MSyntax.kString)
    # whether the action should be mirrored
    new_value_arg = CmdArg(MSyntax.kBoolean)

    @classmethod
    def get_args(cls) -> List[CmdArg]:
        return [cls.attr_path_arg, cls.new_value_arg]

    def parse_arguments(self, args):
        parser = self.get_arg_parser(args)
        self.step_path: str = self.get_arg(self.attr_path_arg, parser)
        self.new_value: bool = self.get_arg(self.new_value_arg, parser)

    def doIt(self, args):
        self.parse_arguments(args)
        self.redoIt()

    def redoIt(self):
        # TODO: fail if not changing anything
        # remember the previous flag so the change can be undone
        self.old_value = self.blueprint_model.is_action_mirrored(self.step_path)
        self.blueprint_model.set_is_action_mirrored(self.step_path, self.new_value)

    def undoIt(self):
        # BUG FIX: previously called set_is_action_attr_variant here (copy-paste
        # from PulseSetIsVariantAttrCmd), which toggled the variant state instead
        # of restoring the mirrored flag.
        self.blueprint_model.set_is_action_mirrored(self.step_path, self.old_value)


_CMD_CLASSES.append(PulseSetIsActionMirroredCmd)
| {
"content_hash": "862324929d88a3c82863b17f75458bee",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 102,
"avg_line_length": 32.86644951140065,
"alnum_prop": 0.664420218037661,
"repo_name": "bohdon/maya-pulse",
"id": "679e6e487e9c56358ad9f99ee11074cb8d619611",
"size": "10090",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/pulse/plug-ins/pulse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "354"
},
{
"name": "Mathematica",
"bytes": "74473"
},
{
"name": "Python",
"bytes": "831422"
},
{
"name": "Shell",
"bytes": "3636"
}
],
"symlink_target": ""
} |
'''
algorithm:
time_saving = max(calc_time_saving_matrix)
while time_saving > 0:
calc_time_saving_matrix <- this accounts for servers being full
pick video and server that gives biggest saving
add video to server
remove the request(s) that are satisfied by the current arrangement
'''
import os, numpy, copy
from VideoCache import data_reader, time_saved_calculator
from VideoCache.time_saved_calculator import calculate_time_saved
from VideoCache import output
import time
def iterative_method(fname):
    """Greedy cache-filling strategy for one Hash Code input file.

    Repeatedly places the (cache server, video) pair that currently saves
    the most total request time, updating the saving matrix after every
    placement, and stops once no placement saves any time.  The final
    assignment is scored and written to ``output/<fname>_iterative.out``.
    """
    stime = time.time()
    # NOTE: was previously bound to the name `file`, shadowing the builtin
    input_path = os.path.join("../", "VideoCache", "input", fname + ".in")
    reader = data_reader.DataReader(input_path)
    reader.read()

    # time_saving_matrix[server][video] = time saved by caching that video there
    time_saving_matrix = time_saved_calculator.calc_time_saved_matrix(
        reader.request_descriptions, reader.videos, reader.cache_servers)

    # Group the request descriptions by the ID of the requested video.
    # (Replaces a try/bare-except block; a bare except could hide real errors.)
    request_to_video_map = {}
    for request in reader.request_descriptions:
        request_to_video_map.setdefault(request.video.ID, []).append(request)

    videos_at_endpoint = None  # placeholder argument expected by update_matrix
    max_time_save = 1e50  # sentinel so the loop body runs at least once
    step = 0
    while max_time_save > 0:
        max_time_save = numpy.max(time_saving_matrix)
        server_id, video_id = numpy.unravel_index(
            numpy.argmax(time_saving_matrix), time_saving_matrix.shape)
        server = reader.cache_servers[server_id]
        video = reader.videos[video_id]
        print("adding video {0} size {1}".format(video_id, video.size))
        server.add_video(video)
        print(" max_time_save " + str(max_time_save))
        print("step {0}".format(step))
        # incrementally adjust the savings now that this video is cached
        time_saved_calculator.update_matrix(
            time_saving_matrix, request_to_video_map, video_id, videos_at_endpoint)
        step += 1

    # recompute the achieved saving from scratch to report the real score
    total_time_saved2, total_requests2 = calculate_time_saved(reader.request_descriptions)
    score2 = total_time_saved2 / total_requests2 * 1000
    print(" actual total time saved " + str(total_time_saved2))
    print(" actual score " + str(score2))

    strategy = "iterative"
    o = output.Output()
    o.set_cache_list(reader.cache_servers)
    o.set_output_filename(os.path.join("output", fname + "_" + strategy + ".out"))
    o.write()
    print("time taken " + str(time.time() - stime))
def does_request_involve_video_and_server(request_descriptor, video, server):
    """Return True if the request asks for *video* and its endpoint is
    connected to *server*, False otherwise."""
    if request_descriptor.video == video:
        reachable = [conn.cache_server
                     for conn in request_descriptor.endpoint.cache_server_connections]
        return server in reachable
    return False
if __name__ == "__main__":
    # run the greedy strategy over every bundled input file
    input_names = [
        "videos_worth_spreading",
        "kittens",
        "me_at_the_zoo",
        "as",
        "trending_today",
    ]
    for input_name in input_names:
        iterative_method(input_name)
| {
"content_hash": "763f8d2b874a2efe79c1bf8b2d5d6646",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 115,
"avg_line_length": 29.115151515151513,
"alnum_prop": 0.5928393005828476,
"repo_name": "marcrobinson/hc2017",
"id": "18a1dac82b4999fa60e6f80630590647ef38d015",
"size": "4804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "VideoCache/iterative.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27111"
}
],
"symlink_target": ""
} |
import igraph as ig
import numpy as np
import fastlayout as fl
import time
# load graph data
g = ig.load( filename='data/medium_example_network.txt', format='ncol' )
# set various constants
dimension = 2
max_it = 500
temp = 1
# get number of nodes
nodes = len(g.vs)
# get edges in the right format
edgelist = np.array( [edge.tuple for edge in g.es], np.int32)
# call igraph first
# this is only to compare - timing as well as output
print 'igraph'
start = time.clock()
igLayout = g.layout_fruchterman_reingold()
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fruchterman_reingold) is: ", elapsed
ig.plot(g, "output/plot_igraph.png", layout=igLayout)
# call all versions with old data format
# this is only for reference
print 'layout_fr (float baseline)'
start = time.clock()
pos = fl.layout_fr(nodes*dimension, edgelist, max_it, temp )
pos = np.reshape(pos, (nodes, dimension))
floatLayout = ig.Layout( tuple(map(tuple, pos )) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr) is: ", elapsed
ig.plot(g, "output/plot_fr_float_baseline.png", layout=floatLayout)
print 'layout_fr_omp'
start = time.clock()
pos = fl.layout_fr_omp( nodes*dimension, edgelist, max_it, temp )
pos = np.reshape(pos, (nodes, dimension))
float2Layout = ig.Layout( tuple(map(tuple, pos)) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_omp) is: ", elapsed
ig.plot(g, "output/plot_fr_omp.png", layout=float2Layout)
print 'layout_fr_simd'
start = time.clock()
pos = fl.layout_fr_simd( nodes*dimension, edgelist, max_it, temp )
pos = np.reshape(pos, (nodes, dimension))
float3Layout = ig.Layout( tuple(map(tuple, pos)) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_simd) is: ", elapsed
ig.plot(g, "output/plot_fr_simd.png", layout=float3Layout)
print 'layout_fr_omp_simd'
start = time.clock()
pos = fl.layout_fr_omp_simd( nodes*dimension, edgelist, max_it, temp )
pos = np.reshape( pos, (nodes, dimension) )
float6Layout = ig.Layout( tuple(map(tuple, pos)) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_omp_simd) is: ", elapsed
ig.plot(g, "output/plot_fr_omp_simd.png", layout=float6Layout)
# call new implementations with strided data layout
# NOTE
# this one works different as it uses a different layout
# for the data, so no reshaping has to be done, but you
# have to zip first and second half of the array
print 'layout_fr_stride (strided float baseline)'
start = time.clock()
pos = fl.layout_fr_stride( nodes*dimension, edgelist, max_it, temp )
float6Layout = ig.Layout( tuple(zip(pos[0:nodes], pos[nodes:2*nodes])) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_stride) is: ", elapsed
ig.plot(g, "output/plot_fr_stride.png", layout=float6Layout)
print 'layout_fr_stride_simd'
start = time.clock()
pos = fl.layout_fr_stride_simd( nodes*dimension, edgelist, max_it, temp )
myLayout = ig.Layout( tuple(zip(pos[0:nodes], pos[nodes:2*nodes])) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_stride_simd) is: ", elapsed
ig.plot(g, "output/plot_fr_simd_stride.png", layout=myLayout)
print 'layout_fr_stride_omp'
start = time.clock()
pos = fl.layout_fr_stride_omp( nodes*dimension, edgelist, max_it, temp )
myLayout = ig.Layout( tuple(zip(pos[0:nodes], pos[nodes:2*nodes])) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_stride_omp) is: ", elapsed
ig.plot(g, "output/plot_fr_omp_stride.png", layout=myLayout)
print 'layout_fr_stride_omp_simd'
start = time.clock()
pos = fl.layout_fr_stride_omp_simd( nodes*dimension, edgelist, max_it, temp )
myLayout = ig.Layout( tuple(zip(pos[0:nodes], pos[nodes:2*nodes])) )
elapsed = time.clock()
elapsed = elapsed - start
print "Time spent in (layout_fr_stride_omp_simd) is: ", elapsed
ig.plot(g, "output/plot_fr_omp_stride_simd.png", layout=myLayout)
| {
"content_hash": "29ace082105a85eda1b776278dc1fc50",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 77,
"avg_line_length": 35.63963963963964,
"alnum_prop": 0.7196663296258847,
"repo_name": "sg-dev/pyFastLayout",
"id": "50383add52bc4a5258f29dbea836c334036d221e",
"size": "4052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tags/testing-3.0/benchmark.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "804158"
},
{
"name": "Python",
"bytes": "37734"
}
],
"symlink_target": ""
} |
# Shim that lets this translated Repy module run under plain Python.
from repyportability import *
import repyhelper
mycontext = repyhelper.get_shared_context()
# Repy call-context globals expected by translated repy code.
callfunc = 'import'
callargs = []
"""
<Program Name>
time.repy
<Author>
Eric Kimbrel
<Started>
Jul 2, 2009
<Purpose>
replaces the previous time.repy by use of the active interface
time_interface.repy and the implementors ntp_time.repy and tcp_time.repy
see time_interface.repy for details
"""
# Pull in the concrete time implementations (NTP-based and TCP-based).
repyhelper.translate_and_import('ntp_time.repy')
repyhelper.translate_and_import('tcp_time.repy')
### Automatically generated by repyhelper.py ### C:\Dropbox\uni\y1p2\dist\lab\demokit\time.repy
| {
"content_hash": "307960c91e36d2cd9c4527de8fcb4671",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 95,
"avg_line_length": 19.966666666666665,
"alnum_prop": 0.7495826377295493,
"repo_name": "toregreijer/horse-battery",
"id": "ed8c566cf12b56c9e1d759717cfceac6c56adf22",
"size": "979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "time_repy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1210574"
}
],
"symlink_target": ""
} |
"""
flask.testsuite.helpers
~~~~~~~~~~~~~~~~~~~~~~~
Various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import flask
import unittest
from logging import StreamHandler
from flask.testsuite import FlaskTestCase, catch_warnings, catch_stderr
from werkzeug.http import parse_cache_control_header, parse_options_header
from flask._compat import StringIO, text_type
def has_encoding(name):
    """Return True if the running Python knows the codec *name*."""
    import codecs
    try:
        codecs.lookup(name)
    except LookupError:
        return False
    else:
        return True
class JSONTestCase(FlaskTestCase):
    """Tests for Flask's JSON support: request body parsing, jsonify,
    template escaping, encoder/decoder customization and key sorting."""

    def test_json_bad_requests(self):
        """A body that is not valid JSON must yield a 400, not a 500."""
        app = flask.Flask(__name__)

        @app.route('/json', methods=['POST'])
        def return_json():
            return flask.jsonify(foo=text_type(flask.request.get_json()))

        c = app.test_client()
        rv = c.post('/json', data='malformed', content_type='application/json')
        self.assert_equal(rv.status_code, 400)

    def test_json_body_encoding(self):
        """The charset from the Content-Type header must be honoured."""
        app = flask.Flask(__name__)
        app.testing = True

        @app.route('/')
        def index():
            return flask.request.get_json()

        c = app.test_client()
        resp = c.get('/', data=u'"Hällo Wörld"'.encode('iso-8859-15'),
                     content_type='application/json; charset=iso-8859-15')
        self.assert_equal(resp.data, u'Hällo Wörld'.encode('utf-8'))

    def test_jsonify(self):
        """jsonify accepts both keyword arguments and a single dict."""
        d = dict(a=23, b=42, c=[1, 2, 3])
        app = flask.Flask(__name__)

        @app.route('/kw')
        def return_kwargs():
            return flask.jsonify(**d)

        @app.route('/dict')
        def return_dict():
            return flask.jsonify(d)

        c = app.test_client()
        for url in '/kw', '/dict':
            rv = c.get(url)
            self.assert_equal(rv.mimetype, 'application/json')
            self.assert_equal(flask.json.loads(rv.data), d)

    def test_json_as_unicode(self):
        """JSON_AS_ASCII toggles \\uXXXX escaping of non-ASCII characters."""
        app = flask.Flask(__name__)

        app.config['JSON_AS_ASCII'] = True
        with app.app_context():
            rv = flask.json.dumps(u'\N{SNOWMAN}')
            self.assert_equal(rv, '"\\u2603"')

        app.config['JSON_AS_ASCII'] = False
        with app.app_context():
            rv = flask.json.dumps(u'\N{SNOWMAN}')
            self.assert_equal(rv, u'"\u2603"')

    def test_json_attr(self):
        """Values posted as JSON are available via request.get_json()."""
        app = flask.Flask(__name__)

        @app.route('/add', methods=['POST'])
        def add():
            json = flask.request.get_json()
            return text_type(json['a'] + json['b'])

        c = app.test_client()
        rv = c.post('/add', data=flask.json.dumps({'a': 1, 'b': 2}),
                    content_type='application/json')
        self.assert_equal(rv.data, b'3')

    def test_template_escaping(self):
        """tojson/htmlsafe_dumps must escape characters that could break
        out of <script> blocks or single-quoted HTML attributes."""
        app = flask.Flask(__name__)
        render = flask.render_template_string
        with app.test_request_context():
            rv = flask.json.htmlsafe_dumps('</script>')
            self.assert_equal(rv, u'"\\u003c/script\\u003e"')
            self.assert_equal(type(rv), text_type)
            rv = render('{{ "</script>"|tojson }}')
            self.assert_equal(rv, '"\\u003c/script\\u003e"')
            rv = render('{{ "<\0/script>"|tojson }}')
            self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"')
            rv = render('{{ "<!--<script>"|tojson }}')
            self.assert_equal(rv, '"\\u003c!--\\u003cscript\\u003e"')
            rv = render('{{ "&"|tojson }}')
            self.assert_equal(rv, '"\\u0026"')
            rv = render('{{ "\'"|tojson }}')
            self.assert_equal(rv, '"\\u0027"')
            rv = render("<a ng-data='{{ data|tojson }}'></a>",
                        data={'x': ["foo", "bar", "baz'"]})
            self.assert_equal(rv,
                '<a ng-data=\'{"x": ["foo", "bar", "baz\\u0027"]}\'></a>')

    def test_json_customization(self):
        """Apps can install their own JSON encoder/decoder classes."""
        class X(object):
            # simple value wrapper used to exercise custom (de)serialization
            def __init__(self, val):
                self.val = val

        class MyEncoder(flask.json.JSONEncoder):
            def default(self, o):
                if isinstance(o, X):
                    return '<%d>' % o.val
                return flask.json.JSONEncoder.default(self, o)

        class MyDecoder(flask.json.JSONDecoder):
            def __init__(self, *args, **kwargs):
                kwargs.setdefault('object_hook', self.object_hook)
                flask.json.JSONDecoder.__init__(self, *args, **kwargs)

            def object_hook(self, obj):
                if len(obj) == 1 and '_foo' in obj:
                    return X(obj['_foo'])
                return obj

        app = flask.Flask(__name__)
        app.testing = True
        app.json_encoder = MyEncoder
        app.json_decoder = MyDecoder

        @app.route('/', methods=['POST'])
        def index():
            return flask.json.dumps(flask.request.get_json()['x'])

        c = app.test_client()
        rv = c.post('/', data=flask.json.dumps({
            'x': {'_foo': 42}
        }), content_type='application/json')
        self.assertEqual(rv.data, b'"<42>"')

    def test_modified_url_encoding(self):
        """A custom request class can change the URL charset."""
        class ModifiedRequest(flask.Request):
            url_charset = 'euc-kr'

        app = flask.Flask(__name__)
        app.testing = True
        app.request_class = ModifiedRequest
        app.url_map.charset = 'euc-kr'

        @app.route('/')
        def index():
            return flask.request.args['foo']

        rv = app.test_client().get(u'/?foo=정상처리'.encode('euc-kr'))
        self.assert_equal(rv.status_code, 200)
        self.assert_equal(rv.data, u'정상처리'.encode('utf-8'))

    # skip the test above when the interpreter lacks the euc-kr codec
    if not has_encoding('euc-kr'):
        test_modified_url_encoding = None

    def test_json_key_sorting(self):
        """Keys are sorted by default; accept either lexicographic (string)
        or numeric (int) ordering of the serialized keys."""
        app = flask.Flask(__name__)
        app.testing = True
        self.assert_equal(app.config['JSON_SORT_KEYS'], True)
        d = dict.fromkeys(range(20), 'foo')

        @app.route('/')
        def index():
            return flask.jsonify(values=d)

        c = app.test_client()
        rv = c.get('/')
        lines = [x.strip() for x in rv.data.strip().decode('utf-8').splitlines()]
        sorted_by_str = [
            '{',
            '"values": {',
            '"0": "foo",',
            '"1": "foo",',
            '"10": "foo",',
            '"11": "foo",',
            '"12": "foo",',
            '"13": "foo",',
            '"14": "foo",',
            '"15": "foo",',
            '"16": "foo",',
            '"17": "foo",',
            '"18": "foo",',
            '"19": "foo",',
            '"2": "foo",',
            '"3": "foo",',
            '"4": "foo",',
            '"5": "foo",',
            '"6": "foo",',
            '"7": "foo",',
            '"8": "foo",',
            '"9": "foo"',
            '}',
            '}'
        ]
        sorted_by_int = [
            '{',
            '"values": {',
            '"0": "foo",',
            '"1": "foo",',
            '"2": "foo",',
            '"3": "foo",',
            '"4": "foo",',
            '"5": "foo",',
            '"6": "foo",',
            '"7": "foo",',
            '"8": "foo",',
            '"9": "foo",',
            '"10": "foo",',
            '"11": "foo",',
            '"12": "foo",',
            '"13": "foo",',
            '"14": "foo",',
            '"15": "foo",',
            '"16": "foo",',
            '"17": "foo",',
            '"18": "foo",',
            '"19": "foo"',
            '}',
            '}'
        ]

        try:
            self.assert_equal(lines, sorted_by_int)
        except AssertionError:
            self.assert_equal(lines, sorted_by_str)
class SendfileTestCase(FlaskTestCase):
    """Tests for flask.send_file / Flask.send_static_file behaviour."""

    def test_send_file_regular(self):
        """Sending a file by path streams its exact contents as text/html."""
        app = flask.Flask(__name__)
        with app.test_request_context():
            rv = flask.send_file('static/index.html')
            self.assert_true(rv.direct_passthrough)
            self.assert_equal(rv.mimetype, 'text/html')
            with app.open_resource('static/index.html') as f:
                rv.direct_passthrough = False
                self.assert_equal(rv.data, f.read())
            rv.close()

    def test_send_file_xsendfile(self):
        """With use_x_sendfile the response carries an X-Sendfile header
        pointing at the absolute file path instead of the body."""
        app = flask.Flask(__name__)
        app.use_x_sendfile = True
        with app.test_request_context():
            rv = flask.send_file('static/index.html')
            self.assert_true(rv.direct_passthrough)
            self.assert_in('x-sendfile', rv.headers)
            self.assert_equal(rv.headers['x-sendfile'],
                os.path.join(app.root_path, 'static/index.html'))
            self.assert_equal(rv.mimetype, 'text/html')
            rv.close()

    def test_send_file_object(self):
        """Passing open file objects works but emits warnings because the
        mimetype and etag cannot be derived automatically."""
        app = flask.Flask(__name__)
        with catch_warnings() as captured:
            with app.test_request_context():
                f = open(os.path.join(app.root_path, 'static/index.html'))
                rv = flask.send_file(f)
                rv.direct_passthrough = False
                with app.open_resource('static/index.html') as f:
                    self.assert_equal(rv.data, f.read())
                self.assert_equal(rv.mimetype, 'text/html')
                rv.close()
            # mimetypes + etag
            self.assert_equal(len(captured), 2)

        app.use_x_sendfile = True
        with catch_warnings() as captured:
            with app.test_request_context():
                f = open(os.path.join(app.root_path, 'static/index.html'))
                rv = flask.send_file(f)
                self.assert_equal(rv.mimetype, 'text/html')
                self.assert_in('x-sendfile', rv.headers)
                self.assert_equal(rv.headers['x-sendfile'],
                    os.path.join(app.root_path, 'static/index.html'))
                rv.close()
            # mimetypes + etag
            self.assert_equal(len(captured), 2)

        app.use_x_sendfile = False
        with app.test_request_context():
            with catch_warnings() as captured:
                f = StringIO('Test')
                rv = flask.send_file(f)
                rv.direct_passthrough = False
                self.assert_equal(rv.data, b'Test')
                self.assert_equal(rv.mimetype, 'application/octet-stream')
                rv.close()
            # etags
            self.assert_equal(len(captured), 1)
            with catch_warnings() as captured:
                f = StringIO('Test')
                rv = flask.send_file(f, mimetype='text/plain')
                rv.direct_passthrough = False
                self.assert_equal(rv.data, b'Test')
                self.assert_equal(rv.mimetype, 'text/plain')
                rv.close()
            # etags
            self.assert_equal(len(captured), 1)

        app.use_x_sendfile = True
        with catch_warnings() as captured:
            with app.test_request_context():
                f = StringIO('Test')
                rv = flask.send_file(f)
                # file objects cannot be x-sendfile'd (no path available)
                self.assert_not_in('x-sendfile', rv.headers)
                rv.close()
            # etags
            self.assert_equal(len(captured), 1)

    def test_attachment(self):
        """as_attachment=True sets a Content-Disposition: attachment header
        with the correct filename."""
        app = flask.Flask(__name__)
        with catch_warnings() as captured:
            with app.test_request_context():
                f = open(os.path.join(app.root_path, 'static/index.html'))
                rv = flask.send_file(f, as_attachment=True)
                value, options = parse_options_header(rv.headers['Content-Disposition'])
                self.assert_equal(value, 'attachment')
                rv.close()
            # mimetypes + etag
            self.assert_equal(len(captured), 2)

        with app.test_request_context():
            self.assert_equal(options['filename'], 'index.html')
            rv = flask.send_file('static/index.html', as_attachment=True)
            value, options = parse_options_header(rv.headers['Content-Disposition'])
            self.assert_equal(value, 'attachment')
            self.assert_equal(options['filename'], 'index.html')
            rv.close()

        with app.test_request_context():
            rv = flask.send_file(StringIO('Test'), as_attachment=True,
                                 attachment_filename='index.txt',
                                 add_etags=False)
            self.assert_equal(rv.mimetype, 'text/plain')
            value, options = parse_options_header(rv.headers['Content-Disposition'])
            self.assert_equal(value, 'attachment')
            self.assert_equal(options['filename'], 'index.txt')
            rv.close()

    def test_static_file(self):
        """Cache-Control max-age comes from the default (12h), the
        SEND_FILE_MAX_AGE_DEFAULT config key, or get_send_file_max_age()."""
        app = flask.Flask(__name__)
        # default cache timeout is 12 hours
        with app.test_request_context():
            # Test with static file handler.
            rv = app.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 12 * 60 * 60)
            rv.close()
            # Test again with direct use of send_file utility.
            rv = flask.send_file('static/index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 12 * 60 * 60)
            rv.close()

        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 3600
        with app.test_request_context():
            # Test with static file handler.
            rv = app.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 3600)
            rv.close()
            # Test again with direct use of send_file utility.
            rv = flask.send_file('static/index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 3600)
            rv.close()

        class StaticFileApp(flask.Flask):
            # subclass hook that overrides the per-file cache timeout
            def get_send_file_max_age(self, filename):
                return 10

        app = StaticFileApp(__name__)
        with app.test_request_context():
            # Test with static file handler.
            rv = app.send_static_file('index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 10)
            rv.close()
            # Test again with direct use of send_file utility.
            rv = flask.send_file('static/index.html')
            cc = parse_cache_control_header(rv.headers['Cache-Control'])
            self.assert_equal(cc.max_age, 10)
            rv.close()
class LoggingTestCase(FlaskTestCase):
    """Tests for app.logger behaviour and error handling.

    NOTE(review): the url_for tests at the end of this class look unrelated
    to logging -- possibly historically misplaced; verify before moving.
    """

    def test_logger_cache(self):
        """The logger object is cached until logger_name changes."""
        app = flask.Flask(__name__)
        logger1 = app.logger
        self.assert_true(app.logger is logger1)
        self.assert_equal(logger1.name, __name__)
        app.logger_name = __name__ + '/test_logger_cache'
        self.assert_true(app.logger is not logger1)

    def test_debug_log(self):
        """In debug mode both warning and debug records reach stderr, and
        exceptions from views propagate instead of being swallowed."""
        app = flask.Flask(__name__)
        app.debug = True

        @app.route('/')
        def index():
            app.logger.warning('the standard library is dead')
            app.logger.debug('this is a debug statement')
            return ''

        @app.route('/exc')
        def exc():
            1 // 0

        with app.test_client() as c:
            with catch_stderr() as err:
                c.get('/')
                out = err.getvalue()
                self.assert_in('WARNING in helpers [', out)
                self.assert_in(os.path.basename(__file__.rsplit('.', 1)[0] + '.py'), out)
                self.assert_in('the standard library is dead', out)
                self.assert_in('this is a debug statement', out)

            with catch_stderr() as err:
                try:
                    c.get('/exc')
                except ZeroDivisionError:
                    pass
                else:
                    self.assert_true(False, 'debug log ate the exception')

    def test_debug_log_override(self):
        """The log level set on the logger is kept in debug mode."""
        app = flask.Flask(__name__)
        app.debug = True
        app.logger_name = 'flask_tests/test_debug_log_override'
        app.logger.level = 10
        self.assert_equal(app.logger.level, 10)

    def test_exception_logging(self):
        """Unhandled view exceptions are logged with a traceback and turn
        into a 500 response."""
        out = StringIO()
        app = flask.Flask(__name__)
        app.logger_name = 'flask_tests/test_exception_logging'
        app.logger.addHandler(StreamHandler(out))

        @app.route('/')
        def index():
            1 // 0

        rv = app.test_client().get('/')
        self.assert_equal(rv.status_code, 500)
        self.assert_in(b'Internal Server Error', rv.data)

        err = out.getvalue()
        self.assert_in('Exception on / [GET]', err)
        self.assert_in('Traceback (most recent call last):', err)
        self.assert_in('1 // 0', err)
        self.assert_in('ZeroDivisionError:', err)

    def test_processor_exceptions(self):
        """Errors raised in before/after request hooks hit the registered
        500 error handler."""
        app = flask.Flask(__name__)

        @app.before_request
        def before_request():
            # `trigger` is a closure over the loop variable assigned below
            if trigger == 'before':
                1 // 0

        @app.after_request
        def after_request(response):
            if trigger == 'after':
                1 // 0
            return response

        @app.route('/')
        def index():
            return 'Foo'

        @app.errorhandler(500)
        def internal_server_error(e):
            return 'Hello Server Error', 500

        for trigger in 'before', 'after':
            rv = app.test_client().get('/')
            self.assert_equal(rv.status_code, 500)
            self.assert_equal(rv.data, b'Hello Server Error')

    def test_url_for_with_anchor(self):
        """The _anchor argument is URL-quoted and appended as a fragment."""
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return '42'

        with app.test_request_context():
            self.assert_equal(flask.url_for('index', _anchor='x y'),
                              '/#x%20y')

    def test_url_for_with_scheme(self):
        """_scheme works together with _external=True."""
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return '42'

        with app.test_request_context():
            self.assert_equal(flask.url_for('index',
                                            _external=True,
                                            _scheme='https'),
                              'https://localhost/')

    def test_url_for_with_scheme_not_external(self):
        """_scheme without _external=True is an error."""
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return '42'

        with app.test_request_context():
            self.assert_raises(ValueError,
                               flask.url_for,
                               'index',
                               _scheme='https')

    def test_url_with_method(self):
        """url_for can disambiguate rules for the same endpoint by _method."""
        from flask.views import MethodView
        app = flask.Flask(__name__)

        class MyView(MethodView):
            def get(self, id=None):
                if id is None:
                    return 'List'
                return 'Get %d' % id

            def post(self):
                return 'Create'

        myview = MyView.as_view('myview')
        app.add_url_rule('/myview/', methods=['GET'],
                         view_func=myview)
        app.add_url_rule('/myview/<int:id>', methods=['GET'],
                         view_func=myview)
        app.add_url_rule('/myview/create', methods=['POST'],
                         view_func=myview)

        with app.test_request_context():
            self.assert_equal(flask.url_for('myview', _method='GET'),
                              '/myview/')
            self.assert_equal(flask.url_for('myview', id=42, _method='GET'),
                              '/myview/42')
            self.assert_equal(flask.url_for('myview', _method='POST'),
                              '/myview/create')
class NoImportsTestCase(FlaskTestCase):
    """Test Flasks are created without import.

    Avoiding ``__import__`` helps create Flask instances where there are errors
    at import time.  Those runtime errors will be apparent to the user soon
    enough, but tools which build Flask instances meta-programmatically benefit
    from a Flask which does not ``__import__``.  Instead of importing to
    retrieve file paths or metadata on a module or package, use the pkgutil and
    imp modules in the Python standard library.
    """

    def test_name_with_import_error(self):
        # constructing Flask('importerror') must not try to import the module
        try:
            flask.Flask('importerror')
        except NotImplementedError:
            self.fail('Flask(import_name) is importing import_name.')
class StreamingTestCase(FlaskTestCase):
    """Tests for stream_with_context: streaming responses whose generator
    keeps access to the request context while it runs."""

    def test_streaming_with_context(self):
        """Wrapping an already-created generator keeps the request usable."""
        app = flask.Flask(__name__)
        app.testing = True

        @app.route('/')
        def index():
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(flask.stream_with_context(generate()))

        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')

    def test_streaming_with_context_as_decorator(self):
        """stream_with_context also works as a generator decorator."""
        app = flask.Flask(__name__)
        app.testing = True

        @app.route('/')
        def index():
            @flask.stream_with_context
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(generate())

        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')

    def test_streaming_with_context_and_custom_close(self):
        """close() on a wrapped iterable is still invoked when the stream
        is exhausted."""
        app = flask.Flask(__name__)
        app.testing = True
        called = []

        class Wrapper(object):
            # iterator wrapper that records when it gets closed
            def __init__(self, gen):
                self._gen = gen

            def __iter__(self):
                return self

            def close(self):
                called.append(42)

            def __next__(self):
                return next(self._gen)
            next = __next__  # python 2 iterator protocol

        @app.route('/')
        def index():
            def generate():
                yield 'Hello '
                yield flask.request.args['name']
                yield '!'
            return flask.Response(flask.stream_with_context(
                Wrapper(generate())))

        c = app.test_client()
        rv = c.get('/?name=World')
        self.assertEqual(rv.data, b'Hello World!')
        self.assertEqual(called, [42])
def suite():
    """Assemble this module's test suite; JSON tests are included only
    when flask reports JSON support as available."""
    cases = [SendfileTestCase, LoggingTestCase,
             NoImportsTestCase, StreamingTestCase]
    if flask.json_available:
        # keep the original ordering: JSON tests run first when present
        cases.insert(0, JSONTestCase)
    helpers_suite = unittest.TestSuite()
    for case in cases:
        helpers_suite.addTest(unittest.makeSuite(case))
    return helpers_suite
| {
"content_hash": "15dfd7234a1ba9c379c1501c368dd8fd",
"timestamp": "",
"source": "github",
"line_count": 622,
"max_line_length": 89,
"avg_line_length": 36.541800643086816,
"alnum_prop": 0.509525276079018,
"repo_name": "sparkmorry/flask",
"id": "7de70c0aeb0c3bb419985861c71036293e11cd21",
"size": "22773",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "flask/testsuite/helpers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
This file specifically includes utilities for security.
"""
import threading
import struct
import uuid
import random
import time
import os
import re
import sys
import logging
import socket
import base64
import zlib
_struct_2_long_long = struct.Struct('=QQ')
python_version = sys.version_info[0]
if python_version == 2:
import cPickle as pickle
else:
import pickle
from hashlib import md5, sha1, sha224, sha256, sha384, sha512
try:
from Crypto.Cipher import AES
except ImportError:
import gluon.contrib.aes as AES
import hmac
try:
try:
from gluon.contrib.pbkdf2_ctypes import pbkdf2_hex
except (ImportError, AttributeError):
from gluon.contrib.pbkdf2 import pbkdf2_hex
HAVE_PBKDF2 = True
except ImportError:
try:
from .pbkdf2 import pbkdf2_hex
HAVE_PBKDF2 = True
except (ImportError, ValueError):
HAVE_PBKDF2 = False
logger = logging.getLogger("web2py")
def AES_new(key, IV=None):
    """Return an (AES-CBC cipher, IV) pair; a random 16-byte IV is
    generated when none is supplied."""
    if IV is None:
        IV = fast_urandom16()
    return AES.new(key, AES.MODE_CBC, IV), IV
def compare(a, b):
    """Compare two strings in constant time (timing-attack resistant)."""
    if len(a) != len(b):
        return False
    # accumulate the XOR of every character pair so the loop always scans
    # the full string, no matter where the first mismatch occurs
    mismatch = 0
    for ca, cb in zip(a, b):
        mismatch |= ord(ca) ^ ord(cb)
    return mismatch == 0
def md5_hash(text):
    """Return the hexadecimal MD5 digest of the given text."""
    hasher = md5(text)
    return hasher.hexdigest()
def simple_hash(text, key='', salt='', digest_alg='md5'):
    """
    Generate the hash of *text* using the specified digest algorithm.

    digest_alg may be a hashlib constructor, an algorithm name ('md5',
    'sha1', ...), or a 'pbkdf2(iterations,keylen,alg)' specification.
    When *key* is given together with a plain algorithm name, an HMAC is
    computed instead of a bare digest.
    """
    if not digest_alg:
        raise RuntimeError("simple_hash with digest_alg=None")
    elif not isinstance(digest_alg, str):  # manual approach: a callable digest
        h = digest_alg(text + key + salt)
    elif digest_alg.startswith('pbkdf2'):  # e.g. 'pbkdf2(1000,20,sha512)'
        iterations, keylen, alg = digest_alg[7:-1].split(',')
        return pbkdf2_hex(text, salt, int(iterations),
                          int(keylen), get_digest(alg))
    elif key:  # use hmac
        digest_alg = get_digest(digest_alg)
        h = hmac.new(key + salt, text, digest_alg)
    else:  # compatible with third party systems
        h = get_digest(digest_alg)()
        h.update(text + salt)
    return h.hexdigest()
def get_digest(value):
    """Map a digest algorithm name to its hashlib constructor.

    Non-string values are assumed to already be digest constructors and
    are returned unchanged.  Raises ValueError for unknown names.
    """
    if not isinstance(value, str):
        return value
    constructors = {
        'md5': md5,
        'sha1': sha1,
        'sha224': sha224,
        'sha256': sha256,
        'sha384': sha384,
        'sha512': sha512,
    }
    try:
        return constructors[value.lower()]
    except KeyError:
        raise ValueError("Invalid digest algorithm: %s" % value)
# Map hex-digest length (in characters) to the algorithm name.
# Integer division keeps the keys ints under Python 3 as well -- this file
# explicitly supports python_version == 3, and plain / would make them floats.
DIGEST_ALG_BY_SIZE = {
    128 // 4: 'md5',
    160 // 4: 'sha1',
    224 // 4: 'sha224',
    256 // 4: 'sha256',
    384 // 4: 'sha384',
    512 // 4: 'sha512',
}
def pad(s, n=32, padchar=' '):
    """Pad *s* with *padchar* up to the next multiple of *n* characters.

    A full block of padding is appended when len(s) is already a multiple
    of *n*, matching the previous behaviour for the default block size.
    Bug fix: the block-size parameter *n* was previously ignored (32 was
    hard-coded), so any caller passing a custom *n* got wrong padding.
    """
    return s + (n - len(s) % n) * padchar
def secure_dumps(data, encryption_key, hash_key=None, compression_level=None):
    """Pickle, optionally compress, AES-CBC encrypt and HMAC-sign *data*.

    Returns a '<signature>:<urlsafe-base64 payload>' string suitable for
    secure_loads().  When hash_key is omitted it is derived from the
    encryption key.
    """
    if not hash_key:
        hash_key = sha1(encryption_key).hexdigest()
    dump = pickle.dumps(data)
    if compression_level:
        dump = zlib.compress(dump, compression_level)
    key = pad(encryption_key[:32])
    cipher, IV = AES_new(key)
    # the IV is prepended to the ciphertext so secure_loads can recover it
    encrypted_data = base64.urlsafe_b64encode(IV + cipher.encrypt(pad(dump)))
    signature = hmac.new(hash_key, encrypted_data).hexdigest()
    return signature + ':' + encrypted_data
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    """Inverse of secure_dumps().

    Returns the original object, or None when *data* is malformed, the
    signature does not verify, or decryption/unpickling fails.
    """
    if not ':' in data:
        return None
    if not hash_key:
        hash_key = sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    actual_signature = hmac.new(hash_key, encrypted_data).hexdigest()
    # constant-time comparison guards against timing attacks on the HMAC
    if not compare(signature, actual_signature):
        return None
    key = pad(encryption_key[:32])
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    # the first 16 bytes are the IV written by secure_dumps
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = cipher.decrypt(encrypted_data)
        data = data.rstrip(' ')  # strip the space padding added by pad()
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except Exception, e:
        # any failure here means tampered or corrupt input: report None
        return None
### compute constant CTOKENS
def initialize_urandom():
    """
    This function and web2py_uuid follow from the discussion at:
    http://groups.google.com/group/web2py-developers/browse_thread/thread/7fd5789a7da3f09

    At startup web2py computes a unique ID that identifies the machine
    by adding uuid.getnode() + int(time.time() * 1e3).  The number is
    converted into 16 8-bit tokens which are used to feed the entropy
    source ('/dev/urandom') and to seed random.

    If os.urandom() is not supported, it falls back to random and issues
    a warning.  Returns (unpacked_ctokens, have_urandom).
    """
    node_id = uuid.getnode()
    microseconds = int(time.time() * 1e6)
    # derive 16 one-byte tokens from the node id + current time
    ctokens = [((node_id + microseconds) >> ((i % 6) * 8)) %
               256 for i in range(16)]
    random.seed(node_id + microseconds)
    try:
        os.urandom(1)
        have_urandom = True
        try:
            # try to add process-specific entropy
            frandom = open('/dev/urandom', 'wb')
            try:
                if python_version == 2:
                    frandom.write(''.join(chr(t) for t in ctokens))  # python 2
                else:
                    frandom.write(bytes([]).join(bytes([t]) for t in ctokens))  # python 3
            finally:
                frandom.close()
        except IOError:
            # works anyway
            pass
    except NotImplementedError:
        have_urandom = False
        logger.warning(
            """Cryptographically secure session management is not possible on your system because
your system does not provide a cryptographically secure entropy source.
This is not specific to web2py; consider deploying on a different operating system.""")
    if python_version == 2:
        packed = ''.join(chr(x) for x in ctokens)  # python 2
    else:
        packed = bytes([]).join(bytes([x]) for x in ctokens)  # python 3
    unpacked_ctokens = _struct_2_long_long.unpack(packed)
    return unpacked_ctokens, have_urandom
UNPACKED_CTOKENS, HAVE_URANDOM = initialize_urandom()
def fast_urandom16(urandom=[], locker=threading.RLock()):
    """
    this is 4x faster than calling os.urandom(16) and prevents
    the "too many files open" issue with concurrent access to os.urandom()
    """
    # NOTE: the mutable default arguments are intentional -- `urandom` acts
    # as a shared, process-wide cache of pre-generated 16-byte chunks and
    # `locker` is the single lock that guards its refill.
    try:
        # Fast path: list.pop() is atomic under the GIL, no lock needed.
        return urandom.pop()
    except IndexError:
        # Cache exhausted: refill with 16KB of entropy in one syscall.
        try:
            locker.acquire()
            ur = os.urandom(16 * 1024)
            # Slice into 16-byte chunks: the first chunk is returned
            # directly, the remaining 1023 are pushed into the cache.
            urandom += [ur[i:i + 16] for i in xrange(16, 1024 * 16, 16)]
            return ur[0:16]
        finally:
            locker.release()
def web2py_uuid(ctokens=UNPACKED_CTOKENS):
    """
    This function follows from the following discussion:
    http://groups.google.com/group/web2py-developers/browse_thread/thread/7fd5789a7da3f09
    It works like uuid.uuid4 except that tries to use os.urandom() if possible
    and it XORs the output with the tokens uniquely associated with this machine.
    """
    # Two pseudo-random 64-bit halves from the seeded PRNG.
    prng_hi = random.getrandbits(64)
    prng_lo = random.getrandbits(64)
    if HAVE_URANDOM:
        # Mix in 128 bits of OS entropy on top of PRNG + machine tokens.
        os_hi, os_lo = _struct_2_long_long.unpack(fast_urandom16())
        hi = prng_hi ^ os_hi ^ ctokens[0]
        lo = prng_lo ^ os_lo ^ ctokens[1]
    else:
        # No secure entropy source: fall back to PRNG + machine tokens only.
        hi = prng_hi ^ ctokens[0]
        lo = prng_lo ^ ctokens[1]
    return str(uuid.UUID(bytes=_struct_2_long_long.pack(hi, lo), version=4))
# Matches four dot-separated digit runs; the per-octet 0-255 range is checked
# separately in is_valid_ip_address().  FIX: use a raw string so `\d` and `\.`
# are real regex escapes, not (deprecated) string escape sequences.
REGEX_IPv4 = re.compile(r'(\d+)\.(\d+)\.(\d+)\.(\d+)')

def is_valid_ip_address(address):
    """
    Return True if `address` is a valid IPv4 or IPv6 address (or a known
    loopback alias such as 'localhost'), False otherwise.

    >>> is_valid_ip_address('127.0')
    False
    >>> is_valid_ip_address('127.0.0.1')
    True
    >>> is_valid_ip_address('2001:660::1')
    True
    """
    # deal with special cases
    if address.lower() in ('127.0.0.1', 'localhost', '::1', '::ffff:127.0.0.1'):
        return True
    elif address.lower() in ('unknown', ''):
        return False
    elif address.count('.') == 3:  # assume IPv4
        # Strip the IPv4-mapped-IPv6 prefix before validating as IPv4.
        if address.startswith('::ffff:'):
            address = address[7:]
        if hasattr(socket, 'inet_aton'):  # try validate using the OS
            try:
                socket.inet_aton(address)
                return True
            except socket.error:  # invalid address
                return False
        else:  # try validate using Regex
            match = REGEX_IPv4.match(address)
            if match and all(0 <= int(match.group(i)) < 256 for i in (1, 2, 3, 4)):
                return True
            return False
    elif hasattr(socket, 'inet_pton'):  # assume IPv6, try using the OS
        try:
            socket.inet_pton(socket.AF_INET6, address)
            return True
        except socket.error:  # invalid address
            return False
    else:  # do not know what to do? assume it is a valid address
        return True
elif hasattr(socket, 'inet_pton'): # assume IPv6, try using the OS
try:
socket.inet_pton(socket.AF_INET6, address)
return True
except socket.error: # invalid address
return False
else: # do not know what to do? assume it is a valid address
return True
def is_loopback_ip_address(ip=None, addrinfo=None):
    """
    Determines whether the address appears to be a loopback address.
    This assumes that the IP is valid.
    """
    if addrinfo:  # see socket.getaddrinfo() for layout of addrinfo tuple
        if addrinfo[0] in (socket.AF_INET, socket.AF_INET6):
            ip = addrinfo[4]
    if not isinstance(ip, basestring):
        return False
    # IPv4 or IPv6-embedded IPv4 or IPv4-compatible IPv6
    if ip.count('.') == 3:
        loopback_prefixes = ('127', '::127', '0:0:0:0:0:0:127',
                             '::ffff:127', '0:0:0:0:0:ffff:127')
        return ip.lower().startswith(loopback_prefixes)
    # Otherwise only the two spellings of the IPv6 loopback qualify.
    return ip in ('::1', '0:0:0:0:0:0:0:1')
def getipaddrinfo(host):
    """
    Filter out non-IP and bad IP addresses from getaddrinfo
    """
    try:
        candidates = socket.getaddrinfo(host, None)
    except socket.error:
        return []
    results = []
    for addrinfo in candidates:
        # Keep only IPv4/IPv6 entries whose address field is a string.
        if addrinfo[0] not in (socket.AF_INET, socket.AF_INET6):
            continue
        if not isinstance(addrinfo[4][0], basestring):
            continue
        results.append(addrinfo)
    return results
| {
"content_hash": "405f7127526959f7d226b6e8293b141b",
"timestamp": "",
"source": "github",
"line_count": 326,
"max_line_length": 97,
"avg_line_length": 32.28834355828221,
"alnum_prop": 0.607923237697131,
"repo_name": "maoxuxiang/termite_mallet_project",
"id": "dbffeb4f468d333056c879843120fe2d5b81f7f5",
"size": "10573",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "web2py/gluon/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "581895"
},
{
"name": "Java",
"bytes": "4515"
},
{
"name": "JavaScript",
"bytes": "2239800"
},
{
"name": "Perl",
"bytes": "1688"
},
{
"name": "Python",
"bytes": "6130120"
},
{
"name": "Shell",
"bytes": "106587"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    """Alter Machine.manifest and Machine.operating_system to optional
    (null/blank) CharFields with max_length=256."""

    dependencies = [
        ('server', '0004_auto_20150623_1623'),
    ]

    operations = [
        migrations.AlterField(
            model_name='machine',
            name='manifest',
            field=models.CharField(max_length=256, null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='machine',
            name='operating_system',
            field=models.CharField(max_length=256, null=True, blank=True),
        ),
    ]
| {
"content_hash": "7f92b3e6d680c411ba9c81aca4f5ad29",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 74,
"avg_line_length": 25.73913043478261,
"alnum_prop": 0.5861486486486487,
"repo_name": "macjustice/sal",
"id": "626d29ce7b01e80a24a9ada0b3491e0c4025b782",
"size": "616",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "server/migrations/0005_auto_20150717_1827.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "189639"
},
{
"name": "HTML",
"bytes": "88321"
},
{
"name": "JavaScript",
"bytes": "682521"
},
{
"name": "Makefile",
"bytes": "1143"
},
{
"name": "Nginx",
"bytes": "1955"
},
{
"name": "Python",
"bytes": "169952"
},
{
"name": "Shell",
"bytes": "1573"
}
],
"symlink_target": ""
} |
import serial
import sonic
import glob

# Bridge a serial touchpad controller to Sonic Pi: every "pad <n>" line read
# from the USB serial port is forwarded to Sonic Pi as a ":pad" cue.
sonic_pi = sonic.SonicPi()
# Assumes exactly one matching USB modem device is connected -- TODO confirm;
# glob.glob(...)[0] raises IndexError when none is present.
connection = serial.Serial(glob.glob('/dev/tty.usbmodem*')[0], 115200)

while True:
    line = connection.readline()
    # Messages have the form "<command> <argument>", e.g. "pad 3".
    command, argument = line.strip().split(' ', 1)
    if command == 'pad':
        number = int(argument)
        sonic_pi.run('cue :pad, number: {}'.format(number))
| {
"content_hash": "f72cbf3653b11e751576b6aca5f7292a",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 70,
"avg_line_length": 23.533333333333335,
"alnum_prop": 0.6430594900849859,
"repo_name": "CoderDojoScotland/coderdojo-sequencer",
"id": "02b35a54ef3a3921afdbf20a10e201965cb972e9",
"size": "354",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "touchpad_listener/touchpad_listener.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Arduino",
"bytes": "845"
},
{
"name": "Python",
"bytes": "5409"
}
],
"symlink_target": ""
} |
from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console
from .ansi import Fore, Back, Style, Cursor
from .ansitowin32 import AnsiToWin32

# Package version; the "dev" suffix marks an unreleased development build.
__version__ = '0.4.7dev1'
| {
"content_hash": "39c6df4fc473fadf93d7106f31ddc783",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 85,
"avg_line_length": 32.5,
"alnum_prop": 0.7589743589743589,
"repo_name": "tartley/colorama",
"id": "ab62b5328b8d915f9a9fbe75f3bfc50020ef62d8",
"size": "270",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "colorama/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1579"
},
{
"name": "PowerShell",
"bytes": "1928"
},
{
"name": "Python",
"bytes": "57581"
},
{
"name": "Shell",
"bytes": "1699"
}
],
"symlink_target": ""
} |
"""Standalone helper functions"""
from maya import cmds, mel
def read(node):
    """Return user-defined attributes from `node`

    Arguments:
        node (str): Name of the Maya node to inspect.

    Returns:
        dict: Mapping of attribute name to its value; attributes whose
            values cannot be retrieved are mapped to None.
    """
    data = dict()

    for attr in cmds.listAttr(node, userDefined=True) or list():
        try:
            value = cmds.getAttr(node + "." + attr)
        # FIX: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception so those still propagate.
        except Exception:
            # Some attributes cannot be read directly,
            # such as mesh and color attributes. These
            # are considered non-essential to this
            # particular publishing pipeline.
            value = None

        data[attr] = value

    return data
def export_alembic(nodes, file, frame_range=None, uv_write=True):
    """Wrap native MEL command with limited set of arguments

    Arguments:
        nodes (list): Long names of nodes to cache
        file (str): Absolute path to output destination
        frame_range (tuple, optional): Start- and end-frame of cache,
            default to current animation range.
        uv_write (bool, optional): Whether or not to include UVs,
            default to True

    Returns:
        The result of evaluating the generated AbcExport MEL command.
    """
    # BUGFIX: the default must be resolved *before* frame_range is formatted
    # into the options list below; previously `"%s %s" % frame_range` ran
    # first and raised a TypeError whenever frame_range was None.
    if frame_range is None:
        frame_range = (
            cmds.playbackOptions(query=True, ast=True),
            cmds.playbackOptions(query=True, aet=True)
        )

    options = [
        ("file", file),
        ("frameRange", "%s %s" % frame_range),
    ] + [("root", mesh) for mesh in nodes]

    if uv_write:
        options.append(("uvWrite", ""))

    # Generate MEL command
    mel_args = ["-{0} {1}".format(key, value) for key, value in options]
    mel_cmd = "AbcExport -j \"{0}\"".format(" ".join(mel_args))

    return mel.eval(mel_cmd)
def imprint(node, data):
    """Write `data` to `node` as userDefined attributes

    Arguments:
        node (str): Long name of node
        data (dict): Dictionary of key/value pairs

    Raises:
        TypeError: on values that are not bool, string, int or float.
    """
    # Ordered dispatch table: (python type, addAttr kwargs, setAttr kwargs).
    # bool must come before int because bool is a subclass of int.
    type_table = [
        (bool, {"attributeType": "bool"},
         {"keyable": False, "channelBox": True}),
        (basestring, {"dataType": "string"},
         {"type": "string"}),
        (int, {"attributeType": "long"},
         {"keyable": False, "channelBox": True}),
        (float, {"attributeType": "double"},
         {"keyable": False, "channelBox": True}),
    ]

    for key, value in data.items():
        for klass, add_kwargs, set_kwargs in type_table:
            if isinstance(value, klass):
                break
        else:
            raise TypeError("Unsupported type: %r" % type(value))

        cmds.addAttr(node, longName=key, **add_kwargs)
        cmds.setAttr(node + "." + key, value, **set_kwargs)
| {
"content_hash": "fe6661e1ee89bc98abd0890f117a57d4",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 69,
"avg_line_length": 29.26086956521739,
"alnum_prop": 0.5720653789004457,
"repo_name": "pyblish/pyblish-starter",
"id": "f6f8003f46ffbc92d21b72aa6674b16418e83734",
"size": "2692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyblish_starter/maya/lib.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "70739"
}
],
"symlink_target": ""
} |
from uai.operation.base_operation import BaseUaiServiceOp
from uai.api.modify_uai_srv_version_memo import ModifyUAISrvVersionMemoApiOp
class UaiServiceModifySrvVersionMemoOp(BaseUaiServiceOp):
    """
    Base Modify Version Memo Tool Class with UAI
    """
    def __init__(self, parser):
        # BUGFIX: the constructor was misspelled `__int__`, so it was never
        # invoked and base-class initialisation was silently skipped.
        super(UaiServiceModifySrvVersionMemoOp, self).__init__(parser)

    def _add_memo_args(self, memo_parser):
        """Register the memo-specific CLI arguments on `memo_parser`."""
        memo_parse = memo_parser.add_argument_group(
            'Memo-Params', 'Modify Version Memo Parameters, help to modify version memo'
        )
        memo_parse.add_argument(
            '--service_id',
            type=str,
            required=True,
            help='the service id of UAI Inference'
        )
        memo_parse.add_argument(
            '--srv_version',
            type=str,
            required=True,
            help='the service version of UAI Inference'
        )
        memo_parse.add_argument(
            '--srv_version_memo',
            type=str,
            required=True,
            help='the modified service version memo of UAI Inference'
        )

    def _add_args(self):
        """Add base arguments plus the memo arguments."""
        super(UaiServiceModifySrvVersionMemoOp, self)._add_args()
        self._add_memo_args(self.parser)

    def _parse_memo_args(self, args):
        """Copy the memo-related values out of the parsed `args` dict."""
        self.service_id = args['service_id']
        self.srv_version = args['srv_version']
        self.srv_version_memo = args['srv_version_memo']

    def _parse_args(self, args):
        """Parse base arguments plus the memo arguments."""
        super(UaiServiceModifySrvVersionMemoOp, self)._parse_args(args)
        self._parse_memo_args(args)

    def cmd_run(self, args):
        """Parse `args`, call the ModifyUAISrvVersionMemo API and return
        its (success, response) pair.

        Raises:
            RuntimeError: when the API call reports failure.
        """
        self._parse_args(args)

        modifyOp = ModifyUAISrvVersionMemoApiOp(
            public_key=self.public_key,
            private_key=self.private_key,
            project_id=self.project_id,
            region=self.region,
            zone=self.zone,
            service_id=self.service_id,
            srv_version=self.srv_version,
            srv_version_memo=self.srv_version_memo
        )

        succ, rsp = modifyOp.call_api()
        if not succ:
            raise RuntimeError('Call ModifyUAISrvVersionMemo error, Error message: {0}'.format(rsp['Message']))
        return succ, rsp
| {
"content_hash": "91be5e9ddb9dedd095a73b7587f706e3",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 112,
"avg_line_length": 34.375,
"alnum_prop": 0.600909090909091,
"repo_name": "ucloud/uai-sdk",
"id": "9afcfce19778938f416b2d91c9929549fda56efd",
"size": "2887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uai/operation/modify_version_memo/modify_version_memo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "470557"
}
],
"symlink_target": ""
} |
import unittest
from app.schema_loader.schema_loader import load_schema
class SchemaLoaderTest(unittest.TestCase):
    """Tests for app.schema_loader.load_schema(eq_id, form_type[, language])."""

    def test_load_schema(self):
        # Known eq_id/form_type pair resolves to a schema.
        self.assertIsNotNone(load_schema("1", "0203"))

    def test_load_schema_with_different_form_type(self):
        self.assertIsNotNone(load_schema("1", "0205"))

    def test_load_schema_with_invalid_form_type(self):
        # Unknown form type returns None rather than raising.
        self.assertIsNone(load_schema("1", "0309"))

    def test_load_schema_with_invalid_eq_id(self):
        self.assertIsNone(load_schema("99", "0205"))

    def test_load_schema_with_default_language_code(self):
        # Omitting the language argument falls back to the default.
        self.assertIsNotNone(load_schema("test", "language"))

    def test_load_schema_with_passing_default_language_code(self):
        self.assertIsNotNone(load_schema("test", "language", "en"))

    def test_load_schema_with_language_code(self):
        # Welsh variant of the same schema.
        self.assertIsNotNone(load_schema("test", "language", "cy"))
| {
"content_hash": "d20417d6cc7a402b290f0bc324121b73",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 67,
"avg_line_length": 33.7037037037037,
"alnum_prop": 0.689010989010989,
"repo_name": "qateam123/eq",
"id": "28247e0054739c0ae9df59e0d62072673374a4f8",
"size": "910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/app/schema_loader/test_schema_loader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56444"
},
{
"name": "HTML",
"bytes": "64720"
},
{
"name": "JavaScript",
"bytes": "752517"
},
{
"name": "Python",
"bytes": "735531"
},
{
"name": "Shell",
"bytes": "7685"
}
],
"symlink_target": ""
} |
"""
Tests for some things in L{loadtest.population}.
"""
from twisted.trial.unittest import TestCase
from contrib.performance.loadtest.population import ReportStatistics
class ReportStatisticsTests(TestCase):
    """
    Tests for L{loadtest.population.ReportStatistics}.
    """
    def _observe_response(self, logger, method='GET', success=True,
                          duration=2.5, user='user01', client_id="1234"):
        """
        Feed a single request observation into C{logger}, supplying the
        boilerplate fields shared by every sample in this suite.

        This helper replaces dozens of copy-pasted C{logger.observe(dict(...))}
        calls in the original version of these tests.
        """
        logger.observe(dict(
            type='response', method=method, success=success,
            duration=duration, user=user, client_type="test",
            client_id=client_id))

    def test_countUsers(self):
        """
        L{ReportStatistics.countUsers} returns the number of users observed to
        have acted in the simulation.
        """
        logger = ReportStatistics()
        users = ['user01', 'user02', 'user03']
        for user in users:
            self._observe_response(logger, duration=1.23, user=user)
        self.assertEqual(len(users), logger.countUsers())

    def test_countClients(self):
        """
        L{ReportStatistics.countClients} returns the number of clients observed
        to have acted in the simulation.
        """
        logger = ReportStatistics()
        clients = ['c01', 'c02', 'c03']
        for client in clients:
            self._observe_response(logger, duration=1.23, client_id=client)
        self.assertEqual(len(clients), logger.countClients())

    def test_clientFailures(self):
        """
        L{ReportStatistics.countClientFailures} returns the number of clients
        observed to have failed in the simulation.
        """
        logger = ReportStatistics()
        clients = ['c01', 'c02', 'c03']
        for client in clients:
            logger.observe(dict(
                type='client-failure', reason="testing %s" % (client,)))
        self.assertEqual(len(clients), logger.countClientFailures())

    def test_simFailures(self):
        """
        L{ReportStatistics.countSimFailures} returns the number of clients
        observed to have caused an error in the simulation.
        """
        logger = ReportStatistics()
        clients = ['c01', 'c02', 'c03']
        for client in clients:
            logger.observe(dict(
                type='sim-failure', reason="testing %s" % (client,)))
        self.assertEqual(len(clients), logger.countSimFailures())

    def test_noFailures(self):
        """
        If fewer than 1% of requests fail, fewer than 1% of requests take 5
        seconds or more, and fewer than 5% of requests take 3 seconds or more,
        L{ReportStatistics.failures} returns an empty list.
        """
        logger = ReportStatistics()
        self._observe_response(logger, duration=2.5)
        self.assertEqual([], logger.failures())

    def test_requestFailures(self):
        """
        If more than 1% of requests fail, L{ReportStatistics.failures} returns
        a list containing a string describing this.
        """
        logger = ReportStatistics()
        for _ignore in range(98):
            self._observe_response(logger)
        self._observe_response(logger, success=False)
        self.assertEqual(
            ["Greater than 1% GET failed"],
            logger.failures())

    def test_threeSecondFailure(self):
        """
        If more than 5% of requests take longer than 3 seconds,
        L{ReportStatistics.failures} returns a list containing a string
        describing that.
        """
        logger = ReportStatistics()
        for _ignore in range(94):
            self._observe_response(logger)
        for _ignore in range(5):
            self._observe_response(logger, duration=3.5, user='user02')
        self.assertEqual(
            ["Greater than 5% GET exceeded 3 second response time"],
            logger.failures())

    def test_fiveSecondFailure(self):
        """
        If more than 1% of requests take longer than 5 seconds,
        L{ReportStatistics.failures} returns a list containing a string
        describing that.
        """
        logger = ReportStatistics()
        for _ignore in range(98):
            self._observe_response(logger)
        self._observe_response(logger, duration=5.5)
        self.assertEqual(
            ["Greater than 1% GET exceeded 5 second response time"],
            logger.failures())

    def test_methodsCountedSeparately(self):
        """
        The counts for one method do not affect the results of another method.
        """
        logger = ReportStatistics()
        for _ignore in range(99):
            self._observe_response(logger, method='GET')
            self._observe_response(logger, method='POST')
        # One failure per method is exactly 1% -- below the >1% threshold.
        self._observe_response(logger, method='GET', success=False)
        self._observe_response(logger, method='POST', success=False)
        self.assertEqual([], logger.failures())

    def test_bucketRequest(self):
        """
        PUT(xxx-huge/large/medium/small} have different thresholds. Test that
        requests straddling each of those are correctly determined to be
        failures or not.
        """
        _thresholds = {
            "requests": {
                "limits": [0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
                "thresholds": {
                    "default": [100.0, 100.0, 100.0, 5.0, 1.0, 0.5, 0.0],
                    "PUT{organizer-small}": [100.0, 50.0, 25.0, 5.0, 1.0, 0.5, 0.0],
                    "PUT{organizer-medium}": [100.0, 100.0, 50.0, 25.0, 5.0, 1.0, 0.5],
                    "PUT{organizer-large}": [100.0, 100.0, 100.0, 50.0, 25.0, 5.0, 1.0],
                    "PUT{organizer-huge}": [100.0, 100.0, 100.0, 100.0, 100.0, 50.0, 25.0],
                }
            }
        }

        # Each case: (method, observed durations, expected failure report).
        # The scenarios straddle each bucket's threshold from just below to
        # just above, mirroring the original hand-unrolled tests.
        cases = [
            ('PUT{organizer-small}', [0.2, 0.2, 0.2, 0.2], []),
            ('PUT{organizer-small}', [0.2, 0.6, 0.6, 0.6],
             ["Greater than 50% PUT{organizer-small} exceeded 0.5 second response time"]),
            ('PUT{organizer-medium}', [0.2, 0.6, 0.6, 0.6], []),
            ('PUT{organizer-medium}', [0.2, 1.6, 1.6, 1.6],
             ["Greater than 50% PUT{organizer-medium} exceeded 1 second response time"]),
            ('PUT{organizer-large}', [0.2, 1.6, 1.6, 1.6], []),
            ('PUT{organizer-large}', [0.2, 3.6, 3.6, 3.6],
             ["Greater than 50% PUT{organizer-large} exceeded 3 second response time"]),
            ('PUT{organizer-huge}', [12.0, 8, 11.0, 9.0], []),
            ('PUT{organizer-huge}', [12.0, 9.0, 12.0, 42.42],
             ["Greater than 50% PUT{organizer-huge} exceeded 10 second response time"]),
        ]

        for method, durations, expected in cases:
            logger = ReportStatistics(thresholds=_thresholds)
            for duration in durations:
                self._observe_response(logger, method=method, duration=duration)
            self.assertEqual(expected, logger.failures())
| {
"content_hash": "3f29c5110f7f84a4cb9955b623fe8aeb",
"timestamp": "",
"source": "github",
"line_count": 326,
"max_line_length": 97,
"avg_line_length": 45.352760736196316,
"alnum_prop": 0.566452485627325,
"repo_name": "trevor/calendarserver",
"id": "beae49d14954e241db3da82eed99fec6dd1ee599",
"size": "15394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/performance/loadtest/test_population.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4214"
},
{
"name": "D",
"bytes": "13143"
},
{
"name": "JavaScript",
"bytes": "76566"
},
{
"name": "Python",
"bytes": "9260291"
},
{
"name": "Shell",
"bytes": "78964"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
from wagtail.wagtailcore import blocks
from wagtail.wagtailcore.fields import StreamField
class BioField(StreamField):
    """A StreamField preset for biography content: a single "Paragraph"
    rich-text block type.

    NOTE(review): the ``block_types`` argument is accepted but then
    unconditionally overwritten below, so caller-supplied block types are
    ignored -- presumably intentional (fixed bio schema); confirm.
    """
    def __init__(self, block_types=None, **kwargs):
        block_types = [
            ('Paragraph', blocks.RichTextBlock(icon="doc-full")),
        ]
        super(BioField, self).__init__(block_types, **kwargs)
| {
"content_hash": "de969d81f72669689e7cb54bdf55c01b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 65,
"avg_line_length": 30.307692307692307,
"alnum_prop": 0.6725888324873096,
"repo_name": "CIGIHub/greyjay",
"id": "5662758773d8e0731b26f9d0809d51e41e443293",
"size": "394",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "greyjay/people/fields.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "76438"
},
{
"name": "JavaScript",
"bytes": "5702"
},
{
"name": "Python",
"bytes": "290198"
}
],
"symlink_target": ""
} |
"""
Configuration for sts samples.
"""
import logging
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.auth.bce_credentials import BceCredentials
HOST = b'http://cfc.bj.baidubce.com'
AK = b'ak'
SK = b'sk'
logger = logging.getLogger('baidubce.services.cfc.client')
fh = logging.FileHandler('sample.log')
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.setLevel(logging.DEBUG)
logger.addHandler(fh)
config = BceClientConfiguration(credentials=BceCredentials(AK, SK), endpoint=HOST)
| {
"content_hash": "08abb4797ecc745f1da175a1a14591e7",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 85,
"avg_line_length": 28.09090909090909,
"alnum_prop": 0.7702265372168284,
"repo_name": "baidubce/bce-sdk-python",
"id": "31936744b0d16529f61174cee31fc0e591330bec",
"size": "1211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sample/cfc/cfc_sample_conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1275911"
},
{
"name": "Shell",
"bytes": "561"
}
],
"symlink_target": ""
} |
from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP


class PedidoListar(Pedido):
    """Request ("pedido") object for list operations.

    Adds no behaviour beyond the base Pedido class: it only forwards the
    environment variables to the base constructor.
    """
    def __init__(self, variaveis_do_ambiente):
        # variaveis_do_ambiente: the request's environment variables.
        super(PedidoListar, self).__init__(variaveis_do_ambiente)
"content_hash": "de1c9b83410724fa5ea350b891209d36",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 59,
"avg_line_length": 30.285714285714285,
"alnum_prop": 0.7877358490566038,
"repo_name": "AEDA-Solutions/matweb",
"id": "b6660ec8295e89fc89fb6d03621ed343008e7632",
"size": "212",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/Models/Horario/PedidoListar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "475557"
},
{
"name": "HTML",
"bytes": "12097161"
},
{
"name": "JavaScript",
"bytes": "190487"
},
{
"name": "PHP",
"bytes": "1122"
},
{
"name": "Python",
"bytes": "152996"
},
{
"name": "Shell",
"bytes": "80"
}
],
"symlink_target": ""
} |
""" Runner views
"""
from django.shortcuts import render
from haystack.views import SearchView
from runners.models import Runner
def results(request, runner_pk):
    """Render the results page for one runner, most recent meet first.
    """
    runner = Runner.objects.get(pk=runner_pk)
    ordered_results = runner.result_set.all().order_by('-meet__date')
    context = {
        'runner': runner,
        'results': ordered_results,
    }
    return render(request, 'runners/results.html', context)
def search(request):
    """Runner search

    Delegates to Haystack's SearchView: a fresh view instance is created
    for every request and immediately called with it.
    """
    return SearchView(template='search/search.html')(request)
| {
"content_hash": "e60e4eb818af522a8e8cd48e80f2dc27",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 63,
"avg_line_length": 23,
"alnum_prop": 0.6824196597353497,
"repo_name": "ColCarroll/bugbug",
"id": "cf3016a6614ce7e1553e2c1a2a7f836944e0c42c",
"size": "529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runners/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "214027"
},
{
"name": "JavaScript",
"bytes": "106778"
},
{
"name": "Python",
"bytes": "52381"
}
],
"symlink_target": ""
} |
from pandas import DataFrame, Series
import numpy
def avg_medal_count():
    """Return the mean number of bronze medals won by countries that earned
    at least one gold medal (2014 Winter Olympics data).

    Builds a DataFrame of per-country medal counts, filters it with a
    boolean mask on the 'gold' column, and averages the matching 'bronze'
    values with numpy.mean.
    """
    countries = ['Russian Fed.', 'Norway', 'Canada', 'United States',
                 'Netherlands', 'Germany', 'Switzerland', 'Belarus',
                 'Austria', 'France', 'Poland', 'China', 'Korea',
                 'Sweden', 'Czech Republic', 'Slovenia', 'Japan',
                 'Finland', 'Great Britain', 'Ukraine', 'Slovakia',
                 'Italy', 'Latvia', 'Australia', 'Croatia', 'Kazakhstan']

    gold = [13, 11, 10, 9, 8, 8, 6, 5, 4, 4, 4, 3, 3, 2, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0]
    silver = [11, 5, 10, 7, 7, 6, 3, 0, 8, 4, 1, 4, 3, 7, 4, 2, 4, 3, 1, 0, 0, 2, 2, 2, 1, 0]
    bronze = [9, 10, 5, 12, 9, 5, 2, 1, 5, 7, 1, 2, 2, 6, 2, 4, 3, 1, 2, 1, 0, 6, 2, 1, 0, 1]

    medal_table = DataFrame({'country_name': Series(countries),
                             'gold': Series(gold),
                             'silver': Series(silver),
                             'bronze': Series(bronze)})

    # Boolean mask selecting countries with one or more gold medals.
    has_gold = medal_table['gold'] >= 1
    return numpy.mean(medal_table['bronze'][has_gold])

print(avg_medal_count())
"content_hash": "42859697943a2474dab0dd2d24feef58",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 93,
"avg_line_length": 39.02,
"alnum_prop": 0.5750896975909789,
"repo_name": "coolsgupta/machine_learning_nanodegree",
"id": "1e8b293fcb8a813ae46639e7a5da50baa9050f1d",
"size": "1951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Model_Evaluation_and_Validation/numpy_and_pandas_tutorial/eg_6.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "82767"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration.  Creates the ``Email`` model:
    # an integer ``type`` code (semantics defined in application code —
    # not visible here), subject/message text fields, and a foreign key
    # to polls.Question that cascades on delete.
    dependencies = [
        ('polls', '0052_response_comment'),
    ]
    operations = [
        migrations.CreateModel(
            name='Email',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.IntegerField()),
                ('subject', models.CharField(max_length=100)),
                ('message', models.CharField(max_length=500)),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question')),
            ],
        ),
    ]
| {
"content_hash": "b102ba75e8d0eaa95ebf1989388bcba8",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 114,
"avg_line_length": 31.833333333333332,
"alnum_prop": 0.5850785340314136,
"repo_name": "PrefPy/opra",
"id": "9e854f5592801fb5f03585af8d9a894004f14739",
"size": "836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compsocsite/polls/migrations/0053_email.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "61532"
},
{
"name": "HTML",
"bytes": "574736"
},
{
"name": "JavaScript",
"bytes": "144116"
},
{
"name": "Python",
"bytes": "347118"
}
],
"symlink_target": ""
} |
import mpld3
import os
import glob
import matplotlib
from . import export
from unittest import SkipTest
# Use a non-interactive backend so the test plots can render headless.
matplotlib.use('Agg')
# Glob pattern for the plot scripts, plus the directories holding the
# stored reference snapshots and the freshly rendered result images.
TEST_PLOT_FILES = os.path.join(mpld3.BASE_PATH, 'mpld3/test_plots/*.py')
TEST_PLOT_SNAPSHOT_DIR = os.path.join(mpld3.BASE_PATH, 'mpld3/test_plots_snapshots/')
TEST_PLOT_RESULT_SNAPSHOT_DIR = os.path.join(mpld3.BASE_PATH, 'mpld3/test_plots_results/')
def test_snapshots():
    """Render every test plot that has a stored snapshot and compare the
    rendered d3 output against it, printing a per-plot pass/fail line.
    """
    print("Checking test_plots against stored snapshots")
    # Map each stored snapshot path to the plot script that produces it,
    # skipping plots that have no snapshot on disk yet.
    expected_snapshots = {}
    for plot_file in glob.glob(TEST_PLOT_FILES):
        snapshot = export.snapshot_path(plot_file, TEST_PLOT_SNAPSHOT_DIR)
        if os.path.isfile(snapshot):
            expected_snapshots[snapshot] = plot_file
    # Materialize the (snapshot, plot) pairs once so the rendering order
    # below is explicit, instead of relying on separate .keys()/.values()
    # iterations staying in sync.
    pairs = list(expected_snapshots.items())
    results = export.snapshot_mpld3_plots_consecutive(
        [plot_file for _, plot_file in pairs],
        TEST_PLOT_RESULT_SNAPSHOT_DIR)
    message_frmt = "Unexpected plot output in d3: {plot_file} {percent}"
    message_frmt_success = "Plot test passed: {plot_file}"
    # Avoid shadowing the result/expectation lists with the loop variables.
    for result_img, (snapshot_img, plot_file) in zip(results, pairs):
        percent_diff = export.is_images_identical(result_img, snapshot_img,
                                                  output_bool=False)
        if percent_diff == 0:
            print(message_frmt_success.format(plot_file=plot_file))
        else:
            print(message_frmt.format(plot_file=plot_file,
                                      percent=percent_diff))
| {
"content_hash": "7a146cdef8598f3cd4b75a25f574f7f9",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 104,
"avg_line_length": 38.75675675675676,
"alnum_prop": 0.699442119944212,
"repo_name": "mpld3/mpld3",
"id": "9721310843475d4b8828b9ad89ad6d0bfa44bd58",
"size": "1434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpld3/tests/test_d3_snapshots.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "234962"
},
{
"name": "Jupyter Notebook",
"bytes": "1911743"
},
{
"name": "Makefile",
"bytes": "943"
},
{
"name": "Python",
"bytes": "149910"
},
{
"name": "Vim script",
"bytes": "25"
}
],
"symlink_target": ""
} |
import sys
import argparse
import webbrowser
from flask import Flask, Blueprint, url_for
import tornado.ioloop
import tornado.web
import tornado.wsgi
import sockjs.tornado
blueprint = Blueprint('views', __name__)
# The blueprint must exist before importing views, which registers its
# routes on it (hence the import is placed after the assignment).
from . import views
def static(filename):
    """Resolve *filename* against Flask's built-in static endpoint."""
    endpoint = 'static'
    return url_for(endpoint, filename=filename)
def start_server(args):
    """Build the Flask app and serve it through Tornado with SockJS support.

    args: argparse.Namespace with ``port``, ``debug`` and ``url_prefix``
    attributes (see ``main``).
    """
    app = Flask(__name__)
    app.config['SECRET_KEY'] = 'secret'  # NOTE(review): hard-coded secret; acceptable only for a local tool
    app.jinja_env.globals['static'] = static
    blueprint.url_prefix = args.url_prefix
    app.register_blueprint(blueprint)
    # app.run(port=args.port, debug=args.debug)
    # Wrap the WSGI app so Tornado can serve it alongside the SockJS routes.
    wsgi_app = tornado.wsgi.WSGIContainer(app)
    condajs_ws = sockjs.tornado.SockJSRouter(views.CondaJsWebSocketRouter, '/condajs_ws')
    routes = condajs_ws.urls
    # Anything that is not a websocket URL falls through to the Flask app.
    routes.append((r".*", tornado.web.FallbackHandler, dict(fallback=wsgi_app)))
    application = tornado.web.Application(routes, debug=args.debug)
    try:
        application.listen(args.port)
    except OSError as e:
        # Typically the port is already in use; report and bail out.
        print("There was an error starting the server:")
        print(e)
        return
    ioloop = tornado.ioloop.IOLoop.instance()
    if not args.debug:
        # Open a browser tab once the loop starts (skipped in debug mode).
        callback = lambda: webbrowser.open_new_tab('http://localhost:%s' % args.port)
        ioloop.add_callback(callback)
    ioloop.start()
def main():
    """Parse command-line options and launch the Conda web UI server."""
    parser = argparse.ArgumentParser(description="Web user interface for Conda")
    parser.add_argument("-d", "--debug", action="store_true", default=False)
    parser.add_argument("-p", "--port", type=int, default=4888)
    parser.add_argument("--url-prefix", default=None)
    options = parser.parse_args()
    start_server(options)
if __name__ == '__main__':
main()
| {
"content_hash": "c49eb18f3e4a1ce4d4349660b786215a",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 89,
"avg_line_length": 28.43103448275862,
"alnum_prop": 0.6810187992722863,
"repo_name": "conda/conda-ui",
"id": "b9d4816270e6dae2c1e5e31841d4091561578caa",
"size": "1649",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "conda_ui/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "945"
},
{
"name": "CoffeeScript",
"bytes": "55112"
},
{
"name": "JavaScript",
"bytes": "533123"
},
{
"name": "Python",
"bytes": "5775"
},
{
"name": "Shell",
"bytes": "311"
}
],
"symlink_target": ""
} |
import csv
import os
import frontmatter
# Input/output locations, relative to the repository root.
CSV_DIR = './highlights/csv'
HIGHLIGHTS_DIR = './highlights'
def create_markdown_file(filename: str):
    """Convert one exported-highlights CSV into a markdown page.

    Reads ``filename`` from CSV_DIR and writes ``filename + '.md'`` into
    HIGHLIGHTS_DIR with a frontmatter header (page layout + capitalized
    title taken from the second CSV row).
    """
    title = ''
    pieces = []
    with open(os.path.join(CSV_DIR, filename)) as csv_file:
        for row_number, row in enumerate(csv.reader(csv_file)):
            if row_number == 1:
                # The second row of the export holds the book title.
                title = row[0]
            if 'Highlight' in row[0]:
                # Quote the highlight text and separate entries with a rule.
                pieces.append('"{}"'.format(row[3]))
                pieces.append('\n\n')
                pieces.append('---------')
                pieces.append('\n\n')
    post = frontmatter.Post(''.join(pieces))
    post['layout'] = 'page'
    post['title'] = title.capitalize()
    output_path = os.path.join(HIGHLIGHTS_DIR, filename + '.md')
    with open(output_path, 'w') as md_file:
        print(frontmatter.dumps(post), file=md_file)
def main():
    """Process every highlights CSV found in CSV_DIR."""
    for entry in os.listdir(CSV_DIR):
        create_markdown_file(entry)
if __name__ == '__main__':
main()
| {
"content_hash": "ecb8a7eb397b68fdfbde8392a20ce1bf",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 72,
"avg_line_length": 26.4,
"alnum_prop": 0.5357142857142857,
"repo_name": "paramsingh/paramsingh.github.io",
"id": "68e043477bf1c2bfc36383a8d0d2e39da7cd32f4",
"size": "924",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kindle_highlights_to_markdown.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "18477"
},
{
"name": "Python",
"bytes": "5585"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "SCSS",
"bytes": "15505"
},
{
"name": "TeX",
"bytes": "7157"
}
],
"symlink_target": ""
} |
import time
from bluetooth import *
def banner():
    # Identification banner shown when the scanner starts.
    print("[***] Bluetooth Detector p201 [***]")
# Addresses reported so far, so each device is announced only once.
alreadyFound = []


def lookup_name(device):
    """Return the human-readable name for *device* (a MAC address string).

    This def shadows bluetooth.lookup_name pulled in by ``from bluetooth
    import *`` above, so the library function is reached via the module
    import to avoid the original code's unbounded self-recursion.
    """
    import bluetooth
    return str(bluetooth.lookup_name(device))


def findDevs():
    """Scan for nearby Bluetooth devices and report any not seen before."""
    foundDevs = discover_devices(lookup_names=True)
    for (addr, name) in foundDevs:
        if addr not in alreadyFound:
            print("[+] Found Bluetooth Device " + str(name))
            print("[+] MAC address: " + str(addr))
            alreadyFound.append(addr)


def main():
    banner()
    # Re-scan every 5 seconds; the original ran this loop at import time,
    # before findDevs was even defined, which could never work.
    while True:
        findDevs()
        time.sleep(5)


if __name__ == '__main__':
    main()
| {
"content_hash": "3d43386f7c6e1db46ed06a836a1a7092",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 50,
"avg_line_length": 17.1,
"alnum_prop": 0.6510721247563352,
"repo_name": "n1cfury/ViolentPython",
"id": "ef913a36f0b2b957e243dabcb602cee4a1403e98",
"size": "535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "btFind.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63274"
}
],
"symlink_target": ""
} |
import os
import shutil
import subprocess
import onnx
from onnx import numpy_helper
# Directory of this script, and the sibling testdata/ tree where generated
# test cases are written.
TOP_DIR = os.path.realpath(os.path.dirname(__file__))
DATA_DIR = os.path.join(TOP_DIR, "..", "testdata/")
def prepare_dir(path):
    """Recreate *path* as an empty directory, discarding prior contents."""
    already_there = os.path.exists(path)
    if already_there:
        shutil.rmtree(path)
    os.makedirs(path)
def _extract_value_info(arr, name, ele_type=None):
    """Build an ONNX ValueInfoProto describing *arr*.

    The element type defaults to the ONNX tensor type matching the numpy
    dtype of *arr* unless a truthy *ele_type* is supplied.
    """
    if not ele_type:
        ele_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype]
    return onnx.helper.make_tensor_value_info(
        name=name,
        elem_type=ele_type,
        shape=arr.shape,
    )
def generate_data(graph, inputs, outputs, name):
    """Serialize *graph* plus one test data set under DATA_DIR/name.

    Writes model.onnx and a ``test_data_set_0`` directory containing
    input_<i>.pb / output_<i>.pb tensor protos.
    """
    case_dir = os.path.join(DATA_DIR, name)
    prepare_dir(case_dir)
    model = onnx.helper.make_model(graph)
    with open(os.path.join(case_dir, "model.onnx"), "wb") as model_file:
        model_file.write(model.SerializeToString())
    data_set_dir = os.path.join(case_dir, "test_data_set_0")
    prepare_dir(data_set_dir)

    def _dump(arrays, graph_values, prefix):
        # Serialize each numpy array as a TensorProto named after the
        # corresponding graph input/output.
        for idx, array in enumerate(arrays):
            proto = numpy_helper.from_array(array, graph_values[idx].name)
            path = os.path.join(data_set_dir, "{}_{}.pb".format(prefix, idx))
            with open(path, "wb") as pb_file:
                pb_file.write(proto.SerializeToString())

    _dump(inputs, model.graph.input, "input")
    _dump(outputs, model.graph.output, "output")
def expect(
    node,  # type: onnx.NodeProto
    inputs,
    outputs,
    name,
    **kwargs,
):  # type: (...) -> None
    """Build a single-node test graph, dump its data set and run it
    through the locally-built onnx_test_runner binary.

    node:    the NodeProto under test.
    inputs:  numpy arrays fed to the node's (non-empty) inputs.
    outputs: expected numpy arrays for the node's (non-empty) outputs.
    name:    test case name; data lands in DATA_DIR/name.
    kwargs:  may carry 'input_types' / 'output_types' lists overriding
             the tensor element types inferred from the arrays.
    """
    present_inputs = [x for x in node.input if (x != "")]
    present_outputs = [x for x in node.output if (x != "")]
    # kwargs.pop with a default replaces the original get-then-del dance
    # and the redundant str(...) wrapping of literal string keys.
    input_types = kwargs.pop("input_types", [None] * len(inputs))
    output_types = kwargs.pop("output_types", [None] * len(outputs))
    inputs_vi = [
        _extract_value_info(arr, arr_name, input_type)
        for arr, arr_name, input_type in zip(inputs, present_inputs, input_types)
    ]
    outputs_vi = [
        _extract_value_info(arr, arr_name, output_type)
        for arr, arr_name, output_type in zip(outputs, present_outputs, output_types)
    ]
    graph = onnx.helper.make_graph(nodes=[node], name=name, inputs=inputs_vi, outputs=outputs_vi)
    generate_data(graph, inputs, outputs, name)
    cwd = os.getcwd()
    onnx_test_runner = os.path.join(cwd, "onnx_test_runner")
    subprocess.run([onnx_test_runner, DATA_DIR + name], check=True, cwd=cwd)
| {
"content_hash": "dd52c2dc013bdf06eaa1cb342934bf0c",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 97,
"avg_line_length": 34.653333333333336,
"alnum_prop": 0.6344747979992305,
"repo_name": "microsoft/onnxruntime",
"id": "521938b14b65e9ee21390682649570b105917eda",
"size": "2783",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "onnxruntime/test/python/contrib_ops/onnx_contrib_ops_helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1763425"
},
{
"name": "Batchfile",
"bytes": "17040"
},
{
"name": "C",
"bytes": "955390"
},
{
"name": "C#",
"bytes": "2304597"
},
{
"name": "C++",
"bytes": "39435305"
},
{
"name": "CMake",
"bytes": "514764"
},
{
"name": "CSS",
"bytes": "138431"
},
{
"name": "Cuda",
"bytes": "1104338"
},
{
"name": "Dockerfile",
"bytes": "8089"
},
{
"name": "HLSL",
"bytes": "11234"
},
{
"name": "HTML",
"bytes": "5933"
},
{
"name": "Java",
"bytes": "418665"
},
{
"name": "JavaScript",
"bytes": "212575"
},
{
"name": "Jupyter Notebook",
"bytes": "218327"
},
{
"name": "Kotlin",
"bytes": "4653"
},
{
"name": "Liquid",
"bytes": "5457"
},
{
"name": "NASL",
"bytes": "2628"
},
{
"name": "Objective-C",
"bytes": "151027"
},
{
"name": "Objective-C++",
"bytes": "107084"
},
{
"name": "Pascal",
"bytes": "9597"
},
{
"name": "PowerShell",
"bytes": "16419"
},
{
"name": "Python",
"bytes": "5041661"
},
{
"name": "Roff",
"bytes": "27539"
},
{
"name": "Ruby",
"bytes": "3545"
},
{
"name": "Shell",
"bytes": "116513"
},
{
"name": "Swift",
"bytes": "115"
},
{
"name": "TypeScript",
"bytes": "973087"
}
],
"symlink_target": ""
} |
from flask import Flask
from flaskext.actions import Manager
from chili import app
# Flask-Actions manager for the imported app; default_server_actions
# presumably registers the stock server commands — see Flask-Actions docs.
manager = Manager(app, default_server_actions=True)
if __name__ == "__main__":
    manager.run()
| {
"content_hash": "bffc235a680582f87ebbca6a15b749e8",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 51,
"avg_line_length": 20.333333333333332,
"alnum_prop": 0.7158469945355191,
"repo_name": "clvrobj/chili",
"id": "ad4e0138332955610bdb58ebdf2c17b5ff1cfa01",
"size": "232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3878"
},
{
"name": "HTML",
"bytes": "8106"
},
{
"name": "JavaScript",
"bytes": "251"
},
{
"name": "Python",
"bytes": "17878"
},
{
"name": "Shell",
"bytes": "284"
}
],
"symlink_target": ""
} |
import re
from enum import Enum
from functools import wraps
from .callback import callback, MSG_RECEIVED
def msg_received(func):
    """Register *func* as a callback fired for every received message."""
    @callback(MSG_RECEIVED)
    @wraps(func)
    def wrapper(*args, bot, msg, **kwargs):
        return func(*args, bot=bot, msg=msg, **kwargs)
    return wrapper
def command(cmd, aliases=None):
    """Decorator: run the handler only when the message invokes a command.

    A message matches when its text starts with the bot prefix followed by
    *cmd* or any name in *aliases*.  The *aliases* parameter was previously
    accepted but silently ignored; it is now honoured (default None keeps
    the old single-command behaviour).
    """
    names = [cmd] + list(aliases or [])

    def _command(func):
        @msg_received
        @wraps(func)
        def _wrapper(*args, bot, msg, **kwargs):
            if any(msg.text.startswith('{}{}'.format(bot.pfx, name))
                   for name in names):
                return func(*args, bot=bot, msg=msg, **kwargs)
        return _wrapper
    return _command
def expand(expr):
    """Decorator: fire the handler when regex *expr* matches the message
    text, passing any named capture groups as extra keyword arguments."""
    def _expand(func):
        @msg_received
        @wraps(func)
        def _wrapper(*args, bot, msg, **kwargs):
            found = re.search(expr, msg.text)
            if found is None:
                return None
            merged = dict(kwargs)
            merged.update(found.groupdict())
            return func(*args, bot=bot, msg=msg, **merged)
        return _wrapper
    return _expand
| {
"content_hash": "ff7c47461be48ae87da340b9f2738e6e",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 68,
"avg_line_length": 30.705882352941178,
"alnum_prop": 0.5795019157088123,
"repo_name": "Thom7/telex",
"id": "4b59e9fbba53b85651842bcba3c8f7b76c9ef266",
"size": "1044",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "telex/callbacks/msgreceived.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "149117"
},
{
"name": "Shell",
"bytes": "1452"
}
],
"symlink_target": ""
} |
__author__ = 'study_sun'
# Re-export the package's building blocks so callers can simply do
# ``from spider_base import *``.
from convenient import *
from url_manager import *
from downloader import *
from entity import *
| {
"content_hash": "7a0c9266881d5ff377d90cf7b21576ec",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 25,
"avg_line_length": 24.4,
"alnum_prop": 0.7377049180327869,
"repo_name": "s6530085/FundSpider",
"id": "7ab4122e196015706498dbef74a2be34fb4add7e",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spider_base/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "135052"
}
],
"symlink_target": ""
} |
'''
This script can be run with no arguments, in which case it will produce an
APK with native libraries for all four architectures: arm, arm64, x86, and
x64. You can instead list the architectures you want as arguments to this
script. For example:
python create_apk.py arm x86
The environment variables ANDROID_NDK and ANDROID_HOME must be set to the
locations of the Android NDK and SDK.
Additionally, `ninja` should be in your path.
It assumes that the source tree is in the desired state, e.g. by having
run 'python tools/git-sync-deps' in the root of the skia checkout.
We also assume that the 'resources' directory has been copied to
'platform_tools/android/apps/skqp/src/main/assets', and the
'tools/skqp/download_model' script has been run.
Also:
* If the environment variable SKQP_BUILD_DIR is set, many of the
intermediate build objects will be placed here.
* If the environment variable SKQP_OUTPUT_DIR is set, the final APK
will be placed in this directory.
* If the environment variable SKQP_DEBUG is set, Skia will be compiled
in debug mode.
'''
import os
import re
import subprocess
import sys
import shutil
import time
import skqp_gn_args
def print_cmd(cmd, o):
    """Echo *cmd* (a list of argv strings) to stream *o*, shell-trace style.

    Arguments containing anything outside [A-Za-z0-9_./-] are rendered via
    repr() so the logged line is unambiguous.
    """
    unsafe = re.compile('[^A-Za-z0-9_./-]')
    pieces = ['+ ']
    for arg in cmd:
        shown = repr(arg) if unsafe.search(arg) is not None else arg
        pieces.append(shown + ' ')
    pieces.append('\n')
    o.write(''.join(pieces))
    o.flush()
def check_call(cmd, **kwargs):
    # Echo the command to stdout, then run it; raises CalledProcessError
    # on a non-zero exit status (kwargs are forwarded to subprocess).
    print_cmd(cmd, sys.stdout)
    return subprocess.check_call(cmd, **kwargs)
def find_name(searchpath, filename):
    """Yield every path under *searchpath* whose basename is *filename*."""
    for current_dir, _, names in os.walk(searchpath):
        if filename in names:
            yield os.path.join(current_dir, filename)
def check_ninja():
    """Return True iff a working `ninja` binary is on the PATH."""
    with open(os.devnull, 'w') as devnull:
        try:
            return subprocess.call(['ninja', '--version'],
                                   stdout=devnull, stderr=devnull) == 0
        except OSError:
            # subprocess raises OSError (FileNotFoundError on py3) when
            # the binary does not exist; report "unavailable" instead of
            # crashing — callers only expect a boolean.
            return False
def remove(p):
    """Delete *p* whether it is a file, a symlink, or a directory tree.

    Symlinks are unlinked (never followed), real directories are removed
    recursively, and a missing path is a no-op.
    """
    is_real_dir = os.path.isdir(p) and not os.path.islink(p)
    if is_real_dir:
        shutil.rmtree(p)
    elif os.path.lexists(p):
        os.remove(p)
    assert not os.path.exists(p)
def makedirs(dst):
    """Create directory *dst* (and parents); no-op when it exists."""
    if os.path.exists(dst):
        return
    os.makedirs(dst)
class RemoveFiles(object):
    """Context manager that deletes each given path when the block exits,
    whether or not an exception occurred."""

    def __init__(self, *args):
        # Paths (files, links, or directory trees) to delete on exit.
        self.args = args

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        for path in self.args:
            remove(path)
class ChDir(object):
    """Switch to directory *d* and restore the previous cwd on exit.

    Note: the chdir happens at construction time (__init__), not in
    __enter__ — matching how the script uses it.
    """

    def __init__(self, d):
        self.orig = os.getcwd()
        os.chdir(d)

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        os.chdir(self.orig)
def make_symlinked_subdir(target, working_dir):
    """Create working_dir/<basename(target)> and point *target* at it
    through a relative symlink."""
    real_dir = os.path.join(working_dir, os.path.basename(target))
    makedirs(real_dir)
    link_value = os.path.relpath(real_dir, os.path.dirname(target))
    os.symlink(link_value, target)
def accept_android_license(android_home):
    """Drive `sdkmanager --licenses`, answering 'y' until it exits.

    android_home: path to the Android SDK root.
    """
    proc = subprocess.Popen(
        [android_home + '/tools/bin/sdkmanager', '--licenses'],
        stdin=subprocess.PIPE)
    while proc.poll() is None:
        # Keep feeding 'y' answers once a second until the tool terminates.
        # NOTE(review): writing a str to a pipe only works on Python 2; on
        # Python 3 this needs bytes or universal_newlines — confirm the
        # intended interpreter.
        proc.stdin.write('y\n')
        time.sleep(1)
# pylint: disable=bad-whitespace
# Maps Skia's architecture names (used on the command line and in GN) to
# the Android ABI directory names expected by the APK lib/ layout.
skia_to_android_arch_name_map = {'arm'  : 'armeabi-v7a',
                                 'arm64': 'arm64-v8a'  ,
                                 'x86'  : 'x86'        ,
                                 'x64'  : 'x86_64'     }
def create_apk_impl(opts):
    """Build libskqp_app.so for every requested architecture, then run
    gradle to assemble the universal debug APK and copy it (twice, once
    with an architecture-suffixed name) into opts.final_output_dir.

    Assumes the current working directory is the Skia source root.
    opts: a SkQP_Build_Options instance.
    """
    build_dir, final_output_dir = opts.build_dir, opts.final_output_dir
    assert os.path.exists('bin/gn')  # Did you `tools/git-sync-deps`?
    for d in [build_dir, final_output_dir]:
        makedirs(d)
    apps_dir = 'platform_tools/android/apps'
    app = 'skqp'
    lib = 'lib%s_app.so' % app
    # These are the locations in the tree where the gradle needs or will create
    # not-checked-in files. Treat them specially to keep the tree clean:
    # each is replaced by a symlink into build_dir and removed afterwards.
    remove(build_dir + '/libs')
    build_paths = [apps_dir + '/.gradle',
                   apps_dir + '/' + app + '/build',
                   apps_dir + '/' + app + '/src/main/libs']
    for path in build_paths:
        remove(path)
        try:
            make_symlinked_subdir(path, build_dir)
        except OSError:
            sys.stderr.write('failed to create symlink "%s"\n' % path)
    lib_dir = '%s/%s/src/main/libs' % (apps_dir, app)
    apk_build_dir = '%s/%s/build/outputs/apk' % (apps_dir, app)
    for d in [lib_dir, apk_build_dir]:
        shutil.rmtree(d, True)  # force rebuild
    with RemoveFiles(*build_paths):
        for arch in opts.architectures:
            # Configure and build the native library with GN + ninja,
            # retrying once from a clean state if the first build fails.
            build = os.path.join(build_dir, arch)
            gn_args = opts.gn_args(arch)
            args = ' '.join('%s=%s' % (k, v) for k, v in gn_args.items())
            check_call(['bin/gn', 'gen', build, '--args=' + args])
            try:
                check_call(['ninja', '-C', build, lib])
            except subprocess.CalledProcessError:
                check_call(['ninja', '-C', build, '-t', 'clean'])
                check_call(['ninja', '-C', build, lib])
            # Stage the library where gradle expects it: lib_dir/<ABI name>/.
            dst = '%s/%s' % (lib_dir, skia_to_android_arch_name_map[arch])
            makedirs(dst)
            shutil.copy(os.path.join(build, lib), dst)
        accept_android_license(opts.android_home)
        env_copy = os.environ.copy()
        env_copy['ANDROID_HOME'] = opts.android_home
        env_copy['ANDROID_NDK_HOME'] = opts.android_ndk
        # Why does gradlew need to be called from this directory?
        check_call(['apps/gradlew', '-p' 'apps/' + app,
                    '-P', 'suppressNativeBuild',
                    ':%s:assembleUniversalDebug' % app],
                   env=env_copy, cwd='platform_tools/android')
        apk_name = app + "-universal-debug.apk"
        apk_list = list(find_name(apk_build_dir, apk_name))
        assert len(apk_list) == 1
        out = os.path.join(final_output_dir, apk_name)
        shutil.move(apk_list[0], out)
        sys.stdout.write(out + '\n')
        # Keep a second copy whose name records the architectures built.
        arches = '_'.join(sorted(opts.architectures))
        copy = os.path.join(final_output_dir, "%s-%s-debug.apk" % (app, arches))
        shutil.copyfile(out, copy)
        sys.stdout.write(copy + '\n')
    sys.stdout.write('* * * COMPLETE * * *\n\n')
def create_apk(opts):
    """Locate the Skia source root and run create_apk_impl from there."""
    here = os.path.dirname(__file__)
    skia_dir = os.path.abspath(os.path.join(here, '..', '..'))
    assert os.path.exists(skia_dir)
    with ChDir(skia_dir):
        create_apk_impl(opts)
class SkQP_Build_Options(object):
    """Build configuration gathered from environment variables and argv.

    Setup problems are accumulated in ``self.error`` (empty string when
    everything looks usable); ``main`` checks it before building.
    """
    def __init__(self):
        assert '/' in [os.sep, os.altsep]  # 'a/b' over os.path.join('a', 'b')
        self.error = ''
        if not check_ninja():
            self.error += '`ninja` is not in the path.\n'
        for var in ['ANDROID_NDK', 'ANDROID_HOME']:
            if not os.path.exists(os.environ.get(var, '')):
                self.error += 'Environment variable `%s` is not set.\n' % var
        # NOTE(review): if either variable is entirely unset, the direct
        # lookups below raise KeyError before main() can report self.error —
        # confirm whether that is intended.
        self.android_ndk = os.path.abspath(os.environ['ANDROID_NDK'])
        self.android_home = os.path.abspath(os.environ['ANDROID_HOME'])
        args = sys.argv[1:]
        for arg in args:
            if arg not in skia_to_android_arch_name_map:
                self.error += ('Argument %r is not in %r\n' %
                               (arg, skia_to_android_arch_name_map.keys()))
        # No positional arguments means "build all architectures".
        self.architectures = args if args else skia_to_android_arch_name_map.keys()
        default_build = os.path.dirname(__file__) + '/../../out/skqp'
        self.build_dir = os.path.abspath(os.environ.get('SKQP_BUILD_DIR', default_build))
        self.final_output_dir = os.path.abspath(os.environ.get('SKQP_OUTPUT_DIR', default_build))
        self.debug = bool(os.environ.get('SKQP_DEBUG', ''))

    def gn_args(self, arch):
        # Last argument is the Android API level passed through to GN.
        return skqp_gn_args.GetGNArgs(arch, self.android_ndk, self.debug, 26)

    def write(self, o):
        """Dump the effective configuration to stream *o*."""
        for k, v in [('ANDROID_NDK', self.android_ndk),
                     ('ANDROID_HOME', self.android_home),
                     ('SKQP_OUTPUT_DIR', self.final_output_dir),
                     ('SKQP_BUILD_DIR', self.build_dir),
                     ('SKQP_DEBUG', self.debug),
                     ('Architectures', self.architectures)]:
            o.write('%s = %r\n' % (k, v))
        o.flush()
def main():
    """Entry point: validate the build options, then build the APK."""
    opts = SkQP_Build_Options()
    if opts.error:
        # Print the problems followed by the usage text in the module doc.
        sys.stderr.write(opts.error + __doc__)
        sys.exit(1)
    opts.write(sys.stdout)
    create_apk(opts)
if __name__ == '__main__':
main()
| {
"content_hash": "2418f551d3831e17b71f6c0785e806df",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 97,
"avg_line_length": 35.414529914529915,
"alnum_prop": 0.579461807650537,
"repo_name": "HalCanary/skia-hc",
"id": "27bff792fbae3183f27b6a3f95ce7646da73a97a",
"size": "8442",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/skqp/create_apk.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1277297"
},
{
"name": "Batchfile",
"bytes": "865"
},
{
"name": "C",
"bytes": "505166"
},
{
"name": "C#",
"bytes": "4683"
},
{
"name": "C++",
"bytes": "32234337"
},
{
"name": "CMake",
"bytes": "2850"
},
{
"name": "CSS",
"bytes": "3078"
},
{
"name": "Dockerfile",
"bytes": "14764"
},
{
"name": "GLSL",
"bytes": "109164"
},
{
"name": "Go",
"bytes": "135327"
},
{
"name": "HTML",
"bytes": "1321397"
},
{
"name": "Java",
"bytes": "167849"
},
{
"name": "JavaScript",
"bytes": "463920"
},
{
"name": "Lex",
"bytes": "2521"
},
{
"name": "Lua",
"bytes": "70982"
},
{
"name": "Makefile",
"bytes": "13502"
},
{
"name": "Objective-C",
"bytes": "83351"
},
{
"name": "Objective-C++",
"bytes": "366996"
},
{
"name": "PHP",
"bytes": "139510"
},
{
"name": "PowerShell",
"bytes": "1432"
},
{
"name": "Python",
"bytes": "1055437"
},
{
"name": "Shell",
"bytes": "95010"
}
],
"symlink_target": ""
} |
import sys
if sys.version_info >= (3,):
    # Python 3: text is already str; both conversions are identity.
    def u_encode(string):
        return string

    def u_decode(string):
        return string
else:
    # Python 2: convert between unicode objects and utf-8 encoded bytes.
    def u_encode(string):
        return string.encode('utf8')

    def u_decode(string):
        return string.decode('utf8')
| {
"content_hash": "8d2aa3e6d21dc4d0f8aa53664aafcaf7",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 30,
"avg_line_length": 21,
"alnum_prop": 0.7012987012987013,
"repo_name": "lucianp/dotfiles",
"id": "50ba796317b5c41cc605136de5300f423569950f",
"size": "231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "link/.vim/ftplugin/orgmode/py3compat/encode_compatibility.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12728"
},
{
"name": "Erlang",
"bytes": "1972"
},
{
"name": "HTML",
"bytes": "16488"
},
{
"name": "JavaScript",
"bytes": "1064"
},
{
"name": "Makefile",
"bytes": "208"
},
{
"name": "Python",
"bytes": "232360"
},
{
"name": "Ruby",
"bytes": "248"
},
{
"name": "Shell",
"bytes": "8489"
},
{
"name": "Vim script",
"bytes": "1616576"
}
],
"symlink_target": ""
} |
"""
Display bitcoin prices using bitcoincharts.com.
Configuration parameters:
- cache_timeout: Should be at least 15 min according to bitcoincharts.
- color_index : Index of the market responsible for coloration,
meaning that the output is going to be green if the
price went up and red if it went down.
default: -1 means no coloration,
except when only one market is selected
- field : Field that is displayed per market,
see http://bitcoincharts.com/about/markets-api/
- hide_on_error: Display empty response if True, else an error message
- markets : Comma-separated list of markets. Supported markets can
be found at http://bitcoincharts.com/markets/list/
- symbols : Try to match currency abbreviations to symbols,
e.g. USD -> $, EUR -> € and so on
@author Andre Doser <doser.andre AT gmail.com>
"""
import json
from time import time
try:
# python 3
from urllib.error import URLError
from urllib.request import urlopen
except ImportError:
# python 2
from urllib2 import URLError
from urllib2 import urlopen
class Py3status:
    """Display bitcoin market prices fetched from bitcoincharts.com.

    See the module docstring for the configuration parameters.
    """
    # available configuration parameters
    cache_timeout = 900
    color_index = -1
    field = 'close'
    hide_on_error = False
    markets = 'btceUSD, btcdeEUR'
    symbols = True

    def __init__(self):
        """
        Initialize last_price, set the currency mapping
        and the url containing the data.
        """
        # Three-letter currency code -> display symbol.
        self.currency_map = {
            'AUD': '$',
            'CNY': '¥',
            'EUR': '€',
            'GBP': '£',
            'USD': '$',
            'YEN': '¥'
        }
        # Price seen on the previous refresh; 0 means "no data yet", which
        # suppresses coloration on the first run.
        self.last_price = 0
        self.url = 'http://api.bitcoincharts.com/v1/markets.json'

    def _get_price(self, data, market, field):
        """
        Given the data (in json format), returns the
        field for a given market.
        """
        for m in data:
            if m['symbol'] == market:
                return m[field]

    def get_rate(self, i3s_output_list, i3s_config):
        """i3status callback: build the status line of market rates."""
        response = {
            'cached_until': time() + self.cache_timeout,
            'full_text': ''
        }
        # get the data from the bitcoincharts website
        try:
            data = json.loads(urlopen(self.url).read().decode())
        except URLError:
            if not self.hide_on_error:
                response['color'] = i3s_config['color_bad']
                response['full_text'] = 'Bitcoincharts unreachable'
            return response
        # get the rate for each market given
        rates, markets = [], self.markets.split(',')
        color_rate = None
        for i, market in enumerate(markets):
            market = market.strip()
            try:
                rate = self._get_price(data, market, self.field)
                # coloration: remember the rate of the market chosen for
                # color comparison (or the only market configured)
                if i == self.color_index or len(markets) == 1:
                    color_rate = rate
            except KeyError:
                continue
            # market name (currency suffix stripped when a rate is known)
            out = market[:-3] if rate else market
            out += ': '
            # rate
            out += 'N/A' if not rate else '{:.2f}'.format(rate)
            currency_sym = self.currency_map.get(market[-3:], market[-3:])
            out += currency_sym if self.symbols else market
            rates.append(out)
        # only colorize if an index is given or
        # if only one market is selected
        if len(rates) == 1 or self.color_index > -1:
            if self.last_price == 0:
                # First refresh: no previous price to compare against.
                pass
            elif color_rate < self.last_price:
                response['color'] = i3s_config['color_bad']
            elif color_rate > self.last_price:
                response['color'] = i3s_config['color_good']
            self.last_price = color_rate
        response['full_text'] = ', '.join(rates)
        return response
if __name__ == '__main__':
    """
    Test this module by calling it directly.
    """
    from time import sleep
    x = Py3status()
    config = {
        'color_good': '#00FF00',
        'color_bad': '#FF0000',
    }
    # Poll forever, printing a fresh status dict every 5 seconds
    # (interrupt with Ctrl-C).
    while True:
        print(x.get_rate([], config))
        sleep(5)
| {
"content_hash": "1223caa84a0c55206ca3fb28ff387b7d",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 75,
"avg_line_length": 32.5,
"alnum_prop": 0.534965034965035,
"repo_name": "sethwoodworth/py3status",
"id": "f530d2716501696746615906d55a6c0f275f3b26",
"size": "4321",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "py3status/modules/bitcoin_price.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "182160"
}
],
"symlink_target": ""
} |
from haystack import indexes
from oscar.core.loading import get_class, get_model
# Load default strategy (without a user/request)
is_solr_supported = get_class('search.features', 'is_solr_supported')
Selector = get_class('partner.strategy', 'Selector')
# Module-level strategy instance: pricing/stock preparation below assumes
# all customers see the same prices (no per-request strategy).
strategy = Selector().strategy()
class ProductIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index over browsable catalogue products."""
    # Search text
    text = indexes.EdgeNgramField(
        document=True, use_template=True,
        template_name='search/indexes/product/item_text.txt')
    upc = indexes.CharField(model_attr="upc", null=True)
    title = indexes.EdgeNgramField(model_attr='title', null=True)

    # Fields for faceting
    product_class = indexes.CharField(null=True, faceted=True)
    category = indexes.MultiValueField(null=True, faceted=True)
    price = indexes.DecimalField(null=True, faceted=True)
    num_in_stock = indexes.IntegerField(null=True, faceted=True)
    rating = indexes.IntegerField(null=True, faceted=True)

    # Spelling suggestions
    suggestions = indexes.FacetCharField()

    date_created = indexes.DateTimeField(model_attr='date_created')
    date_updated = indexes.DateTimeField(model_attr='date_updated')

    def get_model(self):
        return get_model('catalogue', 'Product')

    def index_queryset(self, using=None):
        # Only index browsable products (not each individual child product)
        return self.get_model().browsable.order_by('-date_updated')

    def read_queryset(self, using=None):
        return self.get_model().browsable.base_queryset()

    def prepare_product_class(self, obj):
        # Facet value: the name of the product's class.
        return obj.get_product_class().name

    def prepare_category(self, obj):
        # Index the full (breadcrumb) name of every category the product
        # belongs to; returns None for uncategorised products.
        categories = obj.categories.all()
        if len(categories) > 0:
            return [category.full_name for category in categories]

    def prepare_rating(self, obj):
        if obj.rating is not None:
            return int(obj.rating)

    # Pricing and stock is tricky as it can vary per customer. However, the
    # most common case is for customers to see the same prices and stock levels
    # and so we implement that case here.
    def prepare_price(self, obj):
        result = None
        if obj.is_parent:
            result = strategy.fetch_for_parent(obj)
        elif obj.has_stockrecords:
            result = strategy.fetch_for_product(obj)
        if result:
            # Prefer the tax-inclusive price when the tax is known.
            if result.price.is_tax_known:
                return result.price.incl_tax
            return result.price.excl_tax

    def prepare_num_in_stock(self, obj):
        if obj.is_parent:
            # Don't return a stock level for parent products
            return None
        elif obj.has_stockrecords:
            result = strategy.fetch_for_product(obj)
            return result.stockrecord.net_stock_level

    def prepare(self, obj):
        prepared_data = super(ProductIndex, self).prepare(obj)
        # We use Haystack's dynamic fields to ensure that the title field
        # used for sorting is of type "string".
        if is_solr_supported():
            prepared_data['title_s'] = prepared_data['title']
        # Use the document text for spelling suggestions.
        prepared_data['suggestions'] = prepared_data['text']
        return prepared_data

    def get_updated_field(self):
        """
        Used to specify the field used to determine if an object has been
        updated

        Can be used to filter the query set when updating the index
        """
        return 'date_updated'
| {
"content_hash": "289b06eefa9fb2e874b7f9f9797e1867",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 79,
"avg_line_length": 35.06060606060606,
"alnum_prop": 0.6606165370210314,
"repo_name": "okfish/django-oscar",
"id": "db6bd1aa1cfd44d68e79a8520bcc54450bb178dd",
"size": "3471",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/oscar/apps/search/search_indexes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "542048"
},
{
"name": "HTML",
"bytes": "498872"
},
{
"name": "JavaScript",
"bytes": "423552"
},
{
"name": "Makefile",
"bytes": "2653"
},
{
"name": "Python",
"bytes": "1738863"
},
{
"name": "Shell",
"bytes": "2751"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.extensions_v1beta1_deployment_rollback import ExtensionsV1beta1DeploymentRollback
class TestExtensionsV1beta1DeploymentRollback(unittest.TestCase):
    """Unit-test stubs for the ExtensionsV1beta1DeploymentRollback model."""

    def setUp(self):
        # No fixtures are needed for these stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testExtensionsV1beta1DeploymentRollback(self):
        """Test ExtensionsV1beta1DeploymentRollback construction.

        FIXME: construct object with mandatory attributes with example values
        """
        # model = kubernetes.client.models.extensions_v1beta1_deployment_rollback.ExtensionsV1beta1DeploymentRollback()
        pass
# Run the test stubs when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "56e5538607489e71ee60dc2204dee5c7",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 118,
"avg_line_length": 26.142857142857142,
"alnum_prop": 0.7340619307832422,
"repo_name": "mbohlool/client-python",
"id": "8c6f51f7961c54cdb73b7183eba7da81a575f2bb",
"size": "1115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/test/test_extensions_v1beta1_deployment_rollback.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
} |
from datetime import datetime
import calendar
from direct.gui.DirectGui import DirectFrame, DirectLabel
from toontown.toonbase import TTLocalizer
from direct.showbase import PythonUtil
from direct.fsm.FSM import FSM
from toontown.parties import PartyGlobals
from toontown.parties import PartyUtils
from toontown.toonbase.ToontownGlobals import VALENTINES_DAY
class InviteVisual(DirectFrame):
    """2-D GUI panel that renders a party invitation.

    Displays whose party it is, what activities it offers and when it
    starts, drawn on a themed background page loaded from the party
    invite GUI model.
    """
    # Notify category for logging.  NOTE(review): directNotify appears to be
    # an engine-injected global (no import in this file) - confirm.
    notify = directNotify.newCategory('InviteVisual')

    def __init__(self, parent):
        """Build the invitation widgets and parent them under *parent*."""
        DirectFrame.__init__(self, parent=parent)
        self.gui = loader.loadModel('phase_5.5/models/parties/partyInviteGUI')
        # Maps an InviteTheme id to a tuple of
        # (background page node, localized theme name, text RGBA color).
        self.inviteThemesIdToInfo = {PartyGlobals.InviteTheme.Birthday: (self.gui.find('**/birthdayPage'), TTLocalizer.PartyPlannerBirthdayTheme, (0.0, 0.0, 0.0, 1.0)),
         PartyGlobals.InviteTheme.GenericMale: (self.gui.find('**/genericMalePage'), TTLocalizer.PartyPlannerGenericMaleTheme, (0.7, 0.7, 0.0, 1.0)),
         PartyGlobals.InviteTheme.GenericFemale: (self.gui.find('**/genericFemalePage'), TTLocalizer.PartyPlannerGenericFemaleTheme, (0.0, 1.0, 0.5, 1.0)),
         PartyGlobals.InviteTheme.Racing: (self.gui.find('**/racingPage'), TTLocalizer.PartyPlannerRacingTheme, (0.0, 0.0, 0.0, 1.0)),
         PartyGlobals.InviteTheme.Valentoons: (self.gui.find('**/valentinePage1'), TTLocalizer.PartyPlannerValentoonsTheme, (0.0, 0.0, 0.0, 1.0)),
         PartyGlobals.InviteTheme.VictoryParty: (self.gui.find('**/victoryPartyPage'), TTLocalizer.PartyPlannerVictoryPartyTheme, (0.0, 0.0, 0.0, 1.0)),
         PartyGlobals.InviteTheme.Winter: (self.gui.find('**/winterPartyPage1'), TTLocalizer.PartyPlannerWinterPartyTheme, (1.0, 1.0, 1.0, 1.0))}
        # Background page image; index 0 is presumably the Birthday theme
        # (see dict above) - the image is swapped later by changeTheme().
        self.inviteThemeBackground = DirectFrame(parent=self, image=self.inviteThemesIdToInfo[0][0], relief=None)
        # The '.' placeholder text is replaced by updateInvitation().
        self.whosePartyLabel = DirectLabel(parent=self, relief=None, pos=self.gui.find('**/who_locator').getPos(), text='.', text_scale=0.067, textMayChange=True)
        self.activityTextLabel = DirectLabel(parent=self, relief=None, text='.\n.\n.\n.', pos=self.gui.find('**/what_locator').getPos(), text_scale=TTLocalizer.IVactivityTextLabel, textMayChange=True)
        self.whenTextLabel = DirectLabel(parent=self, relief=None, text='.\n.\n.', pos=self.gui.find('**/when_locator').getPos(), text_scale=TTLocalizer.IVwhenTextLabel, textMayChange=True)
        # When True, the "no friends" wording variants are used.
        self.noFriends = False
        return None

    def setNoFriends(self, noFriends):
        """Record whether the host has no friends to invite and show the page."""
        self.noFriends = noFriends
        self.inviteThemeBackground.show()

    def updateInvitation(self, hostsName, partyInfo):
        """Fill in all invitation text from *partyInfo* and apply its theme.

        Args:
            hostsName: display name of the party host.
            partyInfo: party data object; start time, activity list,
                privacy flag and invite theme are read from it.
        """
        self.partyInfo = partyInfo
        hostsName = TTLocalizer.GetPossesive(hostsName, 'party')
        self.whosePartyLabel['text'] = TTLocalizer.PartyPlannerInvitationWhoseSentence % hostsName
        if self.partyInfo.isPrivate:
            publicPrivateText = TTLocalizer.PartyPlannerPrivate.lower()
        else:
            publicPrivateText = TTLocalizer.PartyPlannerPublic.lower()
        activities = self.getActivitiesFormattedCorrectly()
        # The "no friends" variants use alternate sentence templates.
        if self.noFriends:
            self.activityTextLabel['text'] = TTLocalizer.PartyPlannerInvitationThemeWhatSentenceNoFriends % (publicPrivateText, activities)
        else:
            self.activityTextLabel['text'] = TTLocalizer.PartyPlannerInvitationThemeWhatSentence % (publicPrivateText, activities)
        if self.noFriends:
            self.whenTextLabel['text'] = TTLocalizer.PartyPlannerInvitationWhenSentenceNoFriends % (PartyUtils.formatDate(self.partyInfo.startTime.year, self.partyInfo.startTime.month, self.partyInfo.startTime.day), PartyUtils.formatTime(self.partyInfo.startTime.hour, self.partyInfo.startTime.minute))
        else:
            self.whenTextLabel['text'] = TTLocalizer.PartyPlannerInvitationWhenSentence % (PartyUtils.formatDate(self.partyInfo.startTime.year, self.partyInfo.startTime.month, self.partyInfo.startTime.day), PartyUtils.formatTime(self.partyInfo.startTime.hour, self.partyInfo.startTime.minute))
        self.changeTheme(partyInfo.inviteTheme)

    def getActivitiesFormattedCorrectly(self):
        """Return the party's activity names as one localized sentence.

        Duplicate activity names are collapsed; multiple names are joined
        with commas and a final conjunction, then wrapped to the label
        width by insertCarriageReturn().
        """
        activitiesString = ''
        activityList = []
        # Collect unique localized activity names, preserving order.
        for activity in self.partyInfo.activityList:
            text = TTLocalizer.PartyActivityNameDict[activity.activityId]['invite']
            if text not in activityList:
                activityList.append(text)
        if len(activityList) == 1:
            return '\n' + TTLocalizer.PartyPlannerInvitationThemeWhatActivitiesBeginning + activityList[0]
        conjunction = TTLocalizer.PartyActivityConjunction
        for activity in activityList:
            activitiesString = '%s, %s' % (activitiesString, activity)
        # Drop the leading ', ' and replace the last comma with the conjunction.
        activitiesString = activitiesString[2:]
        activitiesString = activitiesString[:activitiesString.rfind(',')] + conjunction + activitiesString[activitiesString.rfind(',') + 1:]
        activitiesString = TTLocalizer.PartyPlannerInvitationThemeWhatActivitiesBeginning + activitiesString
        return self.insertCarriageReturn(activitiesString)

    def insertCarriageReturn(self, stringLeft, stringDone = ''):
        """Recursively break *stringLeft* into lines of roughly 42 characters.

        Breaks at the first space found from a few characters before the
        target width onward (presumably to avoid splitting mid-word);
        each recursion prepends a newline to the remainder.
        """
        desiredNumberOfCharactersInLine = 42
        if len(stringLeft) < desiredNumberOfCharactersInLine:
            return stringDone + '\n' + stringLeft
        for i in range(desiredNumberOfCharactersInLine - 6, len(stringLeft)):
            if stringLeft[i] == ' ':
                return self.insertCarriageReturn(stringLeft[i:], stringDone + '\n' + stringLeft[:i])
        # No space found past the threshold: emit the rest unbroken.
        return stringDone + '\n' + stringLeft

    def changeTheme(self, newTheme):
        """Swap the background page and text colors to the given theme id."""
        self.inviteThemeBackground['image'] = self.inviteThemesIdToInfo[newTheme][0]
        self.whosePartyLabel['text_fg'] = self.inviteThemesIdToInfo[newTheme][2]
        self.activityTextLabel['text_fg'] = self.inviteThemesIdToInfo[newTheme][2]
        self.whenTextLabel['text_fg'] = self.inviteThemesIdToInfo[newTheme][2]

    def close(self):
        """Destroy the GUI.  (The trailing 'del self' only unbinds the
        local name and has no effect on the object's lifetime.)"""
        self.destroy()
        del self
| {
"content_hash": "bcd8471c444fbf72bf62e9b4bc696db5",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 302,
"avg_line_length": 61.69642857142857,
"alnum_prop": 0.6166425470332851,
"repo_name": "silly-wacky-3-town-toon/SOURCE-COD",
"id": "a54ecfc89fe1cc06842968a4f6d4c252cb89ae8d",
"size": "6910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toontown/parties/InviteVisual.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "10249"
},
{
"name": "C",
"bytes": "1752256"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "5485400"
},
{
"name": "Emacs Lisp",
"bytes": "210083"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "NSIS",
"bytes": "1009050"
},
{
"name": "Objective-C",
"bytes": "21821"
},
{
"name": "PLSQL",
"bytes": "10200"
},
{
"name": "Pascal",
"bytes": "4986"
},
{
"name": "Perl6",
"bytes": "30612"
},
{
"name": "Puppet",
"bytes": "259"
},
{
"name": "Python",
"bytes": "33566014"
},
{
"name": "Shell",
"bytes": "14642"
},
{
"name": "Tcl",
"bytes": "2084458"
}
],
"symlink_target": ""
} |
"""Auto-generated file, do not edit by hand. GW metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Short-number metadata for Guinea-Bissau (region GW).  The pattern
# '11[378]' covers the three-digit codes 113, 117 and 118, which are
# registered here as toll-free, emergency and general short codes alike.
# File is auto-generated (see module docstring) - do not edit by hand.
PHONE_METADATA_GW = PhoneMetadata(id='GW', country_code=None, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='1\\d\\d', possible_length=(3,)),
    toll_free=PhoneNumberDesc(national_number_pattern='11[378]', example_number='113', possible_length=(3,)),
    emergency=PhoneNumberDesc(national_number_pattern='11[378]', example_number='113', possible_length=(3,)),
    short_code=PhoneNumberDesc(national_number_pattern='11[378]', example_number='113', possible_length=(3,)),
    short_data=True)
| {
"content_hash": "63ac73ab803b6d77038698ec0f5cfc39",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 110,
"avg_line_length": 74,
"alnum_prop": 0.7432432432432432,
"repo_name": "daviddrysdale/python-phonenumbers",
"id": "2da4557d349dee7693eecea80e1a50d8c9c7ce11",
"size": "666",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "python/phonenumbers/shortdata/region_GW.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3898"
},
{
"name": "Makefile",
"bytes": "9034"
},
{
"name": "Python",
"bytes": "22052087"
},
{
"name": "Ruby",
"bytes": "237"
}
],
"symlink_target": ""
} |
"""Test code for vision package"""
from __future__ import print_function
import numpy as np
import tvm
import topi
import topi.testing
def test_argsort():
    """Check topi.argsort (descending, last axis) against numpy.argsort."""
    shape = (20, 100)
    placeholder = tvm.placeholder(shape, name="data", dtype="float32")
    np_input = np.random.rand(*shape).astype(placeholder.dtype)
    # Reference: descending order == argsort of the negated input.
    expected = np.argsort(-np_input)

    def run_on(target):
        ctx = tvm.context(target, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % target)
            return
        print("Running on target: %s" % target)
        with tvm.target.create(target):
            result = topi.argsort(placeholder, axis=-1, is_ascend=False)
            sched = topi.generic.schedule_argsort(result)
        input_nd = tvm.nd.array(np_input, ctx)
        output_nd = tvm.nd.array(np.zeros(shape, dtype="float32"), ctx)
        func = tvm.build(sched, [placeholder, result], target)
        func(input_nd, output_nd)
        tvm.testing.assert_allclose(output_nd.asnumpy(), expected.astype("float32"), rtol=1e0)

    for target in ["llvm", "cuda", "opencl"]:
        run_on(target)
def verify_topk(k, axis, ret_type, is_ascend, dtype):
    """Compare topi.topk against a numpy reference for one parameter combo.

    k <= 0 means "keep everything along the axis"; ret_type selects which
    of (values, indices) the operator returns.
    """
    shape = (20, 100)
    data_dtype = "float32"
    data = tvm.placeholder(shape, name="data", dtype=data_dtype)
    np_data = np.random.uniform(size=shape).astype(data_dtype)
    # Reference ordering: ascending sorts the data, descending its negation.
    np_indices = np.argsort(np_data if is_ascend else -np_data, axis=axis)
    kk = shape[axis] if k < 1 else k
    if axis == 0:
        np_indices = np_indices[:kk, :]
        np_values = np.zeros(np_indices.shape).astype(data_dtype)
        for col in range(shape[1]):
            np_values[:, col] = np_data[np_indices[:, col], col]
    else:
        np_indices = np_indices[:, :kk]
        np_values = np.zeros(np_indices.shape).astype(data_dtype)
        for row in range(shape[0]):
            np_values[row, :] = np_data[row, np_indices[row, :]]
    np_indices = np_indices.astype(dtype)

    def run_on(device):
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            outs = topi.topk(data, k, axis, ret_type, is_ascend, dtype)
            outs = [outs] if not isinstance(outs, list) else outs
            s = topi.generic.schedule_topk(outs)
        tvm_data = tvm.nd.array(np_data, ctx)
        tvm_res = [tvm.nd.empty(t.shape, dtype=t.dtype, ctx=ctx) for t in outs]
        f = tvm.build(s, [data] + outs, device)
        f(tvm_data, *tvm_res)
        if ret_type == "both":
            tvm.testing.assert_allclose(tvm_res[0].asnumpy(), np_values)
            tvm.testing.assert_allclose(tvm_res[1].asnumpy(), np_indices)
        elif ret_type == "values":
            tvm.testing.assert_allclose(tvm_res[0].asnumpy(), np_values)
        else:
            tvm.testing.assert_allclose(tvm_res[0].asnumpy(), np_indices)

    for device in ["llvm", "cuda", "opencl"]:
        run_on(device)
def test_topk():
    """Sweep k / axis / ret_type combinations for both sort orders."""
    np.random.seed(0)
    for k in (0, 1, 5):
        for axis in (0, -1, 1):
            for ret_type in ("both", "values", "indices"):
                # int64 indices ascending, float32 indices descending.
                verify_topk(k, axis, ret_type, True, "int64")
                verify_topk(k, axis, ret_type, False, "float32")
# Allow running this file directly as a standalone test script.
if __name__ == "__main__":
    test_argsort()
    test_topk()
| {
"content_hash": "cedf3da1c210328a1e27a52d2ceb37eb",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 93,
"avg_line_length": 37.03225806451613,
"alnum_prop": 0.5752032520325203,
"repo_name": "mlperf/training_results_v0.7",
"id": "c084a7c431b638ccece63b85a4b527ad3c946482",
"size": "4229",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/3rdparty/tvm/topi/tests/python/test_topi_sort.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Awk",
"bytes": "14530"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "172914"
},
{
"name": "C++",
"bytes": "13037795"
},
{
"name": "CMake",
"bytes": "113458"
},
{
"name": "CSS",
"bytes": "70255"
},
{
"name": "Clojure",
"bytes": "622652"
},
{
"name": "Cuda",
"bytes": "1974745"
},
{
"name": "Dockerfile",
"bytes": "149523"
},
{
"name": "Groovy",
"bytes": "160449"
},
{
"name": "HTML",
"bytes": "171537"
},
{
"name": "Java",
"bytes": "189275"
},
{
"name": "JavaScript",
"bytes": "98224"
},
{
"name": "Julia",
"bytes": "430755"
},
{
"name": "Jupyter Notebook",
"bytes": "11091342"
},
{
"name": "Lua",
"bytes": "17720"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "215967"
},
{
"name": "Perl",
"bytes": "1551186"
},
{
"name": "PowerShell",
"bytes": "13906"
},
{
"name": "Python",
"bytes": "36943114"
},
{
"name": "R",
"bytes": "134921"
},
{
"name": "Raku",
"bytes": "7280"
},
{
"name": "Ruby",
"bytes": "4930"
},
{
"name": "SWIG",
"bytes": "140111"
},
{
"name": "Scala",
"bytes": "1304960"
},
{
"name": "Shell",
"bytes": "1312832"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "Starlark",
"bytes": "69877"
},
{
"name": "TypeScript",
"bytes": "243012"
}
],
"symlink_target": ""
} |
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.iotcloud.v20210408 import models
class IotcloudClient(AbstractClient):
    """Client for the Tencent Cloud IoT Hub (iotcloud) API, version 2021-04-08."""
    # Identifiers consumed by AbstractClient when signing/dispatching requests.
    _apiVersion = '2021-04-08'
    _endpoint = 'iotcloud.tencentcloudapi.com'
    _service = 'iotcloud'
def BatchUpdateFirmware(self, request):
"""本接口(BatchUpdateFirmware)用于批量更新设备固件
:param request: Request instance for BatchUpdateFirmware.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.BatchUpdateFirmwareRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.BatchUpdateFirmwareResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("BatchUpdateFirmware", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BatchUpdateFirmwareResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def BindDevices(self, request):
"""本接口(BindDevices)用于网关设备批量绑定子设备
:param request: Request instance for BindDevices.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.BindDevicesRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.BindDevicesResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("BindDevices", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.BindDevicesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CancelDeviceFirmwareTask(self, request):
"""取消设备升级任务
:param request: Request instance for CancelDeviceFirmwareTask.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CancelDeviceFirmwareTaskRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CancelDeviceFirmwareTaskResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CancelDeviceFirmwareTask", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CancelDeviceFirmwareTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateDevice(self, request):
"""本接口(CreateDevice)用于新建一个物联网通信设备。
:param request: Request instance for CreateDevice.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreateDeviceRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreateDeviceResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreateDevice", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateDeviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateMultiDevicesTask(self, request):
"""本接口(CreateMultiDevicesTask)用于创建产品级别的批量创建设备任务
:param request: Request instance for CreateMultiDevicesTask.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreateMultiDevicesTaskRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreateMultiDevicesTaskResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreateMultiDevicesTask", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateMultiDevicesTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreatePrivateCA(self, request):
"""创建私有CA证书
:param request: Request instance for CreatePrivateCA.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreatePrivateCARequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreatePrivateCAResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreatePrivateCA", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreatePrivateCAResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateProduct(self, request):
"""本接口(CreateProduct)用于创建一个新的物联网通信产品
:param request: Request instance for CreateProduct.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreateProductRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreateProductResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreateProduct", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateProductResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateTaskFileUrl(self, request):
"""本接口(CreateTaskFileUrl)用于获取产品级任务文件上传链接
:param request: Request instance for CreateTaskFileUrl.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreateTaskFileUrlRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreateTaskFileUrlResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreateTaskFileUrl", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateTaskFileUrlResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateTopicPolicy(self, request):
"""本接口(CreateTopicPolicy)用于创建一个Topic
:param request: Request instance for CreateTopicPolicy.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreateTopicPolicyRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreateTopicPolicyResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreateTopicPolicy", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateTopicPolicyResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def CreateTopicRule(self, request):
"""本接口(CreateTopicRule)用于创建一个规则
:param request: Request instance for CreateTopicRule.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.CreateTopicRuleRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.CreateTopicRuleResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("CreateTopicRule", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.CreateTopicRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteDevice(self, request):
"""本接口(DeleteDevice)用于删除物联网通信设备。
:param request: Request instance for DeleteDevice.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeleteDeviceRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeleteDeviceResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeleteDevice", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteDeviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteDeviceResource(self, request):
"""本接口(DeleteDeviceResource)用于删除设备资源
:param request: Request instance for DeleteDeviceResource.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeleteDeviceResourceRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeleteDeviceResourceResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeleteDeviceResource", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteDeviceResourceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteDeviceShadow(self, request):
"""本接口(DeleteDeviceShadow)用于删除设备影子
:param request: Request instance for DeleteDeviceShadow.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeleteDeviceShadowRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeleteDeviceShadowResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeleteDeviceShadow", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteDeviceShadowResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeletePrivateCA(self, request):
"""删除私有CA证书
:param request: Request instance for DeletePrivateCA.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeletePrivateCARequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeletePrivateCAResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeletePrivateCA", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeletePrivateCAResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteProduct(self, request):
"""本接口(DeleteProduct)用于删除一个物联网通信产品
:param request: Request instance for DeleteProduct.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeleteProductRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeleteProductResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeleteProduct", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteProductResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteProductPrivateCA(self, request):
"""删除产品的私有CA证书
:param request: Request instance for DeleteProductPrivateCA.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeleteProductPrivateCARequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeleteProductPrivateCAResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeleteProductPrivateCA", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteProductPrivateCAResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DeleteTopicRule(self, request):
"""本接口(DeleteTopicRule)用于删除规则
:param request: Request instance for DeleteTopicRule.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DeleteTopicRuleRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DeleteTopicRuleResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DeleteTopicRule", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DeleteTopicRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDevice(self, request):
"""本接口(DescribeDevice)用于查看设备信息
:param request: Request instance for DescribeDevice.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DescribeDevice", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDeviceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDeviceClientKey(self, request):
"""获取证书认证类型设备的私钥,刚生成或者重置设备后仅可调用一次
:param request: Request instance for DescribeDeviceClientKey.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceClientKeyRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceClientKeyResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DescribeDeviceClientKey", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDeviceClientKeyResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDeviceResource(self, request):
"""本接口(DescribeDeviceResource)用于查询设备资源详情。
:param request: Request instance for DescribeDeviceResource.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceResourceRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceResourceResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("DescribeDeviceResource", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.DescribeDeviceResourceResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def DescribeDeviceResources(self, request):
    """Query the device resource list.

    :param request: Request instance for DescribeDeviceResources.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceResourcesRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceResourcesResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeDeviceResources", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeDeviceResourcesResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeDeviceShadow(self, request):
    """Query device shadow (virtual device) information.

    :param request: Request instance for DescribeDeviceShadow.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceShadowRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDeviceShadowResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeDeviceShadow", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeDeviceShadowResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeDevices(self, request):
    """Query the device list of an IoT Hub product.

    :param request: Request instance for DescribeDevices.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDevicesRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeDevicesResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeDevices", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeDevicesResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeFirmware(self, request):
    """Query firmware information.

    :param request: Request instance for DescribeFirmware.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeFirmware", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeFirmwareResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeFirmwareTask(self, request):
    """Query firmware update task details.

    :param request: Request instance for DescribeFirmwareTask.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeFirmwareTask", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeFirmwareTaskResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeFirmwareTaskDevices(self, request):
    """Query the device list of a firmware update task.

    :param request: Request instance for DescribeFirmwareTaskDevices.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskDevicesRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskDevicesResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeFirmwareTaskDevices", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeFirmwareTaskDevicesResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeFirmwareTaskDistribution(self, request):
    """Query the status distribution of a firmware update task.

    :param request: Request instance for DescribeFirmwareTaskDistribution.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskDistributionRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskDistributionResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeFirmwareTaskDistribution", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeFirmwareTaskDistributionResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeFirmwareTaskStatistics(self, request):
    """Query firmware update task statistics.

    :param request: Request instance for DescribeFirmwareTaskStatistics.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskStatisticsRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTaskStatisticsResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeFirmwareTaskStatistics", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeFirmwareTaskStatisticsResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeFirmwareTasks(self, request):
    """Query the firmware update task list.

    :param request: Request instance for DescribeFirmwareTasks.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTasksRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeFirmwareTasksResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeFirmwareTasks", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeFirmwareTasksResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeGatewayBindDevices(self, request):
    """Get the list of sub-devices bound to a gateway.

    :param request: Request instance for DescribeGatewayBindDevices.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeGatewayBindDevicesRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeGatewayBindDevicesResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeGatewayBindDevices", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeGatewayBindDevicesResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePrivateCA(self, request):
    """Query private CA information.

    :param request: Request instance for DescribePrivateCA.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribePrivateCARequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribePrivateCAResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribePrivateCA", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribePrivateCAResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePrivateCABindedProducts(self, request):
    """Query the list of products bound to a private CA.

    :param request: Request instance for DescribePrivateCABindedProducts.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribePrivateCABindedProductsRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribePrivateCABindedProductsResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribePrivateCABindedProducts", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribePrivateCABindedProductsResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePrivateCAs(self, request):
    """Query the private CA certificate list.

    :param request: Request instance for DescribePrivateCAs.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribePrivateCAsRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribePrivateCAsResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribePrivateCAs", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribePrivateCAsResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProduct(self, request):
    """View product details.

    :param request: Request instance for DescribeProduct.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProduct", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProductCA(self, request):
    """Query the CA certificates bound to a product.

    :param request: Request instance for DescribeProductCA.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductCARequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductCAResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProductCA", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductCAResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProductResource(self, request):
    """Query product resource details.

    :param request: Request instance for DescribeProductResource.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductResourceRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductResourceResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProductResource", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductResourceResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProductResources(self, request):
    """Query the product resource list.

    :param request: Request instance for DescribeProductResources.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductResourcesRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductResourcesResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProductResources", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductResourcesResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProductTask(self, request):
    """View product-level task information.

    :param request: Request instance for DescribeProductTask.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductTaskRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductTaskResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProductTask", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductTaskResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProductTasks(self, request):
    """View the product-level task list.

    :param request: Request instance for DescribeProductTasks.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductTasksRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductTasksResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProductTasks", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductTasksResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeProducts(self, request):
    """List products.

    :param request: Request instance for DescribeProducts.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductsRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeProductsResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeProducts", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeProductsResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribePushResourceTaskStatistics(self, request):
    """Query push-resource task statistics.

    :param request: Request instance for DescribePushResourceTaskStatistics.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribePushResourceTaskStatisticsRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribePushResourceTaskStatisticsResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribePushResourceTaskStatistics", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribePushResourceTaskStatisticsResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeResourceTasks(self, request):
    """Query the resource push task list.

    :param request: Request instance for DescribeResourceTasks.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DescribeResourceTasksRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DescribeResourceTasksResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DescribeResourceTasks", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeResourceTasksResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DisableTopicRule(self, request):
    """Disable a topic rule.

    :param request: Request instance for DisableTopicRule.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DisableTopicRuleRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DisableTopicRuleResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DisableTopicRule", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DisableTopicRuleResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def DownloadDeviceResource(self, request):
    """Download a device resource.

    :param request: Request instance for DownloadDeviceResource.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.DownloadDeviceResourceRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.DownloadDeviceResourceResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("DownloadDeviceResource", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DownloadDeviceResourceResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def EditFirmware(self, request):
    """Edit firmware information.

    :param request: Request instance for EditFirmware.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.EditFirmwareRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.EditFirmwareResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("EditFirmware", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.EditFirmwareResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def EnableTopicRule(self, request):
    """Enable a topic rule.

    :param request: Request instance for EnableTopicRule.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.EnableTopicRuleRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.EnableTopicRuleResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("EnableTopicRule", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.EnableTopicRuleResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def GetAllVersion(self, request):
    """Get the list of all versions.

    :param request: Request instance for GetAllVersion.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.GetAllVersionRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.GetAllVersionResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("GetAllVersion", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.GetAllVersionResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def GetCOSURL(self, request):
    """Get the COS URL where firmware is stored.

    :param request: Request instance for GetCOSURL.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.GetCOSURLRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.GetCOSURLResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("GetCOSURL", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.GetCOSURLResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def GetUserResourceInfo(self, request):
    """Query user resource usage information.

    :param request: Request instance for GetUserResourceInfo.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.GetUserResourceInfoRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.GetUserResourceInfoResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("GetUserResourceInfo", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.GetUserResourceInfoResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def ListFirmwares(self, request):
    """Get the firmware list.

    :param request: Request instance for ListFirmwares.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.ListFirmwaresRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.ListFirmwaresResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("ListFirmwares", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListFirmwaresResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def ListLog(self, request):
    """View log information.

    :param request: Request instance for ListLog.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.ListLogRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.ListLogResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("ListLog", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListLogResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def ListLogPayload(self, request):
    """Get the log content list.

    :param request: Request instance for ListLogPayload.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.ListLogPayloadRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.ListLogPayloadResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("ListLogPayload", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListLogPayloadResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def ListSDKLog(self, request):
    """Get logs reported by devices.

    :param request: Request instance for ListSDKLog.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.ListSDKLogRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.ListSDKLogResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("ListSDKLog", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListSDKLogResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def ListTopicRules(self, request):
    """Get the topic rule list with pagination.

    :param request: Request instance for ListTopicRules.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.ListTopicRulesRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.ListTopicRulesResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("ListTopicRules", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ListTopicRulesResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def PublishBroadcastMessage(self, request):
    """Publish a broadcast message.

    :param request: Request instance for PublishBroadcastMessage.
    :type request: :class:`tencentcloud.iotcloud.v20210408.models.PublishBroadcastMessageRequest`
    :rtype: :class:`tencentcloud.iotcloud.v20210408.models.PublishBroadcastMessageResponse`
    """
    try:
        params = request._serialize()
        headers = request.headers
        body = self.call("PublishBroadcastMessage", params, headers=headers)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.PublishBroadcastMessageResponse()
            model._deserialize(response["Response"])
            return model
        # Service-side error: surface code, message and request id.
        code = response["Response"]["Error"]["Code"]
        message = response["Response"]["Error"]["Message"]
        reqid = response["Response"]["RequestId"]
        raise TencentCloudSDKException(code, message, reqid)
    except TencentCloudSDKException:
        raise
    except Exception as e:
        # Python 3 exceptions have no ``message`` attribute; use str(e).
        raise TencentCloudSDKException(type(e).__name__, str(e))
def PublishMessage(self, request):
"""本接口(PublishMessage)用于向某个主题发消息。
:param request: Request instance for PublishMessage.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.PublishMessageRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.PublishMessageResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("PublishMessage", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.PublishMessageResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def PublishRRPCMessage(self, request):
"""发布RRPC消息
:param request: Request instance for PublishRRPCMessage.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.PublishRRPCMessageRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.PublishRRPCMessageResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("PublishRRPCMessage", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.PublishRRPCMessageResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ReplaceTopicRule(self, request):
"""本接口(ReplaceTopicRule)用于修改替换规则
:param request: Request instance for ReplaceTopicRule.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.ReplaceTopicRuleRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.ReplaceTopicRuleResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("ReplaceTopicRule", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ReplaceTopicRuleResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def ResetDeviceState(self, request):
"""重置设备的连接状态
:param request: Request instance for ResetDeviceState.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.ResetDeviceStateRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.ResetDeviceStateResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("ResetDeviceState", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.ResetDeviceStateResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def RetryDeviceFirmwareTask(self, request):
"""重试设备升级任务
:param request: Request instance for RetryDeviceFirmwareTask.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.RetryDeviceFirmwareTaskRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.RetryDeviceFirmwareTaskResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("RetryDeviceFirmwareTask", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.RetryDeviceFirmwareTaskResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def SetProductsForbiddenStatus(self, request):
"""批量设置产品禁用状态
:param request: Request instance for SetProductsForbiddenStatus.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.SetProductsForbiddenStatusRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.SetProductsForbiddenStatusResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("SetProductsForbiddenStatus", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.SetProductsForbiddenStatusResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UnbindDevices(self, request):
"""本接口(UnbindDevices)用于网关设备批量解绑子设备
:param request: Request instance for UnbindDevices.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UnbindDevicesRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UnbindDevicesResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UnbindDevices", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UnbindDevicesResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateDeviceAvailableState(self, request):
"""启用或者禁用设备
:param request: Request instance for UpdateDeviceAvailableState.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDeviceAvailableStateRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDeviceAvailableStateResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateDeviceAvailableState", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateDeviceAvailableStateResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateDeviceLogLevel(self, request):
"""设置设备上报的日志级别
:param request: Request instance for UpdateDeviceLogLevel.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDeviceLogLevelRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDeviceLogLevelResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateDeviceLogLevel", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateDeviceLogLevelResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateDevicePSK(self, request):
"""本接口(UpdateDevicePSK)用于更新设备的PSK
:param request: Request instance for UpdateDevicePSK.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDevicePSKRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDevicePSKResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateDevicePSK", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateDevicePSKResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateDeviceShadow(self, request):
"""本接口(UpdateDeviceShadow)用于更新虚拟设备信息。
:param request: Request instance for UpdateDeviceShadow.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDeviceShadowRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDeviceShadowResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateDeviceShadow", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateDeviceShadowResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateDevicesEnableState(self, request):
"""批量启用或者禁用设备
:param request: Request instance for UpdateDevicesEnableState.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDevicesEnableStateRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateDevicesEnableStateResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateDevicesEnableState", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateDevicesEnableStateResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdatePrivateCA(self, request):
"""更新私有CA证书
:param request: Request instance for UpdatePrivateCA.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdatePrivateCARequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdatePrivateCAResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdatePrivateCA", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdatePrivateCAResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateProductDynamicRegister(self, request):
"""更新产品动态注册的配置
:param request: Request instance for UpdateProductDynamicRegister.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateProductDynamicRegisterRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateProductDynamicRegisterResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateProductDynamicRegister", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateProductDynamicRegisterResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateProductPrivateCA(self, request):
"""更新产品的私有CA
:param request: Request instance for UpdateProductPrivateCA.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateProductPrivateCARequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateProductPrivateCAResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateProductPrivateCA", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateProductPrivateCAResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UpdateTopicPolicy(self, request):
"""本接口(UpdateTopicPolicy)用于更新Topic信息
:param request: Request instance for UpdateTopicPolicy.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UpdateTopicPolicyRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UpdateTopicPolicyResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UpdateTopicPolicy", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UpdateTopicPolicyResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message)
def UploadFirmware(self, request):
"""本接口(UploadFirmware)用于上传设备固件信息
:param request: Request instance for UploadFirmware.
:type request: :class:`tencentcloud.iotcloud.v20210408.models.UploadFirmwareRequest`
:rtype: :class:`tencentcloud.iotcloud.v20210408.models.UploadFirmwareResponse`
"""
try:
params = request._serialize()
headers = request.headers
body = self.call("UploadFirmware", params, headers=headers)
response = json.loads(body)
if "Error" not in response["Response"]:
model = models.UploadFirmwareResponse()
model._deserialize(response["Response"])
return model
else:
code = response["Response"]["Error"]["Code"]
message = response["Response"]["Error"]["Message"]
reqid = response["Response"]["RequestId"]
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) | {
"content_hash": "a026bb442a9eb878b669bc2106d7385d",
"timestamp": "",
"source": "github",
"line_count": 2099,
"max_line_length": 112,
"avg_line_length": 42.23296808003811,
"alnum_prop": 0.5952936929619728,
"repo_name": "tzpBingo/github-trending",
"id": "1a02f1bde334837f71fa2ac48eaf421829645d40",
"size": "91149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codespace/python/tencentcloud/iotcloud/v20210408/iotcloud_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "11470"
},
{
"name": "HTML",
"bytes": "1543"
},
{
"name": "Python",
"bytes": "49985109"
},
{
"name": "Shell",
"bytes": "18039"
}
],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand, CommandError
from payments.models import Payment, ACTIVE_BACKENDS
class Command(BaseCommand):
    """Poll the bitcoin payment backend for confirmation of pending payments."""
    help = "Check bitcoin payments status"
    def handle(self, *args, **options):
        # The bitcoin backend must be configured before we can poll it.
        if 'bitcoin' not in ACTIVE_BACKENDS:
            raise CommandError("bitcoin backend not active.")
        backend = ACTIVE_BACKENDS['bitcoin']
        pending = Payment.objects.filter(backend_id='bitcoin', status='new')
        self.stdout.write("Found %d active unconfirmed payments." % len(pending))
        for payment in pending:
            self.stdout.write("Checking payment #%d... " % payment.id, ending="")
            # The backend updates payment.status in place.
            backend.check(payment)
            outcome = "OK." if payment.status == 'confirmed' else "Waiting"
            self.stdout.write(outcome)
| {
"content_hash": "3b7a3c6d1a511daf0623473d237b41ba",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 82,
"avg_line_length": 29.642857142857142,
"alnum_prop": 0.6180722891566265,
"repo_name": "CCrypto/ccvpn3",
"id": "0a0e68bd31c44f6045fb1c14fae945def7dd299d",
"size": "830",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "payments/management/commands/check_btc_payments.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16467"
},
{
"name": "HTML",
"bytes": "54117"
},
{
"name": "JavaScript",
"bytes": "3594"
},
{
"name": "Python",
"bytes": "171906"
},
{
"name": "Shell",
"bytes": "320"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-declare the ``admin_note`` field on every account model.

    The same TextField definition applies to all seven models, so the
    operations list is generated from the model names instead of being
    written out seven times.
    """
    dependencies = [
        ('account', '0002_auto_20160130_1724'),
    ]
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name='admin_note',
            # A fresh field instance per operation; Django deconstructs each
            # one independently during migration serialization.
            field=models.TextField(help_text=b'Not publicly visible', null=True, verbose_name='admin note', blank=True),
        )
        for model_name in (
            'address',
            'organization',
            'organizationmember',
            'socialcontactlink',
            'user',
            'usergroup',
            'usergroupmember',
        )
    ]
| {
"content_hash": "6d3dec40cc7f3524660202ccff8c790d",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 120,
"avg_line_length": 38.625,
"alnum_prop": 0.5992448759439051,
"repo_name": "ninapavlich/lesleyloraine",
"id": "ac971adc18bc8729805e76b0de184c807b9f0cfc",
"size": "1878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lesleyloraine/apps/account/migrations/0003_auto_20160205_2337.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "276066"
},
{
"name": "HTML",
"bytes": "49917"
},
{
"name": "JavaScript",
"bytes": "86784"
},
{
"name": "Python",
"bytes": "304843"
}
],
"symlink_target": ""
} |
from __future__ import print_function, absolute_import, division
import warnings
import numpy as np
from numpy.ma.core import nomask
import dask.array as da
from astropy import convolution
from astropy import units as u
from astropy import wcs
#from astropy import log
from astropy.io.fits import Header, HDUList, PrimaryHDU, BinTableHDU, FITS_rec
from radio_beam import Beam, Beams
from astropy.io.registry import UnifiedReadWriteMethod
from . import spectral_axis
from .io.core import LowerDimensionalObjectWrite
from .utils import SliceWarning, BeamWarning, SmoothingWarning, FITSWarning
from .cube_utils import convert_bunit
from . import wcs_utils
from .masks import BooleanArrayMask, MaskBase
from .base_class import (BaseNDClass, SpectralAxisMixinClass,
SpatialCoordMixinClass, MaskableArrayMixinClass,
MultiBeamMixinClass, BeamMixinClass,
HeaderMixinClass
)
from . import cube_utils
__all__ = ['LowerDimensionalObject', 'Projection', 'Slice', 'OneDSpectrum']
class LowerDimensionalObject(u.Quantity, BaseNDClass, HeaderMixinClass):
    """
    Generic class for 1D and 2D objects.
    """
    @property
    def hdu(self):
        """A `~astropy.io.fits.PrimaryHDU` view of this object, carrying the
        data, BUNIT, the WCS header (when a WCS is set), and beam keywords
        (when ``meta['beam']`` is present)."""
        if self.wcs is None:
            hdu = PrimaryHDU(self.value)
        else:
            hdu = PrimaryHDU(self.value, header=self.header)
        hdu.header['BUNIT'] = self.unit.to_string(format='fits')
        if 'beam' in self.meta:
            hdu.header.update(self.meta['beam'].to_header_keywords())
        return hdu
    def read(self, *args, **kwargs):
        # Reading is provided by subclasses / the I/O registry, not here.
        raise NotImplementedError()
    write = UnifiedReadWriteMethod(LowerDimensionalObjectWrite)
    def __getslice__(self, start, end, increment=None):
        # I don't know why this is needed, but apparently one of the inherited
        # classes implements getslice, which forces us to overwrite it
        # I can't find any examples where __getslice__ is actually implemented,
        # though, so this seems like a deep and frightening bug.
        #log.debug("Getting a slice from {0} to {1}".format(start,end))
        return self.__getitem__(slice(start, end, increment))
    def __getitem__(self, key, **kwargs):
        """
        Return a new `~spectral_cube.lower_dimensional_structures.LowerDimensionalObject` of the same class while keeping
        other properties fixed.
        """
        new_qty = super(LowerDimensionalObject, self).__getitem__(key)
        if new_qty.ndim < 2:
            # do not return a projection
            return u.Quantity(new_qty)
        if self._wcs is not None:
            if ((isinstance(key, tuple) and
                 any(isinstance(k, slice) for k in key) and
                 len(key) > self.ndim)):
                # Example cases include: indexing tricks like [:,:,None]
                warnings.warn("Slice {0} cannot be used on this {1}-dimensional"
                              " array's WCS. If this is intentional, you "
                              " should use this {2}'s ``array`` or ``quantity``"
                              " attribute."
                              .format(key, self.ndim, type(self)),
                              SliceWarning
                             )
                return self.quantity[key]
            else:
                newwcs = self._wcs[key]
        else:
            newwcs = None
        new = self.__class__(value=new_qty.value,
                             unit=new_qty.unit,
                             copy=False,
                             wcs=newwcs,
                             meta=self._meta,
                             mask=(self._mask[key] if self._mask is not nomask
                                   else None),
                             header=self._header,
                             **kwargs)
        # NOTE(review): these attributes were already passed to the
        # constructor above; the re-assignment forces the private state
        # (including ``nomask`` instead of ``None``) regardless of what
        # the subclass constructor did with its arguments.
        new._wcs = newwcs
        new._meta = self._meta
        new._mask=(self._mask[key] if self._mask is not nomask else nomask)
        new._header = self._header
        return new
    def __array_finalize__(self, obj):
        # Propagate private metadata whenever numpy creates a view/copy of
        # this object; ``obj`` may be None, a plain ndarray, or another
        # LowerDimensionalObject, so every lookup uses getattr with a default.
        self._wcs = getattr(obj, '_wcs', None)
        self._meta = getattr(obj, '_meta', None)
        self._mask = getattr(obj, '_mask', None)
        self._header = getattr(obj, '_header', None)
        self._spectral_unit = getattr(obj, '_spectral_unit', None)
        self._fill_value = getattr(obj, '_fill_value', np.nan)
        self._wcs_tolerance = getattr(obj, '_wcs_tolerance', 0.0)
        # Multi-beam spectra carry ``_beams``; everything else a single ``_beam``.
        if isinstance(obj, VaryingResolutionOneDSpectrum):
            self._beams = getattr(obj, '_beams', None)
        else:
            self._beam = getattr(obj, '_beam', None)
        super(LowerDimensionalObject, self).__array_finalize__(obj)
    @property
    def __array_priority__(self):
        # Make numpy prefer this class over plain Quantity in mixed binary ops.
        return super(LowerDimensionalObject, self).__array_priority__*2
    @property
    def array(self):
        """
        Get a pure array representation of the LDO. Useful when multiplying
        and using numpy indexing tricks.
        """
        return np.asarray(self)
    @property
    def _data(self):
        # the _data property is required by several other mixins
        # (which probably means defining it here is a bad design)
        return self.array
    @property
    def quantity(self):
        """
        Get a pure `~astropy.units.Quantity` representation of the LDO.
        """
        return u.Quantity(self)
    def to(self, unit, equivalencies=[], freq=None):
        """
        Return a new `~spectral_cube.lower_dimensional_structures.Projection`
        of the same class with the specified unit.
        See `astropy.units.Quantity.to` for further details.
        """
        if not isinstance(unit, u.Unit):
            unit = u.Unit(unit)
        if unit == self.unit:
            # No copying
            return self
        if ((self.unit.is_equivalent(u.Jy / u.beam) and
             not any({u.Jy/u.beam, u.K}.issubset(set(eq)) for eq in equivalencies))):
            # the 'not any' above checks that there is not already a defined
            # Jy<->K equivalency. If there is, the code below is redundant
            # and will cause problems.
            if hasattr(self, 'beams'):
                # Multi-beam object: per-channel Jy/beam -> K factors.
                factor = (self.jtok_factors(equivalencies=equivalencies) *
                          (self.unit*u.beam).to(u.Jy))
            else:
                # replace "beam" with the actual beam
                if not hasattr(self, 'beam'):
                    raise ValueError("To convert objects with Jy/beam units, "
                                     "the object needs to have a beam defined.")
                brightness_unit = self.unit * u.beam
                # create a beam equivalency for brightness temperature
                if freq is None:
                    try:
                        freq = self.with_spectral_unit(u.Hz).spectral_axis
                    except AttributeError:
                        raise TypeError("Object of type {0} has no spectral "
                                        "information. `freq` must be provided for"
                                        " unit conversion from Jy/beam"
                                        .format(type(self)))
                else:
                    if not freq.unit.is_equivalent(u.Hz):
                        raise u.UnitsError("freq must be given in equivalent "
                                           "frequency units.")
                bmequiv = self.beam.jtok_equiv(freq)
                # backport to handle astropy < 3: the beam equivalency was only
                # modified to handle jy/beam in astropy 3
                if bmequiv[0] == u.Jy:
                    bmequiv.append([u.Jy/u.beam, u.K, bmequiv[2], bmequiv[3]])
                factor = brightness_unit.to(unit,
                                            equivalencies=bmequiv + list(equivalencies))
        else:
            # scaling factor
            factor = self.unit.to(unit, equivalencies=equivalencies)
        converted_array = (self.quantity * factor).value
        # use private versions of variables, not the generated property
        # versions
        # Not entirely sure the use of __class__ here is kosher, but we do want
        # self.__class__, not super()
        new = self.__class__(value=converted_array, unit=unit, copy=True,
                             wcs=self._wcs, meta=self._meta, mask=self._mask,
                             header=self._header)
        return new
    @property
    def _mask(self):
        """ Annoying hack to deal with np.ma.core.is_mask failures (I don't
        like using __ but I think it's necessary here)"""
        if self.__mask is None:
            # need this to be *exactly* the numpy boolean False
            return nomask
        return self.__mask
    @_mask.setter
    def _mask(self, value):
        # Stored under the name-mangled attribute; the getter above maps
        # None back to numpy's ``nomask`` sentinel.
        self.__mask = value
    def shrink_mask(self):
        """
        Copy of the numpy masked_array shrink_mask method. This is essentially
        a hack needed for matplotlib to show images.
        """
        m = self._mask
        if m.ndim and not m.any():
            self._mask = nomask
        return self
    def _initial_set_mask(self, mask):
        """
        Helper tool to validate mask when originally setting it in __new__
        Note that because this is intended to be used in __new__, order
        matters: ``self`` must have ``_wcs``, for example.
        """
        if mask is None:
            # Default: everything unmasked.
            mask = BooleanArrayMask(np.ones_like(self.value, dtype=bool),
                                    self._wcs, shape=self.value.shape)
        elif isinstance(mask, np.ndarray):
            if mask.shape != self.value.shape:
                raise ValueError("Mask shape must match the {0} shape."
                                 .format(self.__class__.__name__)
                                )
            mask = BooleanArrayMask(mask, self._wcs, shape=self.value.shape)
        elif isinstance(mask, MaskBase):
            pass
        else:
            raise TypeError("mask of type {} is not a supported mask "
                            "type.".format(type(mask)))
        # Validate the mask before setting
        mask._validate_wcs(new_data=self.value, new_wcs=self._wcs,
                           wcs_tolerance=self._wcs_tolerance)
        self._mask = mask
class Projection(LowerDimensionalObject, SpatialCoordMixinClass,
MaskableArrayMixinClass, BeamMixinClass):
    def __new__(cls, value, unit=None, dtype=None, copy=True, wcs=None,
                meta=None, mask=None, header=None, beam=None,
                fill_value=np.nan, read_beam=False, wcs_tolerance=0.0):
        """
        Construct a 2D Projection from ``value`` (must be a 2-d array).
        The WCS, if given, must also be 2-dimensional. A beam is resolved
        in priority order: explicit ``beam`` argument, ``meta['beam']``,
        then (if ``read_beam`` is set) the FITS ``header``.
        """
        if np.asarray(value).ndim != 2:
            raise ValueError("value should be a 2-d array")
        if wcs is not None and wcs.wcs.naxis != 2:
            raise ValueError("wcs should have two dimension")
        self = u.Quantity.__new__(cls, value, unit=unit, dtype=dtype,
                                  copy=copy).view(cls)
        self._wcs = wcs
        self._meta = {} if meta is None else meta
        self._wcs_tolerance = wcs_tolerance
        # Mask validation requires _wcs/_wcs_tolerance to be set first.
        self._initial_set_mask(mask)
        self._fill_value = fill_value
        if header is not None:
            self._header = header
        else:
            self._header = Header()
        if beam is None:
            if "beam" in self.meta:
                beam = self.meta['beam']
            elif read_beam:
                beam = cube_utils.try_load_beam(header)
                if beam is None:
                    warnings.warn("Cannot load beam from header.",
                                  BeamWarning
                                  )
        if beam is not None:
            self.beam = beam
            self.meta['beam'] = beam
            # TODO: Enable header updating when non-celestial slices are
            # properly handled in the WCS object.
            # self._header.update(beam.to_header_keywords())
        self._cache = {}
        return self
def with_beam(self, beam):
'''
Attach a new beam object to the Projection.
Parameters
----------
beam : `~radio_beam.Beam`
A new beam object.
'''
meta = self.meta.copy()
meta['beam'] = beam
return self._new_projection_with(beam=beam, meta=meta)
def with_fill_value(self, fill_value):
"""
Create a new :class:`Projection` or :class:`Slice` with a different
``fill_value``.
"""
return self._new_projection_with(fill_value=fill_value)
    @property
    def _new_thing_with(self):
        # Generic alias for the class-specific copy-with-replacements
        # constructor — presumably consumed by shared mixin code so the same
        # helpers work across cubes, projections, and spectra (TODO confirm).
        return self._new_projection_with
    def _new_projection_with(self, data=None, wcs=None, mask=None, meta=None,
                             fill_value=None, spectral_unit=None, unit=None,
                             header=None, wcs_tolerance=None, beam=None,
                             **kwargs):
        """
        Build a new Projection, replacing only the attributes passed in;
        every argument left as None is inherited from ``self``. Raises
        `~astropy.units.UnitsError` when both ``data`` and ``unit`` carry
        units and they disagree.
        """
        data = self._data if data is None else data
        if unit is None and hasattr(data, 'unit'):
            # Unit comes from the data; it must agree with the current unit
            # unless the caller overrides it explicitly.
            if data.unit != self.unit:
                raise u.UnitsError("New data unit '{0}' does not"
                                   " match unit '{1}'. You can"
                                   " override this by specifying the"
                                   " `unit` keyword."
                                   .format(data.unit, self.unit))
            unit = data.unit
        elif unit is None:
            unit = self.unit
        elif unit is not None:
            # convert string units to Units
            if not isinstance(unit, u.Unit):
                unit = u.Unit(unit)
            if hasattr(data, 'unit'):
                if u.Unit(unit) != data.unit:
                    raise u.UnitsError("The specified new cube unit '{0}' "
                                       "does not match the input unit '{1}'."
                                       .format(unit, data.unit))
            else:
                data = u.Quantity(data, unit=unit, copy=False)
        wcs = self._wcs if wcs is None else wcs
        mask = self._mask if mask is None else mask
        if meta is None:
            meta = {}
            meta.update(self._meta)
        if unit is not None:
            meta['BUNIT'] = unit.to_string(format='FITS')
        fill_value = self._fill_value if fill_value is None else fill_value
        if beam is None:
            # Carry over the existing beam when the object has one.
            if hasattr(self, 'beam'):
                beam = self.beam
        newproj = self.__class__(value=data, wcs=wcs, mask=mask, meta=meta,
                                 unit=unit, fill_value=fill_value,
                                 header=header or self._header,
                                 wcs_tolerance=wcs_tolerance or self._wcs_tolerance,
                                 beam=beam,
                                 **kwargs)
        return newproj
    @staticmethod
    def from_hdu(hdu):
        '''
        Return a projection from a FITS HDU.

        Accepts either a single HDU or an `~astropy.io.fits.HDUList`
        (in which case the first HDU is used). The HDU data must be
        two-dimensional; unit (from BUNIT), WCS, and beam are read from
        the header when present.
        '''
        if isinstance(hdu, HDUList):
            hdul = hdu
            hdu = hdul[0]
        if not len(hdu.data.shape) == 2:
            raise ValueError("HDU must contain two-dimensional data.")
        meta = {}
        mywcs = wcs.WCS(hdu.header)
        if "BUNIT" in hdu.header:
            unit = convert_bunit(hdu.header["BUNIT"])
            meta["BUNIT"] = hdu.header["BUNIT"]
        else:
            unit = None
        beam = cube_utils.try_load_beam(hdu.header)
        self = Projection(hdu.data, unit=unit, wcs=mywcs, meta=meta,
                          header=hdu.header, beam=beam)
        return self
    def quicklook(self, filename=None, use_aplpy=True, aplpy_kwargs={}):
        """
        Use `APLpy <https://pypi.python.org/pypi/APLpy>`_ to make a quick-look
        image of the projection. This will make the ``FITSFigure`` attribute
        available.
        If there are unmatched celestial axes, this will instead show an image
        without axis labels.
        Parameters
        ----------
        filename : str or None
            Optional - the filename to save the quicklook to.
        """
        if use_aplpy:
            try:
                # Reuse an existing FITSFigure when one was already created.
                if not hasattr(self, 'FITSFigure'):
                    import aplpy
                    self.FITSFigure = aplpy.FITSFigure(self.hdu,
                                                       **aplpy_kwargs)
                self.FITSFigure.show_grayscale()
                self.FITSFigure.add_colorbar()
                if filename is not None:
                    self.FITSFigure.save(filename)
            except (wcs.InconsistentAxisTypesError, ImportError):
                # aplpy missing or WCS unusable: fall back to bare matplotlib.
                self._quicklook_mpl(filename=filename)
        else:
            self._quicklook_mpl(filename=filename)
def _quicklook_mpl(self, filename=None):
from matplotlib import pyplot
self.figure = pyplot.gcf()
self.image = pyplot.imshow(self.value)
if filename is not None:
self.figure.savefig(filename)
    def convolve_to(self, beam, convolve=convolution.convolve_fft,
                    **kwargs):
        """
        Convolve the image to a specified beam.
        Parameters
        ----------
        beam : `radio_beam.Beam`
            The beam to convolve to
        convolve : function
            The astropy convolution function to use, either
            `astropy.convolution.convolve` or
            `astropy.convolution.convolve_fft`
        kwargs : dict
            Passed on to the ``convolve`` function.
        Returns
        -------
        proj : `Projection`
            A Projection convolved to the given ``beam`` object.
        """
        # Convolution only makes sense with celestial WCS axes present.
        self._raise_wcs_no_celestial()
        if not hasattr(self, 'beam'):
            raise ValueError("No beam is contained in Projection.meta.")
        # Check if the beams are the same.
        if beam == self.beam:
            warnings.warn("The given beam is identical to the current beam. "
                          "Skipping convolution.")
            return self
        # Pixel scale in degrees, taken as sqrt of the projected pixel area;
        # assumes approximately square pixels -- TODO confirm for strongly
        # distorted projections.
        pixscale = wcs.utils.proj_plane_pixel_area(self.wcs.celestial)**0.5 * u.deg
        # The kernel is the target beam deconvolved by the current beam, so
        # that convolving takes the data to exactly the requested resolution.
        convolution_kernel = \
            beam.deconvolve(self.beam).as_kernel(pixscale)
        newdata = convolve(self.value, convolution_kernel,
                           normalize_kernel=True,
                           **kwargs)
        # Return a new Projection carrying the target beam.
        self = Projection(newdata, unit=self.unit, wcs=self.wcs,
                          meta=self.meta, header=self.header,
                          beam=beam)
        return self
    def reproject(self, header, order='bilinear'):
        """
        Reproject the image into a new header.
        Parameters
        ----------
        header : `astropy.io.fits.Header`
            A header specifying a cube in valid WCS
        order : int or str, optional
            The order of the interpolation (if ``mode`` is set to
            ``'interpolation'``). This can be either one of the following
            strings:
                * 'nearest-neighbor'
                * 'bilinear'
                * 'biquadratic'
                * 'bicubic'
            or an integer. A value of ``0`` indicates nearest neighbor
            interpolation.
        """
        # Reprojection requires celestial WCS axes.
        self._raise_wcs_no_celestial()
        try:
            from reproject.version import version
        except ImportError:
            raise ImportError("Requires the reproject package to be"
                              " installed.")
        # Need version > 0.2 to work with cubes
        from distutils.version import LooseVersion
        if LooseVersion(version) < "0.3":
            # NOTE(review): this *raises* the Warning class, halting execution,
            # rather than emitting a warning via warnings.warn -- confirm
            # this hard stop is intended.
            raise Warning("Requires version >=0.3 of reproject. The current "
                          "version is: {}".format(version))
        from reproject import reproject_interp
        # TODO: Find the minimal footprint that contains the header and only reproject that
        # (see FITS_tools.regrid_cube for a guide on how to do this)
        newwcs = wcs.WCS(header)
        # Output shape from the NAXISn keywords, reversed into numpy
        # (row-major) axis order.
        shape_out = [header['NAXIS{0}'.format(i + 1)] for i in range(header['NAXIS'])][::-1]
        newproj, newproj_valid = reproject_interp((self.value,
                                                   self.header),
                                                  newwcs,
                                                  shape_out=shape_out,
                                                  order=order)
        # read_beam=True re-loads beam information from the target header.
        self = Projection(newproj, unit=self.unit, wcs=newwcs,
                          meta=self.meta, header=header,
                          read_beam=True)
        return self
def subimage(self, xlo='min', xhi='max', ylo='min', yhi='max'):
"""
Extract a region spatially.
When spatial WCS dimensions are given as an `~astropy.units.Quantity`,
the spatial coordinates of the 'lo' and 'hi' corners are solved together.
This minimizes WCS variations due to the sky curvature when slicing from
a large (>1 deg) image.
Parameters
----------
[xy]lo/[xy]hi : int or `astropy.units.Quantity` or `min`/`max`
The endpoints to extract. If given as a quantity, will be
interpreted as World coordinates. If given as a string or
int, will be interpreted as pixel coordinates.
"""
self._raise_wcs_no_celestial()
# Solve for the spatial pixel indices together
limit_dict = wcs_utils.find_spatial_pixel_index(self, xlo, xhi, ylo, yhi)
slices = [slice(limit_dict[xx + 'lo'], limit_dict[xx + 'hi'])
for xx in 'yx']
return self[tuple(slices)]
def to(self, unit, equivalencies=[], freq=None):
"""
Return a new `~spectral_cube.lower_dimensional_structures.Projection`
of the same class with the specified unit.
See `astropy.units.Quantity.to` for further details.
"""
return super(Projection, self).to(unit, equivalencies, freq)
# A slice is just like a projection in every way
class Slice(Projection):
    """A 2D slice through a cube; behaviorally identical to `Projection`."""
    pass
class BaseOneDSpectrum(LowerDimensionalObject, MaskableArrayMixinClass,
                       SpectralAxisMixinClass):
    """
    Properties shared between OneDSpectrum and VaryingResolutionOneDSpectrum.
    """
    def __new__(cls, value, unit=None, dtype=None, copy=True, wcs=None,
                meta=None, mask=None, header=None, spectral_unit=None,
                fill_value=np.nan, wcs_tolerance=0.0):
        """Create a 1-D spectrum wrapping ``value`` with optional WCS,
        mask, header, and unit metadata.  ``value`` must be 1-D and any
        supplied ``wcs`` must have exactly one axis."""
        #log.debug("Creating a OneDSpectrum with __new__")
        if np.asarray(value).ndim != 1:
            raise ValueError("value should be a 1-d array")
        if wcs is not None and wcs.wcs.naxis != 1:
            # Bug fix: the check enforces a one-axis WCS but the message
            # previously claimed "two dimension".
            raise ValueError("wcs should have one dimension")
        self = u.Quantity.__new__(cls, value, unit=unit, dtype=dtype,
                                  copy=copy).view(cls)
        self._wcs = wcs
        self._meta = {} if meta is None else meta
        self._wcs_tolerance = wcs_tolerance
        self._initial_set_mask(mask)
        self._fill_value = fill_value
        if header is not None:
            self._header = header
        else:
            self._header = Header()
        # Spectral unit priority: explicit argument, then the header's
        # CUNIT1, then the WCS's first-axis unit.
        self._spectral_unit = spectral_unit
        if spectral_unit is None:
            if 'CUNIT1' in self._header:
                self._spectral_unit = u.Unit(self._header['CUNIT1'])
            elif self._wcs is not None:
                self._spectral_unit = u.Unit(self._wcs.wcs.cunit[0])
        return self
def __repr__(self):
prefixstr = '<' + self.__class__.__name__ + ' '
arrstr = np.array2string(self.filled_data[:].value, separator=',',
prefix=prefixstr)
return '{0}{1}{2:s}>'.format(prefixstr, arrstr, self._unitstr)
    @staticmethod
    def from_hdu(hdu):
        '''
        Return a OneDSpectrum from a FITS HDU or HDU list.
        A multi-beam table in the HDU list yields a
        VaryingResolutionOneDSpectrum; otherwise a plain OneDSpectrum.
        '''
        if isinstance(hdu, HDUList):
            hdul = hdu
            hdu = hdul[0]
        else:
            # Wrap a bare HDU so try_load_beams below can search a list.
            hdul = HDUList([hdu])
        if not len(hdu.data.shape) == 1:
            raise ValueError("HDU must contain one-dimensional data.")
        meta = {}
        mywcs = wcs.WCS(hdu.header)
        if "BUNIT" in hdu.header:
            unit = convert_bunit(hdu.header["BUNIT"])
            meta["BUNIT"] = hdu.header["BUNIT"]
        else:
            unit = None
        # Silence FITS warnings while probing for a beams table; a result
        # with a length is a multi-beam table.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=FITSWarning)
            beam = cube_utils.try_load_beams(hdul)
            if hasattr(beam, '__len__'):
                beams = beam
            else:
                beams = None
        if beams is not None:
            self = VaryingResolutionOneDSpectrum(hdu.data, unit=unit,
                                                 wcs=mywcs, meta=meta,
                                                 header=hdu.header,
                                                 beams=beams)
        else:
            # Fall back to a single beam read from the header (may be None).
            beam = cube_utils.try_load_beam(hdu.header)
            self = OneDSpectrum(hdu.data, unit=unit, wcs=mywcs, meta=meta,
                                header=hdu.header, beam=beam)
        return self
    @property
    def header(self):
        """FITS header with the spectral axis rescaled into the spectrum's
        current spectral unit (``self._spectral_unit``)."""
        header = super(BaseOneDSpectrum, self).header
        # Preserve the spectrum's spectral units: when the header's CUNIT1
        # disagrees with the active spectral unit, rescale CDELT1/CRVAL1 and
        # rewrite CUNIT1 accordingly.
        if 'CUNIT1' in header and self._spectral_unit != u.Unit(header['CUNIT1']):
            spectral_scale = spectral_axis.wcs_unit_scale(self._spectral_unit)
            header['CDELT1'] *= spectral_scale
            header['CRVAL1'] *= spectral_scale
            header['CUNIT1'] = self.spectral_axis.unit.to_string(format='FITS')
        return header
@property
def spectral_axis(self):
"""
A `~astropy.units.Quantity` array containing the central values of
each channel along the spectral axis.
"""
if self._wcs is None:
spec_axis = np.arange(self.size) * u.one
else:
spec_axis = self.wcs.wcs_pix2world(np.arange(self.size), 0)[0] * \
u.Unit(self.wcs.wcs.cunit[0])
if self._spectral_unit is not None:
spec_axis = spec_axis.to(self._spectral_unit)
return spec_axis
def quicklook(self, filename=None, drawstyle='steps-mid', **kwargs):
"""
Plot the spectrum with current spectral units in the currently open
figure
kwargs are passed to `matplotlib.pyplot.plot`
Parameters
----------
filename : str or Non
Optional - the filename to save the quicklook to.
"""
from matplotlib import pyplot
ax = pyplot.gca()
ax.plot(self.spectral_axis, self.filled_data[:].value,
drawstyle=drawstyle, **kwargs)
ax.set_xlabel(self.spectral_axis.unit.to_string(format='latex'))
ax.set_ylabel(self.unit)
if filename is not None:
pyplot.gcf().savefig(filename)
    def with_spectral_unit(self, unit, velocity_convention=None,
                           rest_value=None):
        """Return a copy of the spectrum with its spectral axis converted to
        ``unit``, updating the WCS, header, and (if present) the mask."""
        newwcs, newmeta = self._new_spectral_wcs(unit,
                                                 velocity_convention=velocity_convention,
                                                 rest_value=rest_value)
        # Rebuild the header from the new WCS, then rescale CDELT1 from the
        # WCS's native unit into the requested unit.
        newheader = self._nowcs_header.copy()
        newheader.update(newwcs.to_header())
        wcs_cunit = u.Unit(newheader['CUNIT1'])
        newheader['CUNIT1'] = unit.to_string(format='FITS')
        newheader['CDELT1'] *= wcs_cunit.to(unit)
        if self._mask is not None:
            # The mask carries its own WCS; convert it and re-point it at the
            # freshly built WCS so both stay consistent.
            newmask = self._mask.with_spectral_unit(unit,
                                                    velocity_convention=velocity_convention,
                                                    rest_value=rest_value)
            newmask._wcs = newwcs
        else:
            newmask = None
        return self._new_spectrum_with(wcs=newwcs, spectral_unit=unit,
                                       mask=newmask, meta=newmeta,
                                       header=newheader)
    def __getitem__(self, key, **kwargs):
        """Slice (or index) the spectrum; slices return a new spectrum with
        a sliced WCS and mask, scalar/fancy indexing returns a Quantity."""
        # Ideally this would live in VaryingResolutionOneDSpectrum, but the
        # code is about the same length either way, so it stays here: when a
        # beams attribute exists, slice it too and pass it through.
        try:
            kwargs['beams'] = self.beams[key]
        except (AttributeError, TypeError):
            pass
        new_qty = super(BaseOneDSpectrum, self).__getitem__(key)
        if isinstance(key, slice):
            new = self.__class__(value=new_qty.value,
                                 unit=new_qty.unit,
                                 copy=False,
                                 wcs=wcs_utils.slice_wcs(self._wcs, key,
                                                         shape=self.shape),
                                 meta=self._meta,
                                 mask=(self._mask[key]
                                       if self._mask is not nomask
                                       else nomask),
                                 header=self._header,
                                 wcs_tolerance=self._wcs_tolerance,
                                 fill_value=self.fill_value,
                                 **kwargs)
            return new
        else:
            # Non-slice indexing: apply the mask by NaN-ing excluded samples.
            if self._mask is not nomask:
                # Kind of a hack; this is probably inefficient
                bad = self._mask.exclude()[key]
                if isinstance(bad, da.Array):
                    # Dask-backed masks must be computed before fancy indexing.
                    bad = bad.compute()
                new_qty[bad] = np.nan
            return new_qty
def __getattribute__(self, attrname):
# This is a hack to handle dimensionality-reducing functions
# We want spectrum.max() to return a Quantity, not a spectrum
# Long-term, we really want `OneDSpectrum` to not inherit from
# `Quantity`, but for now this approach works.... we just have
# to add more functions to this list.
if attrname in ('min', 'max', 'std', 'mean', 'sum', 'cumsum',
'nansum', 'ptp', 'var'):
return getattr(self.quantity, attrname)
else:
return super(BaseOneDSpectrum, self).__getattribute__(attrname)
    def spectral_interpolate(self, spectral_grid,
                             suppress_smooth_warning=False,
                             fill_value=None):
        """
        Resample the spectrum onto a specific grid
        Parameters
        ----------
        spectral_grid : array
            An array of the spectral positions to regrid onto
        suppress_smooth_warning : bool
            If disabled, a warning will be raised when interpolating onto a
            grid that does not nyquist sample the existing grid. Disable this
            if you have already appropriately smoothed the data.
        fill_value : float
            Value for extrapolated spectral values that lie outside of
            the spectral range defined in the original data. The
            default is to use the nearest spectral channel in the
            cube.
        Returns
        -------
        spectrum : OneDSpectrum
        """
        assert spectral_grid.ndim == 1
        inaxis = self.spectral_axis.to(spectral_grid.unit)
        indiff = np.mean(np.diff(inaxis))
        outdiff = np.mean(np.diff(spectral_grid))
        # account for reversed axes: both axes are flipped to ascending order
        # for np.interp, and ``outslice`` records how to un-flip the output.
        if outdiff < 0:
            spectral_grid = spectral_grid[::-1]
            outdiff = np.mean(np.diff(spectral_grid))
            outslice = slice(None, None, -1)
        else:
            outslice = slice(None, None, 1)
        specslice = slice(None) if indiff >= 0 else slice(None, None, -1)
        inaxis = inaxis[specslice]
        indiff = np.mean(np.diff(inaxis))
        # insanity checks: after flipping, both spacings must be positive and
        # both axes strictly increasing; the output grid must also be linear.
        if indiff < 0 or outdiff < 0:
            raise ValueError("impossible.")
        assert np.all(np.diff(spectral_grid) > 0)
        assert np.all(np.diff(inaxis) > 0)
        np.testing.assert_allclose(np.diff(spectral_grid), outdiff,
                                   err_msg="Output grid must be linear")
        # Warn when the target grid under-samples the input (not Nyquist).
        if outdiff > 2 * indiff and not suppress_smooth_warning:
            warnings.warn("Input grid has too small a spacing. The data should "
                          "be smoothed prior to resampling.",
                          SmoothingWarning
                          )
        newspec = np.empty([spectral_grid.size], dtype=self.dtype)
        newmask = np.empty([spectral_grid.size], dtype='bool')
        newspec[outslice] = np.interp(spectral_grid.value, inaxis.value,
                                      self.filled_data[specslice].value,
                                      left=fill_value, right=fill_value)
        mask = self.mask.include()
        if all(mask):
            newmask = np.ones([spectral_grid.size], dtype='bool')
        else:
            # Interpolate the boolean mask as floats; any nonzero overlap
            # with an included input channel keeps the output channel.
            interped = np.interp(spectral_grid.value,
                                 inaxis.value, mask[specslice]) > 0
            newmask[outslice] = interped
        # Rebuild the spectral WCS for the (possibly un-flipped) output grid:
        # reference pixel 1 at the first output channel, signed cdelt.
        newwcs = self.wcs.deepcopy()
        newwcs.wcs.crpix[0] = 1
        newwcs.wcs.crval[0] = spectral_grid[0].value if outslice.step > 0 \
            else spectral_grid[-1].value
        newwcs.wcs.cunit[0] = spectral_grid.unit.to_string(format='FITS')
        newwcs.wcs.cdelt[0] = outdiff.value if outslice.step > 0 \
            else -outdiff.value
        newwcs.wcs.set()
        newheader = self._nowcs_header.copy()
        newheader.update(newwcs.to_header())
        wcs_cunit = u.Unit(newheader['CUNIT1'])
        newheader['CUNIT1'] = spectral_grid.unit.to_string(format='FITS')
        newheader['CDELT1'] *= wcs_cunit.to(spectral_grid.unit)
        newbmask = BooleanArrayMask(newmask, wcs=newwcs)
        return self._new_spectrum_with(data=newspec, wcs=newwcs, mask=newbmask,
                                       header=newheader,
                                       spectral_unit=spectral_grid.unit)
def spectral_smooth(self, kernel,
convolve=convolution.convolve,
**kwargs):
"""
Smooth the spectrum
Parameters
----------
kernel : `~astropy.convolution.Kernel1D`
A 1D kernel from astropy
convolve : function
The astropy convolution function to use, either
`astropy.convolution.convolve` or
`astropy.convolution.convolve_fft`
kwargs : dict
Passed to the convolve function
"""
newspec = convolve(self.value, kernel, normalize_kernel=True, **kwargs)
return self._new_spectrum_with(data=newspec)
def to(self, unit, equivalencies=[]):
"""
Return a new `~spectral_cube.lower_dimensional_structures.OneDSpectrum`
of the same class with the specified unit.
See `astropy.units.Quantity.to` for further details.
"""
return super(BaseOneDSpectrum, self).to(unit, equivalencies, freq=None)
def with_fill_value(self, fill_value):
"""
Create a new :class:`OneDSpectrum` with a different ``fill_value``.
"""
return self._new_spectrum_with(fill_value=fill_value)
    @property
    def _new_thing_with(self):
        # Generic alias used by shared mixin code: for a spectrum, "new thing"
        # means a new spectrum.
        return self._new_spectrum_with
    def _new_spectrum_with(self, data=None, wcs=None, mask=None, meta=None,
                           fill_value=None, spectral_unit=None, unit=None,
                           header=None, wcs_tolerance=None,
                           **kwargs):
        """Build a copy of this spectrum with selected attributes replaced.
        Any argument left as None inherits the current spectrum's value.
        Unit resolution: an explicit ``unit`` must agree with a Quantity
        ``data``'s own unit; with no explicit unit, the data's unit (or the
        current unit) is used.
        """
        data = self._data if data is None else data
        if unit is None and hasattr(data, 'unit'):
            # Quantity data with no override: its unit must match ours.
            if data.unit != self.unit:
                raise u.UnitsError("New data unit '{0}' does not"
                                   " match unit '{1}'.  You can"
                                   " override this by specifying the"
                                   " `unit` keyword."
                                   .format(data.unit, self.unit))
            unit = data.unit
        elif unit is None:
            unit = self.unit
        elif unit is not None:
            # convert string units to Units
            if not isinstance(unit, u.Unit):
                unit = u.Unit(unit)
            if hasattr(data, 'unit'):
                if u.Unit(unit) != data.unit:
                    raise u.UnitsError("The specified new cube unit '{0}' "
                                       "does not match the input unit '{1}'."
                                       .format(unit, data.unit))
            else:
                # Plain-array data: attach the requested unit without copying.
                data = u.Quantity(data, unit=unit, copy=False)
        wcs = self._wcs if wcs is None else wcs
        mask = self._mask if mask is None else mask
        if meta is None:
            meta = {}
            meta.update(self._meta)
        if unit is not None:
            meta['BUNIT'] = unit.to_string(format='FITS')
        fill_value = self._fill_value if fill_value is None else fill_value
        spectral_unit = self._spectral_unit if spectral_unit is None else u.Unit(spectral_unit)
        spectrum = self.__class__(value=data, wcs=wcs, mask=mask, meta=meta,
                                  unit=unit, fill_value=fill_value,
                                  header=header or self._header,
                                  wcs_tolerance=wcs_tolerance or self._wcs_tolerance,
                                  **kwargs)
        # Set after construction so the explicit choice overrides whatever
        # __new__ inferred from the header/WCS.
        spectrum._spectral_unit = spectral_unit
        return spectrum
class OneDSpectrum(BaseOneDSpectrum, BeamMixinClass):
    """A 1-D spectrum with (at most) a single resolution beam."""
    def __new__(cls, value, beam=None, read_beam=False, **kwargs):
        # Beam priority: explicit argument, then meta['beam'], then (only if
        # read_beam is set) an attempt to read it from the header.
        self = super(OneDSpectrum, cls).__new__(cls, value, **kwargs)
        if beam is None:
            if "beam" in self.meta:
                beam = self.meta['beam']
            elif read_beam:
                beam = cube_utils.try_load_beam(self.header)
                if beam is None:
                    warnings.warn("Cannot load beam from header.",
                                  BeamWarning
                                  )
        if beam is not None:
            self.beam = beam
            self.meta['beam'] = beam
        self._cache = {}
        return self
    def _new_spectrum_with(self, **kwargs):
        # Propagate our beam by default, but only when we actually have one.
        beam = kwargs.pop('beam', None)
        if 'beam' in self._meta and beam is None:
            beam = self.beam
        out = super(OneDSpectrum, self)._new_spectrum_with(beam=beam, **kwargs)
        return out
    def with_beam(self, beam):
        '''
        Attach a new beam object to the OneDSpectrum.
        Parameters
        ----------
        beam : `~radio_beam.Beam`
            A new beam object.
        '''
        meta = self.meta.copy()
        meta['beam'] = beam
        return self._new_spectrum_with(beam=beam, meta=meta)
class VaryingResolutionOneDSpectrum(BaseOneDSpectrum, MultiBeamMixinClass):
    """A 1-D spectrum with one beam per channel (varying resolution)."""
    def __new__(cls, value, beams=None, read_beam=False, goodbeams_mask=None, **kwargs):
        # Beams priority: explicit argument, then meta['beams'], then (only
        # if read_beam is set) an attempt to read a beams table.
        self = super(VaryingResolutionOneDSpectrum, cls).__new__(cls, value, **kwargs)
        assert hasattr(self, '_fill_value')
        if beams is None:
            if "beams" in self.meta:
                beams = self.meta['beams']
            elif read_beam:
                beams = cube_utils.try_load_beams(self.header)
                if beams is None:
                    warnings.warn("Cannot load beams table from header.",
                                  BeamWarning
                                  )
        if beams is not None:
            # Accept raw FITS beam tables and normalize them to a Beams
            # object; any extra table columns are preserved as per-row meta.
            if isinstance(beams, BinTableHDU):
                beam_data_table = beams.data
            elif isinstance(beams, FITS_rec):
                beam_data_table = beams
            else:
                beam_data_table = None
            if beam_data_table is not None:
                beams = Beams(major=u.Quantity(beam_data_table['BMAJ'], u.arcsec),
                              minor=u.Quantity(beam_data_table['BMIN'], u.arcsec),
                              pa=u.Quantity(beam_data_table['BPA'], u.deg),
                              meta=[{key: row[key] for key in beam_data_table.names
                                     if key not in ('BMAJ','BPA', 'BMIN')}
                                    for row in beam_data_table],)
            self.beams = beams
            self.meta['beams'] = beams
        if goodbeams_mask is not None:
            self.goodbeams_mask = goodbeams_mask
        self._cache = {}
        return self
    @property
    def hdu(self):
        # A single HDU cannot represent per-channel beams; warn that they
        # are dropped here (use ``hdulist`` to keep them).
        warnings.warn("There are multiple beams for this spectrum that "
                      "are being ignored when creating the HDU.",
                      BeamWarning
                      )
        return super(VaryingResolutionOneDSpectrum, self).hdu
    @property
    def hdulist(self):
        """HDUList of the data HDU plus a binary beams table."""
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            hdu = self.hdu
        beamhdu = cube_utils.beams_to_bintable(self.beams)
        return HDUList([hdu, beamhdu])
    def _new_spectrum_with(self, **kwargs):
        # Always carry the beams along; fall back to our own if unspecified.
        beams = kwargs.pop('beams', self.beams)
        if beams is None:
            beams = self.beams
        VRODS = VaryingResolutionOneDSpectrum
        out = super(VRODS, self)._new_spectrum_with(beams=beams,
                                                    **kwargs)
        return out
    def __array_finalize__(self, obj):
        super(VaryingResolutionOneDSpectrum, self).__array_finalize__(obj)
        self._beams = getattr(obj, '_beams', None)
        if getattr(obj, 'goodbeams_mask', None) is not None:
            # do NOT use the setter here, because we sometimes need to write
            # intermediate size-mismatch things that later get fixed, e.g., in
            # __getitem__ below
            self._goodbeams_mask = getattr(obj, 'goodbeams_mask', None)
    def __getitem__(self, key):
        new_qty = super(VaryingResolutionOneDSpectrum, self).__getitem__(key)
        # use the goodbeams_mask setter here because it checks size
        new_qty.goodbeams_mask = self.goodbeams_mask[key]
        new_qty.beams = self.unmasked_beams[key]
        return new_qty
| {
"content_hash": "84cd6e354a97cdbd65fc1518e0ecdfcc",
"timestamp": "",
"source": "github",
"line_count": 1148,
"max_line_length": 121,
"avg_line_length": 36.704703832752614,
"alnum_prop": 0.5300092555236491,
"repo_name": "low-sky/spectral-cube",
"id": "fa5cee4f9d9c24540fcae698dd32470ce78a3bde",
"size": "42137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spectral_cube/lower_dimensional_structures.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "128"
},
{
"name": "Python",
"bytes": "755139"
}
],
"symlink_target": ""
} |
import glob, os, sys
sys.path.append(os.getcwd() + '/lib/')
sys.path.append(os.getcwd() + '/cloudtracker/')
# Multiprocessing modules
import multiprocessing as mp
from multiprocessing import Pool
PROC = 16
import model_param as mc
from conversion import convert
import cloudtracker.main
# Default working directory for ent_analysis package
cwd = os.getcwd()
# Output profile names
profiles = {'condensed', 'condensed_env', 'condensed_edge', \
'condensed_shell' , 'core', 'core_env', 'core_edge', 'core_shell', \
'plume', 'condensed_entrain', 'core_entrain', 'surface'}
def wrapper(module_name, script_name, function_name, filelist):
    """Run ``<module_name>.<script_name>.<function_name>`` over ``filelist``
    using a pool of ``PROC`` worker processes.

    Parameters: the three name strings identify the target function by
    package, module and attribute; ``filelist`` supplies one argument per
    task.
    """
    pkg = __import__ (module_name, globals(), locals(), ['*'])
    md = getattr(pkg, script_name)
    fn = getattr(md, function_name)
    pool = mp.Pool(PROC)
    try:
        pool.map(fn, filelist)
    finally:
        # Bug fix: the pool was previously never released, leaking worker
        # processes on every call. close() + join() shuts workers down.
        pool.close()
        pool.join()
def run_conversion():
    """Convert raw model output (bin3D) to netCDF and stage it for tracking."""
    pkg = 'conversion'
    os.chdir(mc.input_directory)
    # Ensure the data folder and its sub-folders exist at the target location
    for sub in ('', 'variables', 'tracking', 'core_entrain',
                'condensed_entrain'):
        target = os.path.join(mc.data_directory, sub)
        if not os.path.exists(target):
            os.makedirs(target)
    # Generate cloud field statistic
    convert.convert_stat()
    # bin3d2nc conversion
    wrapper(pkg, 'convert', 'convert', glob.glob('./*.bin3D'))
    # Move the netCDF files to relevant locations
    wrapper(pkg, 'nc_transfer', 'transfer', glob.glob('./*.nc'))
    # generate_tracking
    wrapper(pkg, 'generate_tracking', 'main',
            glob.glob('%s/variables/*.nc' % (mc.data_directory)))
def run_cloudtracker():
    """Run the cloud-tracking step on the staged tracking data."""
    # cloudtracker expects to run from its own directory
    os.chdir('%s/cloudtracker/' % (cwd))
    # NOTE: this mutates the shared mc.model_config dict in place.
    config = mc.model_config
    config['nt'] = mc.nt
    # Point cloudtracker's input at the staged tracking directory
    config['input_directory'] = mc.data_directory + '/tracking/'
    cloudtracker.main.main(config)
def run_profiler():
    """Build the time profiles: thermodynamic, entrainment, chi and surface."""
    pkg = 'time_profiles'
    os.chdir('%s/time_profiles' % (cwd))
    # Ensure output folder exists
    cdf_dir = '%s/time_profiles/cdf' % (cwd)
    if not os.path.exists(cdf_dir):
        os.makedirs(cdf_dir)
    # Main thermodynamic profiles
    wrapper(pkg, 'make_profiles', 'main',
            glob.glob('%s/variables/*.nc' % (mc.data_directory)))
    if(mc.do_entrainment):
        wrapper(pkg, 'core_entrain_profiles', 'main',
                glob.glob('%s/core_entrain/*.nc' % (mc.data_directory)))
        wrapper(pkg, 'condensed_entrain_profiles', 'main',
                glob.glob('%s/condensed_entrain/*.nc' % (mc.data_directory)))
    # Chi profiles for the core and condensed environments
    wrapper(pkg, 'chi_core', 'makechi', glob.glob('cdf/core_env*.nc'))
    wrapper(pkg, 'chi_condensed', 'makechi', glob.glob('cdf/condensed_env*.nc'))
    # Surface profiles (based on cloud tracking), one task per timestep
    wrapper(pkg, 'surface_profiles', 'main', range(mc.nt))
def run_id_profiles():
    """Build the per-cloud-ID profiles for every profile type."""
    os.chdir('%s/id_profiles' % (cwd))
    # Ensure output folder exists
    out_dir = '%s/id_profiles/cdf' % (cwd)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    wrapper('id_profiles', 'all_profiles', 'main', profiles)
if __name__ == '__main__':
    # Pipeline order matters: conversion stages data for the tracker,
    # which in turn feeds the profilers.
    run_conversion()
    run_cloudtracker()
    run_profiler()
    # ID profiles are currently disabled; re-enable when needed.
    #run_id_profiles()
    # Bug fix: parenthesized print works under both Python 2 and Python 3;
    # the bare print statement is a SyntaxError on Python 3.
    print('Entrainment analysis completed')
| {
"content_hash": "82c3303f22a1c5d0838e772f1708315c",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 73,
"avg_line_length": 30.401639344262296,
"alnum_prop": 0.6875168509032085,
"repo_name": "lorenghoh/ent_analysis",
"id": "ed963c181d218e01d284e796e0073b8f7932d839",
"size": "3709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run_analysis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15099"
},
{
"name": "JavaScript",
"bytes": "25956"
},
{
"name": "Python",
"bytes": "137308"
},
{
"name": "Shell",
"bytes": "4118"
}
],
"symlink_target": ""
} |
# Build script for the miniupnpc Python extension.
try:
    from setuptools import setup, Extension
except ImportError:
    # Fall back to distutils when setuptools is not installed.
    from distutils.core import setup, Extension
from distutils import sysconfig
# Clear compiler flags inherited from the Python build so the extension
# compiles with plain defaults.
sysconfig.get_config_vars()["OPT"] = ''
sysconfig.get_config_vars()["CFLAGS"] = ''
# The extension links against the prebuilt static library libminiupnpc.a.
setup(name="miniupnpc",
      version=open('VERSION').read().strip(),
      author='Thomas BERNARD',
      author_email='miniupnp@free.fr',
      license=open('LICENSE').read(),
      url='http://miniupnp.free.fr/',
      description='miniUPnP client',
      ext_modules=[
         Extension(name="miniupnpc", sources=["miniupnpcmodule.c"],
                   extra_objects=["libminiupnpc.a"])
      ])
| {
"content_hash": "894bfb80d660941739a4117799606ec3",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 67,
"avg_line_length": 33.68421052631579,
"alnum_prop": 0.6390625,
"repo_name": "ranok/bitmonero",
"id": "aa632db8225c17204c3a31e186ef71edd5f9b761",
"size": "997",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "external/miniupnpc/setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2627"
},
{
"name": "C",
"bytes": "1077089"
},
{
"name": "C++",
"bytes": "5356844"
},
{
"name": "CMake",
"bytes": "138557"
},
{
"name": "Makefile",
"bytes": "6530"
},
{
"name": "Objective-C",
"bytes": "6986"
},
{
"name": "Perl",
"bytes": "3110"
},
{
"name": "Perl6",
"bytes": "9033"
},
{
"name": "Python",
"bytes": "21901"
},
{
"name": "Shell",
"bytes": "22885"
}
],
"symlink_target": ""
} |
"""
Implements the SimpleDB interface for MySQLdb
"""
from __future__ import generators
import MySQLdb
import MySQLdb.cursors
import _mysql_exceptions
from simpledb import SimpleDB
class SimpleMySQLdb(SimpleDB):
    """MySQLdb backend for the SimpleDB interface.

    Connections are lazy (opened on first query) and transparently retried
    once after an OperationalError.  The ``query_*`` generator variants
    return a generator *function* that must be called to iterate rows.
    """
    def __init__(self, **kwargs):
        # kwargs are MySQLdb connection parameters (host, user, passwd, ...)
        super(SimpleMySQLdb, self).__init__(**kwargs)
        self.auth(**kwargs)
        self.paramstyle = MySQLdb.paramstyle
    def __del__(self):
        """
        Poof!  Close the connection when the object is garbage-collected.
        """
        self._disconnect()
    def auth(self, **kwargs):
        """
        Set/Update authorization; drops any open connection so the new
        credentials take effect on the next query.
        """
        self._disconnect()
        self.authorization.update(kwargs)
    def _connect(self):
        """
        Connect to the server (no-op when already connected)
        """
        if not self.dbh:
            self.dbh = MySQLdb.Connect(**self.authorization)
    def _disconnect(self):
        """
        Close the connection as best as we can (best-effort; errors ignored)
        """
        try:
            self.dbh.close()
        except Exception:
            pass
        self.dbh = None
    def _query(self, *args, **kwargs):
        """
        Query and return the cursor. Reconnect if needed.
        args[0] is the SQL statement; the remaining args are its parameters.
        kwargs: ``cursorclass`` selects the MySQLdb cursor type,
        ``reconnect`` (default True) allows one retry after a dropped
        connection.
        """
        if 'cursorclass' not in kwargs:
            kwargs['cursorclass'] = MySQLdb.cursors.Cursor
        if 'reconnect' not in kwargs:
            kwargs['reconnect'] = True
        try:
            self._connect()
            cursor = self.dbh.cursor(kwargs['cursorclass'])
            cursor.execute(args[0], args[1:])
            return cursor
        except _mysql_exceptions.OperationalError:
            if not kwargs['reconnect']:
                raise
            # Drop the stale connection and retry exactly once.
            try:
                self.dbh.close()
            except Exception:
                pass
            self.dbh = None
            kwargs['reconnect'] = False
            return self._query(*args, **kwargs)
    ###########################################################################
    def query_dict(self, *args, **kwargs):
        # Returns a generator function yielding one dict per row
        # (server-side cursor, suitable for large result sets).
        def _query_dict_results():
            kwargs['cursorclass'] = MySQLdb.cursors.SSDictCursor
            cursor = self._query(*args, **kwargs)
            row = cursor.fetchone()
            while row is not None:
                yield row
                row = cursor.fetchone()
            cursor.close()
        return _query_dict_results
    def query_dict_row(self, *args, **kwargs):
        # Single-row variant: returns one dict, raises if >1 row matched.
        kwargs['cursorclass'] = MySQLdb.cursors.DictCursor
        cursor = self._query(*args, **kwargs)
        result = cursor.fetchone()
        if cursor.fetchone() is not None:
            raise OverflowError('Query returns more then one row')
        cursor.close()
        return result
    ###########################################################################
    def query_tuple(self, *args, **kwargs):
        # Returns a generator function yielding one tuple per row.
        def _query_tuple_results():
            kwargs['cursorclass'] = MySQLdb.cursors.SSCursor
            cursor = self._query(*args, **kwargs)
            row = cursor.fetchone()
            while row is not None:
                yield row
                row = cursor.fetchone()
            cursor.close()
        return _query_tuple_results
    def query_tuple_row(self, *args, **kwargs):
        # Single-row variant: returns one tuple, raises if >1 row matched.
        kwargs['cursorclass'] = MySQLdb.cursors.Cursor
        cursor = self._query(*args, **kwargs)
        result = cursor.fetchone()
        if cursor.fetchone() is not None:
            raise OverflowError('Query returns more then one row')
        cursor.close()
        return result
    ###########################################################################
    def query_list(self, *args, **kwargs):
        # Returns a generator function yielding one list per row.
        def _query_list_results():
            kwargs['cursorclass'] = MySQLdb.cursors.SSCursor
            cursor = self._query(*args, **kwargs)
            row = cursor.fetchone()
            while row is not None:
                yield list(row)
                row = cursor.fetchone()
            cursor.close()
        return _query_list_results
    def query_list_row(self, *args, **kwargs):
        # Single-row variant.  NOTE(review): an empty result makes
        # list(None) raise TypeError here -- confirm that is acceptable.
        kwargs['cursorclass'] = MySQLdb.cursors.Cursor
        cursor = self._query(*args, **kwargs)
        result = cursor.fetchone()
        if cursor.fetchone() is not None:
            raise OverflowError('Query returns more then one row')
        cursor.close()
        return list(result)
    ###########################################################################
    def query_fields(self, *args, **kwargs):
        # Returns a generator function yielding a single scalar per row;
        # raises if any row has more than one column.
        def _query_field_results():
            kwargs['cursorclass'] = MySQLdb.cursors.SSCursor
            cursor = self._query(*args, **kwargs)
            row = cursor.fetchone()
            while row is not None:
                if len(row) > 1:
                    raise OverflowError('Too many result fields')
                yield row[0]
                row = cursor.fetchone()
            cursor.close()
        return _query_field_results
    def query_field(self, *args, **kwargs):
        # Single scalar: one row, one column, or None for an empty result.
        kwargs['cursorclass'] = MySQLdb.cursors.Cursor
        cursor = self._query(*args, **kwargs)
        result = cursor.fetchone()
        if cursor.fetchone() is not None:
            raise OverflowError('Query returns more then one row')
        cursor.close()
        if result is None:
            return None
        if len(result) > 1:
            raise OverflowError('Too many result fields')
        return result[0]
    ###########################################################################
    def update(self, *args, **kwargs):
        # Run a mutating statement, commit, and return the affected row count.
        cursor = self._query(*args, **kwargs)
        rowsaffected = cursor.rowcount
        cursor.close()
        self.dbh.commit()
        return rowsaffected
    ###########################################################################
    def insert(self, *args, **kwargs):
        # Insert one or many rows.  Call shapes:
        #   insert(sql, v1, v2, ...)         -> single row
        #   insert(sql, [row1, row2, ...])   -> multiple rows
        # Returns the list of lastrowid values, one per inserted row.
        if type(args[-1]) is not list:
            # Normalize the single-row form into the multi-row form.
            query = args[0]
            row = args[1:]
            args = [query, [row]]
        ids = []
        for row in args[1]:
            arg = [args[0]]
            arg.extend(list(row))
            cursor = self._query(*arg, **kwargs)
            ids.append(cursor.lastrowid)
            cursor.close()
        self.dbh.commit()
        return ids
| {
"content_hash": "f57b83de949bbe54d41df489663da867",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 79,
"avg_line_length": 31.446153846153845,
"alnum_prop": 0.4926614481409002,
"repo_name": "kormoc/SimpleDB",
"id": "27374fc7f40760257f0421fb9ae1e29d4f9c0856",
"size": "6169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SimpleDB/simplemysqldb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8698"
}
],
"symlink_target": ""
} |
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
from xor import xorData
class CFBMode(object):
    '''
    This class implements CFB8 mode using python cryptography. The
    base encryption library function used is ECB mode; the per-byte block
    feedback is done manually. Educational purposes only.
    NOTE(review): real AES-CFB8 requires a 16-byte IV (the AES block size);
    allowing 24/32 here mirrors the key lengths but looks suspect -- confirm.
    '''
    def __init__(self, key, iv):
        '''
        This constructor initializes the key and initialization vector. The
        key and the IV should be the same length, either 16, 24, or 32 bytes
        long.
        '''
        self.key = key
        self.iv = iv
    # Input validation for the key
    @property
    def key(self):
        return self._key
    @key.setter
    def key(self, key):
        if (len(key) not in [16, 24, 32]):
            raise Exception('The key must be 16, 24, or 32 bytes long.')
        self._key = key
    # Input validation for the IV
    @property
    def iv(self):
        return self._iv
    @iv.setter
    def iv(self, iv):
        if (len(iv) not in [16, 24, 32]):
            raise Exception('The iv must be 16, 24, or 32 bytes long.')
        self._iv = iv
    def encrypt(self, plaintext):
        '''
        Encrypt ``plaintext`` byte-by-byte using an 8-bit shift register:
        each output byte is plaintext XOR the first byte of the ECB
        encryption of the current register, and the register then shifts
        the new ciphertext byte in.
        Note: CFB does not use padding.
        NOTE(review): indexing and ''.join assume Python 2 byte strings;
        under Python 3, plaintext[i] is an int -- confirm target version.
        '''
        # Initialize the python cryptography ECB mode primitive; the CFB
        # chaining is implemented manually below.
        backend = default_backend()
        cipher = Cipher(algorithms.AES(self.key), modes.ECB(),
                        backend = backend)
        encryptor = cipher.encryptor()
        ciphertextList = []
        # Loop through the plaintext elements, return ciphertext string
        for i in range(0, len(plaintext)):
            if (i == 0):
                # First byte: the shift register starts as the IV.
                outputBlock = encryptor.update(self.iv)
                xor = xorData(outputBlock[0], plaintext[i])
                firstCiphertextByte = xor.getXor()
                ciphertextList.append(firstCiphertextByte)
                sbitShift = self.iv[1:] + ciphertextList[i]
            elif (i >= 1):
                # Subsequent bytes: encrypt the shifted register, XOR, then
                # shift the new ciphertext byte into the register.
                nBlock = encryptor.update(sbitShift)
                xor = xorData(nBlock[0], plaintext[i])
                nCiphertextByte = xor.getXor()
                ciphertextList.append(nCiphertextByte)
                sbitShift = sbitShift[1:] + ciphertextList[i]
        return ''.join(ciphertextList)
    def decrypt(self, ciphertext):
        '''
        Decrypt ``ciphertext`` byte-by-byte with the same 8-bit shift
        register; CFB decryption reuses the *encryption* direction of the
        block cipher, feeding the received ciphertext bytes back into the
        register.
        Note: CFB does not use padding.
        '''
        # Initialize the python cryptography ECB mode
        # CFB uses the encryption algorithm for decryption as well
        backend = default_backend()
        cipher = Cipher(algorithms.AES(self.key), modes.ECB(),
                        backend = backend)
        encryptor = cipher.encryptor()
        plaintextList = []
        # Loop through the ciphertext list, return plaintext string
        for i in range(0, len(ciphertext)):
            if (i == 0):
                outputBlock = encryptor.update(self.iv)
                xor = xorData(outputBlock[0], ciphertext[i])
                firstPlaintextByte = xor.getXor()
                plaintextList.append(firstPlaintextByte)
                sbitShift = self.iv[1:] + ciphertext[i]
            elif (i >= 1):
                nBlock = encryptor.update(sbitShift)
                xor = xorData(nBlock[0], ciphertext[i])
                nPlaintextByte = xor.getXor()
                plaintextList.append(nPlaintextByte)
                sbitShift = sbitShift[1:] + ciphertext[i]
        return ''.join(plaintextList)
| {
"content_hash": "057ddcf394669022358320a6d6c5f8aa",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 78,
"avg_line_length": 37.707070707070706,
"alnum_prop": 0.5882668095365657,
"repo_name": "dennisme/AESBlockCiphers",
"id": "aaaccb85ec0c205b5d09abfa67188bd49cd22d28",
"size": "3775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blocks/aesCFB.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "66347"
}
],
"symlink_target": ""
} |
from setuptools import setup

# Packaging metadata for the salearner package.
setup(name='salearner',
      version='0.1',
      description='TensorFlow POC optimizing SA scoring',
      url='https://github.com/Gaasmann/sa-learner',
      author='Nicolas Haller',
      author_email='nicolas@boiteameuh.org',
      license='MIT',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Information Technology',
          'Intended Audience :: System Administrators',
          'License :: OSI Approved :: MIT License',
          'Natural Language :: English',
          'Topic :: Communications :: Email',
          # Fixed: the bare 'Scientific/Engineering :: ...' string is not a
          # valid trove classifier; it must be rooted under 'Topic ::'.
          'Topic :: Scientific/Engineering :: Artificial Intelligence'],
      packages=['salearner'],
      install_requires=['numpy'],
      include_package_data=True,
      zip_safe=False)
| {
"content_hash": "ee7543cf5424ce787208e1d5ac995747",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 65,
"avg_line_length": 38.95238095238095,
"alnum_prop": 0.5916870415647921,
"repo_name": "Gaasmann/sa-learner",
"id": "ec33e82e78c6ecabcc697af159468ba1413ab0ee",
"size": "842",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "126968"
},
{
"name": "Python",
"bytes": "14692"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Create the Photo and Post models and link each Photo to its Post."""

    dependencies = [
        # Post.userId points at the (swappable) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('twitterclone', '0010_auto_20150331_1958'),
    ]
    operations = [
        migrations.CreateModel(
            name='Photo',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True)),
                ('url', models.URLField(max_length=255, blank=True)),
                # server/farm/secret/flickrid: presumably Flickr photo
                # addressing components -- confirm against the photo client.
                ('server', models.CharField(max_length=255, blank=True)),
                ('farm', models.CharField(max_length=255, blank=True)),
                ('secret', models.CharField(max_length=255, blank=True)),
                ('flickrid', models.CharField(max_length=255, blank=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True)),
                ('title', models.CharField(unique=True, max_length=200)),
                ('message', models.TextField(max_length=1024)),
                ('created_date', models.DateTimeField()),
                ('photo_id', models.CharField(max_length=50)),
                ('tags', models.CharField(max_length=200)),
                ('userId', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # The Photo -> Post foreign key is added only after both models exist.
        migrations.AddField(
            model_name='photo',
            name='post',
            field=models.ForeignKey(to='twitterclone.Post'),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "772aa995e8daa4d85191c5e6420e923b",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 76,
"avg_line_length": 35.82,
"alnum_prop": 0.5343383584589615,
"repo_name": "rsubra13/dtc",
"id": "0ad24abbc91dc80255a21b1150dd7dd855ebd7b6",
"size": "1815",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twitterclone/migrations/0011_auto_20150331_1958.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "14149"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "25802"
}
],
"symlink_target": ""
} |
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function)
from dragonglue.command import send_command, Command
grammar = Grammar("launch")
# Speakable application name -> window-manager hotkey spec ("w-" is the
# Super/Windows modifier in dragonfly Key notation).
applications = {
    'sublime': 'w-s',
    'pycharm': 'w-d',
    'chrome': 'w-f',
    'logs': 'w-j',
    'SQL': 'w-k',
    'IPython': 'w-l',
    'shell': 'w-semicolon',
    'terminal': 'w-a',
    # 'spotify': 'spotify /home/dan/bin/spotify',
}
# aliases
applications['charm'] = applications['pycharm']
applications['termie'] = applications['terminal']
# Voice commands: fixed phrases plus "get <application>" which looks the
# hotkey up in the Choice extra above.
launch_rule = MappingRule(
    name="launch",
    mapping={
        'Do run': Key('w-x'),
        'get <application>': Key('%(application)s'),
        # 're-browse': Key('w-F'),
        'voice sync': Command('subl --command voice_sync'),
        '(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'),
    },
    extras=[
        Choice('application', applications)
    ]
)
grammar.add_rule(launch_rule)
grammar.load()
def unload():
    """Tear down the module grammar so this command file can be reloaded."""
    global grammar
    if grammar:
        grammar.unload()
    grammar = None
| {
"content_hash": "e144c35b65b637391b85038b9bb2a17a",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 114,
"avg_line_length": 22.70212765957447,
"alnum_prop": 0.6082474226804123,
"repo_name": "drocco007/vox_commands",
"id": "5e849f3b7531981288f3e3184ff9f3dda9de730a",
"size": "1067",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_launch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42522"
}
],
"symlink_target": ""
} |
import sys
import traceback
import os
from PyQt5.QtWidgets import (QWidget, QFileDialog, QPushButton, QTextEdit,
QGridLayout, QApplication, QLabel, QComboBox,
QCheckBox, QLineEdit, QStatusBar, QMainWindow)
# files for this package
import loss_analysis
class Measurement(QWidget):
    """One row of the loader form: a "Load X" button plus a filename label."""

    def __init__(self, grid, meas_name, default_file, row):
        # grid: QGridLayout to populate; meas_name: display name, also used
        # as the key prefix in file(); default_file: preselected file inside
        # ../example_cell; row: grid row index for this widget pair.
        super().__init__()
        self.meas_name = meas_name
        self.start_dir = os.path.join(os.pardir, 'example_cell')
        self.filepath = os.path.join(self.start_dir, default_file)
        # Remember the extension so the file dialog can filter on it.
        _, self.file_ext = os.path.splitext(default_file)
        self._add_objects(grid, row)
    def _add_objects(self, grid, row):
        '''
        Builds the button and label, and binds the button to the file
        chooser.
        '''
        self.btn = QPushButton('Load {0}'.format(self.meas_name))
        self.btn.clicked.connect(self._get)
        filename = os.path.basename(self.filepath)
        self.label = QLabel(filename, self)
        grid.addWidget(self.btn, row, 0)
        grid.addWidget(self.label, row, 1)
    def _get(self):
        '''
        Opens a file dialog, stores the chosen path and updates the label
        with the new file name.
        '''
        filter_str = '{0} file (*{1})'.format(self.meas_name, self.file_ext)
        self.filepath = QFileDialog.getOpenFileName(self,
                                                    'Choose {0} file'.format(
                                                        self.meas_name),
                                                    self.start_dir, filter_str)[0]
        filename = os.path.basename(self.filepath)
        self.label.setText(filename)
    def file(self):
        # Keyword-argument entry consumed by loss_analysis.Cell(**files).
        return {self.meas_name + '_fname': self.filepath}
class LossAnalysisGui(QWidget):
    """Main form: directory pickers, one Measurement row per input file,
    options, and the "Process data" action."""

    def __init__(self, parent):
        # super(LossAnalysisGui, self).__init__(parent)
        super().__init__()
        # Parent window is kept so its status bar can show progress messages.
        self.parent = parent
        self.initUI()
    def initUI(self):
        """Build and lay out all widgets on a grid."""
        grid = QGridLayout()
        # grid.setSpacing(10)
        self.output_dir = os.path.join(os.pardir, 'example_cell')
        self.start_dir = os.path.join(os.pardir, 'example_cell')
        self.save_fig_bool = False
        # select starting directory
        self.btn_start_dir = QPushButton("Select start directory")
        self.btn_start_dir.clicked.connect(self.select_start_dir)
        grid.addWidget(self.btn_start_dir, 0, 0)
        self.label_start_dir = QLabel(os.path.basename(self.start_dir), self)
        grid.addWidget(self.label_start_dir, 0, 1)
        # select output directory
        self.btn_output_dir = QPushButton("Select output directory")
        self.btn_output_dir.clicked.connect(self.select_output_dir)
        grid.addWidget(self.btn_output_dir, 1, 0)
        self.label_output_dir = QLabel(os.path.basename(self.output_dir), self)
        grid.addWidget(self.label_output_dir, 1, 1)
        # One Measurement row per input file: [display name, default file].
        boxes = [['reflectance', 'example_reflectance.csv'],
                 ['EQE', 'example_EQE.txt'],
                 ['light IV', 'example_lightIV.lgt'],
                 ['suns Voc', 'example_sunsVoc.xlsm'],
                 ['dark IV', 'example_darkIV.drk']
                 ]
        self.measurement = []
        for box, row_num in zip(boxes, range(len(boxes))):
            self.measurement.append(Measurement(grid, box[0], box[1],
                                                row_num + 3))
        # save figures checkbox
        self.cb_save_fig = QCheckBox('Save figures', self)
        self.cb_save_fig.stateChanged.connect(self.save_fig_toggle)
        grid.addWidget(self.cb_save_fig, 9, 0)
        # cell name input
        self.cell_name_input = QLineEdit(self)
        self.cell_name_input.setPlaceholderText('cell name')
        grid.addWidget(self.cell_name_input, 9, 1)
        # process all data
        self.btn_process = QPushButton("Process data")
        self.btn_process.clicked.connect(self.process_data)
        grid.addWidget(self.btn_process, 10, 0)
        # self.statusBar = QStatusBar()
        # self.setStatusBar(self.statusBar)
        self.setLayout(grid)
        self.show()
    def select_start_dir(self):
        """Pick the input directory; also re-points every Measurement row
        and resets the output directory to match."""
        self.start_dir = QFileDialog.getExistingDirectory(self,
                                                          'Choose start directory', self.start_dir)
        self.label_start_dir.setText(os.path.basename(self.start_dir))
        for m in self.measurement:
            m.start_dir = self.start_dir
        self.output_dir = self.start_dir
        self.label_output_dir.setText(os.path.basename(self.start_dir))
    def select_output_dir(self):
        """Pick where results and figures are written."""
        self.output_dir = QFileDialog.getExistingDirectory(self,
                                                           'Choose output directory', self.output_dir)
        self.label_output_dir.setText(os.path.basename(self.output_dir))
    def save_fig_toggle(self, state):
        # Mirror the checkbox state into a plain bool for process_data().
        if self.cb_save_fig.isChecked():
            self.save_fig_bool = True
        else:
            self.save_fig_bool = False
    def process_data(self):
        """Collect the chosen file paths and run the loss analysis,
        reporting progress/errors on the parent window's status bar."""
        files = {}
        for i in self.measurement:
            files.update(i.file())
        # pass the file names, and let the next thing handle them.
        self.parent.statusBar().showMessage('loading files')
        # a check on the data
        # if the data is bad, a message is returned in the gui
        try:
            la = loss_analysis.Cell(**files)
        except Exception as e:
            self.parent.statusBar().showMessage('Error:' + str(e))
            # print(str(e))
        else:
            self.parent.statusBar().showMessage('Calculating losses')
            la.process_all(self.save_fig_bool, self.output_dir,
                           self.cell_name_input.text())
            self.parent.statusBar().showMessage('Done!')
class App(QMainWindow):
    """Top-level window hosting the LossAnalysisGui form and a status bar."""

    def __init__(self):
        super().__init__()
        self.title = 'Loss analysis'
        # Initial window geometry, in pixels.
        self.left = 100
        self.top = 100
        self.width = 400
        self.height = 500
        self.initUI()
    def initUI(self):
        """Configure the window chrome and embed the form widget."""
        self.setWindowTitle(self.title)
        self.setGeometry(self.left, self.top, self.width, self.height)
        self.statusBar().showMessage('All clear, ready to roll')
        self.form_widget = LossAnalysisGui(self)
        self.setCentralWidget(self.form_widget)
        self.show()
if __name__ == '__main__':
    # Start the Qt event loop and show the main window.
    # logfile = open('traceback.log', 'w')
    app = QApplication(sys.argv)
    # try:
    lag = App()
    # except:
    #     traceback.print_exc(file=logfile)
    lag.show()
    # logfile.close()
    sys.exit(app.exec_())
| {
"content_hash": "ecbd135e13d29778aa4c18c06912eb08",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 102,
"avg_line_length": 33.11055276381909,
"alnum_prop": 0.5718621945667021,
"repo_name": "neddles/Loss-Analysis",
"id": "df048ff33e8b90f49510dc6c06c323d3570e4f90",
"size": "6589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "loss_analysis/main_gui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Logtalk",
"bytes": "4734"
},
{
"name": "Python",
"bytes": "48976"
}
],
"symlink_target": ""
} |
import numpy as np
from unet3d.utils.utils import get_nibabel_data
def extract_gifti_surface_vertices(surface, index=0, geometric_type="Anatomical", **kwargs):
    """Return the vertex coordinate array of a GIFTI surface.

    Thin wrapper over ``extract_gifti_array`` that defaults to the first
    darray and asserts the "Anatomical" geometric type.
    """
    return extract_gifti_array(
        surface, index=index, geometric_type=geometric_type, **kwargs)
def extract_gifti_array(gifti_object,
                        index,
                        geometric_type=None,
                        primary_anatomical_structure=None,
                        secondary_anatomical_structure=None):
    """Return the ``.data`` of one darray, optionally checking its metadata.

    *index* may be an integer position or a string resolved against the
    darray "Name" metadata entries.  Each non-empty expectation is checked
    against the corresponding metadata key with ``assert``.
    """
    if type(index) is str:
        index = extract_gifti_array_names(gifti_object).index(index)
    darray = gifti_object.darrays[index]
    expectations = (("GeometricType", geometric_type),
                    ("AnatomicalStructurePrimary", primary_anatomical_structure),
                    ("AnatomicalStructureSecondary", secondary_anatomical_structure))
    for meta_key, required in expectations:
        if required:
            assert darray.metadata[meta_key] == required
    return darray.data
def extract_gifti_array_names(gifti_object, key='Name'):
    """List the *key* metadata entry of every darray, in darray order."""
    names = []
    for darray in gifti_object.darrays:
        names.append(darray.metadata[key])
    return names
def extract_cifti_scalar_map_names(cifti_scalar, map_index=0):
    """Collect the map names from the scalar's index map at *map_index*."""
    names = []
    for named_map in cifti_scalar.header.get_index_map(map_index):
        names.append(named_map.map_name)
    return names
def extract_cifti_scalar_data(cifti_scalar, map_name, brain_structure_name=None, brain_model_axis_index=1):
    """Return the data row for *map_name*, optionally masked to one structure."""
    row_index = extract_cifti_scalar_map_names(cifti_scalar).index(map_name)
    data = cifti_scalar.dataobj[row_index]
    if brain_structure_name is None:
        return data
    structure_mask = get_mask_from_scalar(cifti_scalar,
                                          brain_structure_name=brain_structure_name,
                                          axis_index=brain_model_axis_index)
    return data[structure_mask]
def extract_masked_surface_vertices(surface, mask, surface_name=None):
    """Return only the surface vertices selected by *mask*."""
    all_vertices = extract_gifti_surface_vertices(
        surface, primary_anatomical_structure=surface_name)
    return all_vertices[mask]
def extract_parcellated_scalar_parcel_names(pscalar, parcel_index=1):
    """Return parcel names from a parcellated scalar, skipping nameless entries.

    Raises RuntimeError when the number of named parcels disagrees with the
    pscalar's shape along *parcel_index*.
    """
    parcel_names = list()
    for entry in pscalar.header.get_index_map(parcel_index):
        # Some index-map entries carry no "name" attribute; skip them.
        if hasattr(entry, "name"):
            parcel_names.append(entry.name)
    if not pscalar.shape[parcel_index] == len(parcel_names):
        raise RuntimeError("Number of parcel names, {}, does not match pscalar reduced_shape, {}.".format(len(parcel_names),
                                                                                                          pscalar.shape))
    return parcel_names
def get_vertices_from_scalar(scalar, brain_structure_name, axis_index=1):
    """Vertex numbers that belong to *brain_structure_name* on the model axis."""
    axis = get_axis(scalar, axis_index)
    structure_mask = get_mask_from_axis(axis, brain_structure_name)
    return axis.vertex[structure_mask]
def get_mask_from_scalar(scalar, brain_structure_name, axis_index=1):
    """Boolean mask over the model axis selecting *brain_structure_name*."""
    axis = scalar.header.get_axis(axis_index)
    return get_mask_from_axis(axis, brain_structure_name)
def get_mask_from_axis(brain_model_axis, brain_structure_name):
    """Element-wise mask of axis entries matching the canonical structure name."""
    canonical = brain_model_axis.to_cifti_brain_structure_name(brain_structure_name)
    return brain_model_axis.name == canonical
def get_axis(scalar, axis_index):
    """Shortcut for ``scalar.header.get_axis(axis_index)``."""
    header = scalar.header
    return header.get_axis(axis_index)
def get_metric_data(metrics, metric_names, surface_names, subject_id, stack_axis=1):
    """Stack metric values across surfaces into a single array.

    metrics -- sequence of CIFTI scalar images.
    metric_names -- one sequence of map-name templates per metric; each may
        contain a ``{}`` placeholder filled with *subject_id*.
    surface_names -- brain structures whose values are concatenated per map.
    stack_axis -- axis along which the per-map vectors are stacked.

    Fixed: the inner loop variable used to shadow the *metric_names*
    parameter (it only worked because zip() bound its arguments first);
    renamed for clarity, behavior unchanged.
    """
    all_metric_data = list()
    for metric, names_for_metric in zip(metrics, metric_names):
        for metric_name in names_for_metric:
            metric_data = list()
            for surface_name in surface_names:
                # Concatenate this map's values across all requested structures.
                metric_data.extend(extract_cifti_scalar_data(metric, metric_name.format(subject_id),
                                                             brain_structure_name=surface_name))
            all_metric_data.append(metric_data)
    return np.stack(all_metric_data, axis=stack_axis)
def new_cifti_scalar_exactly_like(array, structure_names, reference_cifti, default_value=0, almost_equals_decimals=2):
    """Like ``new_cifti_scalar_like`` but reusing the reference's own map names."""
    map_names = extract_cifti_scalar_map_names(reference_cifti)
    return new_cifti_scalar_like(array,
                                 scalar_names=map_names,
                                 structure_names=structure_names,
                                 default_value=default_value,
                                 almost_equals_decimals=almost_equals_decimals,
                                 reference_cifti=reference_cifti)
def new_cifti_scalar_like(array, scalar_names, structure_names, reference_cifti, default_value=0,
                          almost_equals_decimals=2):
    """Build a new scalar image shaped like *reference_cifti*.

    Consecutive column ranges of *array* are scattered into the model axis
    at the positions of each structure in *structure_names*; every other
    position receives *default_value*.  When the default is 0 the total sum
    is sanity-checked against the input.
    """
    header = reference_cifti.header
    scalar_axis_cls = header.get_axis(0).__class__
    new_scalar_axis = scalar_axis_cls(scalar_names)
    model_axis = header.get_axis(1)
    n_models = model_axis.name.shape[0]
    dataobj = np.ones((array.shape[0], n_models), array.dtype) * default_value
    start = 0
    for structure_name in structure_names:
        canonical = model_axis.to_cifti_brain_structure_name(structure_name)
        structure_mask = model_axis.name == canonical
        stop = start + int(np.count_nonzero(structure_mask))
        dataobj[:, structure_mask] = array[:, start:stop]
        start = stop
    if default_value == 0:
        # With a zero background the totals must agree (to the given decimals).
        np.testing.assert_almost_equal(np.sum(dataobj), np.sum(array), almost_equals_decimals)
    return reference_cifti.__class__(dataobj=dataobj, header=[new_scalar_axis, model_axis])
def new_gifti_array_like(data, gifti_array):
    """Copy of *gifti_array* carrying *data* but the same array attributes."""
    template = gifti_array
    return template.__class__(data=data,
                              intent=template.intent,
                              datatype=template.datatype,
                              encoding=template.encoding,
                              endian=template.endian,
                              coordsys=template.coordsys,
                              meta=template.meta)
def new_surface_like(data, surface):
    """New surface whose darrays carry *data* but keep the original attributes."""
    rebuilt = [new_gifti_array_like(values, template)
               for values, template in zip(data, surface.darrays)]
    return surface.__class__(darrays=rebuilt)
def create_metric_masked_surface(surface, metric):
    """Return a copy of *surface* restricted to the vertices covered by *metric*."""
    vertices = surface.darrays[0]
    # Vertex numbers the metric provides for this anatomical structure.
    vert_indices = get_vertices_from_scalar(metric, vertices.metadata["AnatomicalStructurePrimary"])
    masked_vertices = vertices.data[vert_indices]
    faces_array = surface.darrays[1].data
    # Keep only faces whose three corners all survive the vertex mask.
    faces_mask = np.all(np.in1d(faces_array, vert_indices).reshape(faces_array.shape), axis=1)
    masked_faces = faces_array[faces_mask, :]
    # Renumber face corners into the compacted vertex numbering.
    # NOTE(review): this in-place remap is only collision-free if
    # vert_indices is sorted ascending (so new_idx <= idx at every step)
    # -- confirm the ordering produced upstream.
    for new_idx, idx in enumerate(vert_indices):
        masked_faces[masked_faces == idx] = new_idx
    return new_surface_like([masked_vertices, masked_faces], surface)
def extract_cifti_volumetric_data(cifti_image, map_names, subject_id=None, model_axis=1):
    """Scatter the selected maps of a CIFTI image into a dense volume array."""
    map_names = list(map_names)
    if subject_id is not None:
        # Map names may contain '{}' placeholders for the subject id.
        for i, map_name in enumerate(list(map_names)):
            map_names[i] = map_name.format(subject_id)
    data = get_nibabel_data(cifti_image)
    all_map_names = extract_cifti_scalar_map_names(cifti_image)
    mask = np.in1d(all_map_names, map_names)
    # Keep only the requested maps and move the map dimension last.
    data = np.swapaxes(data[mask], 0, -1)
    model_axis = cifti_image.header.get_axis(model_axis)
    # Dense output: spatial volume shape plus one channel per selected map.
    volume_data = np.zeros(model_axis.volume_shape + (data.shape[-1],))
    # Write each model voxel's values into its (i, j, k) location.
    for voxel, values in zip(model_axis.voxel, data):
        volume_data[tuple(voxel)] = values
    return volume_data
| {
"content_hash": "c9dfe975d35d77d09bc30b8a903bd0c7",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 124,
"avg_line_length": 46.31372549019608,
"alnum_prop": 0.656929156082416,
"repo_name": "ellisdg/3DUnetCNN",
"id": "08656dd973f157d05b899d1e59100a15c3546f7d",
"size": "7086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unet3d/utils/hcp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "493190"
}
],
"symlink_target": ""
} |
import os
import os.path
import subprocess
import sys
import tempfile
import neovim
sys.path.append(os.path.dirname(__file__))
import resolve
@neovim.plugin
class ResolvePlugin(object):
    """Neovim plugin exposing :ResolveCompile and :ResolveVerify for RESOLVE
    programs, with popout windows for errors and verification conditions."""

    def __init__(self, nvim):
        self.nvim = nvim
    # Fixed: declared as an instance method but always called as
    # ResolvePlugin.get_args(buffer); mark it @staticmethod explicitly.
    @staticmethod
    def get_args(buffer):
        """Return (module name, type letter, full source) for a buffer.

        The type letter is the lower-cased first character of the first line.
        """
        return os.path.splitext(os.path.basename(buffer.name))[0], buffer[0][0].lower(), '\n'.join(buffer)
    def popout(self, name):
        """Open (or reopen) a scratch side window named *name* and return it."""
        # Close any previous window with the same name before recreating it.
        for buffer in self.nvim.buffers:
            if buffer.name == name:
                self.nvim.command('{} bdelete'.format(buffer.number))
        self.nvim.command('belowright vertical 50 new')
        win = self.nvim.current.window
        win.buffer.name = name
        # Scratch-buffer settings: no file backing, delete when hidden.
        win.buffer.options['swapfile'] = False
        win.buffer.options['undofile'] = False
        win.buffer.options['buftype'] = 'nofile'
        win.buffer.options['bufhidden'] = 'delete'
        return win
    def display_lines(self, win, objs, ln, key, gen):
        """Render *objs* into *win*, aligned with their source line numbers.

        ln(obj) gives the source line an entry belongs to; gen(obj) the text
        to display (folded per entry); key(obj), if given, maps entries to
        their first display line in the returned dict.  Gaps between source
        lines are padded with '~' rows; entries sharing a source line are
        folded together.
        """
        lines = {}
        line = 0
        last = 0
        double = False
        double_start = 0
        for obj in objs:
            if ln(obj) == last:
                # Second entry for the same source line.
                double = True
            else:
                if double:
                    self.nvim.command('{},{} fold'.format(double_start, line))
                # Pad with '~' rows for skipped source lines.
                added = ln(obj) - last - 1
                for _ in range(added):
                    if line == 0:
                        win.buffer[0] = '~'
                    else:
                        win.buffer.append('~')
                    line += 1
                double = False
                double_start = line + 1
                last = ln(obj)
            if key:
                lines[key(obj)] = line
            info = gen(obj)
            start = line + 1
            for infoline in info.split('\n'):
                if line == 0:
                    win.buffer[0] = infoline
                else:
                    win.buffer.append(infoline)
                line += 1
            self.nvim.command('{},{} fold'.format(start, line))
        if double:
            self.nvim.command('{},{} fold'.format(double_start, line))
        win.buffer.options['modifiable'] = False
        self.nvim.command('1')
        self.nvim.command('winc h')
        self.nvim.command('1')
        return lines
    @neovim.command('ResolveCompile')
    def compile(self):
        """Compile the current buffer; show errors in a popout or run the jar."""
        popout_prefix = '{}/Compilation Output - '.format(os.readlink('/tmp'))
        # If invoked from the popout itself, jump back to the code window.
        if self.nvim.current.buffer.name.startswith(popout_prefix):
            self.nvim.command('winc h')
        if not self.nvim.current.buffer.name:
            self.nvim.out_write('You must set a buffer name (e.g. save it to a file) to compile a resolve program.\n')
            return
        name, type, content = ResolvePlugin.get_args(self.nvim.current.buffer)
        codewin = self.nvim.current.window
        try:
            jar = resolve.compile(name, type, content)
        except resolve.ResolveAPIError as e:
            # e.args[0] is the list of error records to render.
            ewin = self.popout(popout_prefix + name)
            lines = self.display_lines(ewin, e.args[0], lambda err: err['error']['ln'], None, lambda err: 'Error: {}'.format(err['error']['msg'][:-1]))
            codewin.options['scrollbind'] = True
            ewin.options['scrollbind'] = True
            return
        except resolve.ResolveCompilerError as e:
            ewin = self.popout(popout_prefix + name)
            lines = self.display_lines(ewin, e.args[0], lambda err: err['error']['ln'], None, lambda err: 'Error: {}'.format(err['error']['msg'][:-1]))
            codewin.options['scrollbind'] = True
            ewin.options['scrollbind'] = True
            return
        # Compilation succeeded: run the produced jar from a temp file.
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(jar)
            temp.flush()
            subprocess.call(['java', '-jar', temp.name])
    @neovim.command('ResolveVerify')
    def verify(self):
        """Generate and check verification conditions for the current buffer."""
        popout_prefix = '{}/Verification Conditions - '.format(os.readlink('/tmp'))
        if self.nvim.current.buffer.name.startswith(popout_prefix):
            self.nvim.command('winc h')
        if not self.nvim.current.buffer.name:
            self.nvim.out_write('You must set a buffer name (e.g. save it to a file) to verify a resolve program.\n')
            return
        name, type, content = ResolvePlugin.get_args(self.nvim.current.buffer)
        codewin = self.nvim.current.window
        try:
            vcs = resolve.genvcs(name, type, content)
        except resolve.ResolveAPIError as e:
            ewin = self.popout(popout_prefix + name)
            lines = self.display_lines(ewin, e.args[0], lambda err: err['error']['ln'], None, lambda err: 'Error: {}'.format(err['error']['msg'][:-1]))
            codewin.options['scrollbind'] = True
            ewin.options['scrollbind'] = True
            return
        except resolve.ResolveCompilerError as e:
            ewin = self.popout(popout_prefix + name)
            lines = self.display_lines(ewin, e.args[0], lambda err: err['error']['ln'], None, lambda err: 'Error: {}'.format(err['error']['msg'][:-1]))
            codewin.options['scrollbind'] = True
            ewin.options['scrollbind'] = True
            return
        # Render all VCs, remembering each VC id's display line.
        vcwin = self.popout(popout_prefix + name)
        lines = self.display_lines(vcwin, vcs['vcs'], lambda vc: vc['lineNum'], lambda vc: vc['vc'], lambda vc: '* {}\nGoal: {}Given:\n\t{}'.format(vc['vcInfo'], vc['vcGoal'] if isinstance(vc['vcGoal'], str) and vc['vcGoal'].endswith('\n') else '{}\n'.format(vc['vcGoal']), '\n\t'.join(vc['vcGivens'].split('\n')[:-1]) if vc['vcGivens'] else 'None'))
        codewin.options['scrollbind'] = True
        vcwin.options['scrollbind'] = True
        try:
            # Stream results, marking each VC's line with its outcome glyph.
            for verification in resolve.verify(name, type, content):
                if verification['result'] == 0:
                    result = '✓'
                elif verification['result'] == 1:
                    result = '?'
                elif verification['result'] == 2:
                    result = '-'
                else:
                    result = '✗'
                line = lines[verification['id']]
                vcwin.buffer.options['modifiable'] = True
                vcwin.buffer[line] = result + vcwin.buffer[line][1:]
                vcwin.buffer.options['modifiable'] = False
        except resolve.ResolveAPIError as e:
            self.nvim.out_write('Received error from resolve api: {}'.format(e))
            return
        except resolve.ResolveCompilerError as e:
            self.nvim.out_write('Received error from resolve compiler: {}'.format(e))
            return
| {
"content_hash": "1dc4cfc28ccbf6af9e40d5b21b5dbb94",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 350,
"avg_line_length": 33.53299492385787,
"alnum_prop": 0.5408719346049047,
"repo_name": "fkmclane/vim-resolve",
"id": "d0bbf6701aa8663c1e88b8f9a0058f6ef3cce22e",
"size": "6610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rplugin/python3/resolveplugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15116"
},
{
"name": "Vim script",
"bytes": "9219"
}
],
"symlink_target": ""
} |
"""
urls module includes all urls related settings. By default, import all from urls.common
"""
from common import * | {
"content_hash": "99732479f16e6139ba6bb277e23fff16",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 87,
"avg_line_length": 29,
"alnum_prop": 0.7586206896551724,
"repo_name": "teracyhq/django-tutorial",
"id": "a01948a64b0d503c9dc4feaf764eb068642808f1",
"size": "116",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "urls/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "19619"
}
],
"symlink_target": ""
} |
import click
from parsec.commands.roles.create_role import cli as create_role
from parsec.commands.roles.get_roles import cli as get_roles
from parsec.commands.roles.show_role import cli as show_role
@click.group()
def cli():
    # Empty click group; the role subcommands are attached below.
    # (Deliberately no docstring: click would surface it as --help text.)
    pass


# Register the individual role subcommands on the group.
cli.add_command(create_role)
cli.add_command(get_roles)
cli.add_command(show_role)
| {
"content_hash": "d90fdf89ce3f036e2297f9b4d2fc9959",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 64,
"avg_line_length": 23,
"alnum_prop": 0.7763975155279503,
"repo_name": "galaxy-iuc/parsec",
"id": "ea5680142a894c27e4f342b21e47b746a348d138",
"size": "322",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "parsec/commands/cmd_roles.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "194"
},
{
"name": "Python",
"bytes": "187279"
}
],
"symlink_target": ""
} |
import simpbot
from . import conf
# Obtain this module's registration handle and its command loader.
module = simpbot.get_module(sys=True)
loader = module.loader()


@loader('anti-clon', None)
def anticlon(irc, ev, result, target, channel, _, locale):
    # TODO: anti-clone handling is not implemented yet.
    pass
| {
"content_hash": "07fd4a3219d13c45e91cd5458dc42d38",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 58,
"avg_line_length": 19.5,
"alnum_prop": 0.7025641025641025,
"repo_name": "IsmaelRLG/simpbot",
"id": "eae25e8cd506b41bbf6615ea644ba61a76bce19c",
"size": "288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extra/channel_control/anticlon.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "310649"
}
],
"symlink_target": ""
} |
from knack.Guest.Debian import Debian
"""
Guest Factory, creates the right guest object from
given configs.
"""
class Factory(object):
    """Creates the right Guest object for a given box configuration."""

    # Fixed: the docstring advertised "@static" but the method was never
    # decorated, so Factory().create(cfg) would have passed the instance
    # as boxConfig.  Factory.create(cfg) keeps working unchanged.
    @staticmethod
    def create(boxConfig):
        """Instantiate a guest for ``boxConfig['guest_os']``.

        Raises Exception when the key is missing.  Returns ``False`` for
        unsupported guest OS identifiers, so callers must check the result.
        """
        if "guest_os" not in boxConfig:
            raise Exception("You have to pass guest_os in boxConfig")
        # Debian 7 and 8 (64-bit) share one implementation.
        if boxConfig["guest_os"] in ("debian8-64", "debian7-64"):
            return Debian(boxConfig)
        # raise an exception?
        return False
| {
"content_hash": "a24d80cbfcf436509e5fba0628543864",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 86,
"avg_line_length": 21.375,
"alnum_prop": 0.6666666666666666,
"repo_name": "claudio-walser/knack",
"id": "25262d81615a732a688aa1befe8c89164ad8eee8",
"size": "537",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "knack/Guest/Factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51159"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import sys
import os
def main():
    """Generate toolchain download-link and unpack-snippet .inc files.

    Usage: gen-toolchain-links.py <versions file> <base download URL> <output directory>
    """
    if len(sys.argv) != 4:
        print("Usage: gen-toolchain-links.py <versions file> <base download URL> <output directory>")
        sys.exit(1)
    out_dir = sys.argv[3]
    if not os.path.exists(out_dir):
        print("Creating directory %s" % out_dir)
        os.mkdir(out_dir)
    base_url = sys.argv[2]
    versions_file = sys.argv[1]
    # Parse simple NAME=VALUE lines from the versions file.
    version_vars = {}
    with open(versions_file) as f:
        for line in f:
            name, var = line.partition("=")[::2]
            version_vars[name.strip()] = var.strip()
    gcc_version = version_vars["CURRENT_TOOLCHAIN_GCC_VERSION"]
    toolchain_desc = version_vars["CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT"]
    # reStructuredText snippet templates; {} slots are (tar flag, archive name).
    unpack_code_linux_macos = """
::

    mkdir -p ~/esp
    cd ~/esp
    tar -x{}f ~/Downloads/{}
"""
    scratch_build_code_linux_macos = """
::

    git clone -b xtensa-1.22.x https://github.com/espressif/crosstool-NG.git
    cd crosstool-NG
    ./bootstrap && ./configure --enable-local && make install
"""
    # Per platform: (name, archive extension, tar compression flag, snippet);
    # Windows archives are .zip and get no unpack snippet.
    platform_info = [ ["linux64", "tar.gz", "z", unpack_code_linux_macos],
                      ["linux32", "tar.gz", "z", unpack_code_linux_macos],
                      ["osx", "tar.gz", "z", unpack_code_linux_macos],
                      ["win32", "zip", None, None]]
    with open(os.path.join(out_dir, 'download-links.inc'), "w") as links_file:
        for p in platform_info:
            platform_name = p[0]
            extension = p[1]
            unpack_cmd = p[2]
            unpack_code = p[3]
            archive_name = 'xtensa-lx106-elf-{}-{}-{}.{}'.format(
                platform_name, toolchain_desc, gcc_version, extension)
            # One RST substitution per platform pointing at the archive URL.
            print('.. |download_link_{}| replace:: {}{}'.format(
                platform_name, base_url, archive_name), file=links_file)
            if unpack_code is not None:
                with open(os.path.join(out_dir, 'unpack-code-%s.inc' % platform_name), "w") as f:
                    print(unpack_code.format(unpack_cmd, archive_name), file=f)
    with open(os.path.join(out_dir, 'scratch-build-code.inc'), "w") as code_file:
        print(scratch_build_code_linux_macos, file=code_file)


if __name__ == "__main__":
    main()
| {
"content_hash": "4898044af484070abc66ccdec24dc397",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 101,
"avg_line_length": 32.27142857142857,
"alnum_prop": 0.5568835768038956,
"repo_name": "espressif/ESP8266_RTOS_SDK",
"id": "4441f8dcf9a688f878ceba64654e0ce3edf52927",
"size": "2467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/gen-toolchain-links.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "48701"
},
{
"name": "Batchfile",
"bytes": "8453"
},
{
"name": "C",
"bytes": "16675480"
},
{
"name": "C++",
"bytes": "788226"
},
{
"name": "CMake",
"bytes": "197769"
},
{
"name": "Dockerfile",
"bytes": "2032"
},
{
"name": "Inno Setup",
"bytes": "3663"
},
{
"name": "Lex",
"bytes": "7800"
},
{
"name": "M4",
"bytes": "98046"
},
{
"name": "Makefile",
"bytes": "159517"
},
{
"name": "Pascal",
"bytes": "52308"
},
{
"name": "Perl",
"bytes": "15204"
},
{
"name": "Python",
"bytes": "1161765"
},
{
"name": "Shell",
"bytes": "100056"
},
{
"name": "Smarty",
"bytes": "5972"
},
{
"name": "VBScript",
"bytes": "294"
},
{
"name": "Yacc",
"bytes": "15875"
}
],
"symlink_target": ""
} |
from collections import OrderedDict, defaultdict
import sys
from pedigree import read_pedigree
# Maps the ASCII skeleton symbols produced by pedigree_structure() onto the
# box-drawing glyphs that are actually printed.
DRAW_CHARACTERS = {
    " ": " ",
    "\\": "┐",
    "/": "┘",
    ">": "├",
    "<": "┤",
    "|": "│",
    "-": "─"
}
def ancestors(pedigree, id):
    """Recursively collect the ancestor tree of *id* and its depth.

    Returns (tree, depth) where tree maps each parent id to its own ancestor
    tree, and depth is 0 for an individual without known parents.
    """
    tree = {}
    depth = -1
    parents = pedigree.get_parents(id)
    if parents:
        for parent in parents:
            subtree, parent_depth = ancestors(pedigree, parent)
            tree[parent] = subtree
            depth = max(depth, parent_depth)
    return (tree, depth + 1)
def descendents(pedigree, id):
    """Recursively collect the descendant tree of *id* and its depth.

    Returns (tree, depth) where tree maps each child id to its own
    descendant tree; a childless individual has depth 1.
    """
    tree = {}
    depth = 0
    for child in pedigree.find_childeren(id):
        subtree, child_depth = descendents(pedigree, child)
        tree[child] = subtree
        depth = max(depth, child_depth)
    return (tree, depth + 1)
def matrix_fill_ans(matrix, ans, row=0, col=0):
    """Place ancestor ids into *matrix*, one column further left per generation."""
    col = col - 1
    for offset, person in enumerate(ans):
        target_row = row + offset
        matrix[target_row][col] = person
        # Each generation doubles the row spacing of the next one.
        matrix_fill_ans(matrix, ans[person], target_row * 2, col)
def pedigree_structure(row, depth):
    """
    Yields a pedigree drawing skeleton, one symbol per column.
    It takes full advantage of the fractal nature of the pedigree drawing
    structure: each row is derived by mirroring a previously defined row.
    row: row index, where 0 is the center. It should be inside ]-2**depth, 2**depth[
    depth: how deep the pedigree goes.
    """
    def inverse(state):
        """
        Invert the upward and downward bends of another row's skeleton.
        """
        for c in state:
            if c == "\\":
                yield "/"
            elif c == "/":
                yield "\\"
            else:
                yield c
    # Center row: only the final branch symbol, padded with blanks.
    if row == 0:
        for i in range(1, depth):
            yield " "
        yield ">"
        return
    if row > 0:
        # The main mirror is at row 0.
        # Positive rows are just the inverse of the negative ones
        yield from inverse(pedigree_structure(-row, depth))
        return
    # The negative rows indexed as if from 1, depends on the depth.
    row_from_1 = 2**depth + row
    # Check if row_from_1 makes sense.
    if row_from_1 < 0 or row_from_1 > 2**depth:
        raise ValueError(f"Row {row} doesn't exists for depth {depth}.")
    # The first row is always a \
    if row_from_1 == 1:
        yield "\\"
        return
    # The pivot where the mirror is for the row.
    pivot = 2**(int.bit_length(row_from_1)-1)
    if pivot == row_from_1:
        # Exact power of two: a branch point. Add empties for each zero bit.
        for i in range(2, int.bit_length(row_from_1)):
            yield " "
        yield ">"
        yield "\\"
        return
    # The row at the lower side of the mirror.
    mirror_row = 2*pivot - row_from_1
    # So we inverse the state of that mirror row (in the original counting)
    yield from inverse(pedigree_structure(mirror_row - 2**depth, depth))
    # Add empties for each zero bit
    for i in range(1, int.bit_length(row_from_1) - int.bit_length(mirror_row)):
        yield " "
    # And since we cut off the highest bit by using the mirror row, here we need a |
    yield "|"
def bin_end_zeros(i):
    """Return the number of trailing zero bits in the binary form of *i*.

    Raises:
        ValueError: if *i* is 0, for which the count is undefined.

    Note: the original called ``sys.exit()`` for 0, which terminates the
    whole interpreter from inside a pure helper; raising is recoverable
    by callers and equally fatal if left unhandled.
    """
    if i == 0:
        raise ValueError("bin_end_zeros() is undefined for 0")
    # (i & -i) isolates the lowest set bit; its bit_length minus one is
    # exactly the number of trailing zeros.  Works for negative i too
    # (Python ints behave as infinite two's complement).
    return (i & -i).bit_length() - 1
def draw(ans, ans_depth, id, des, des_depth, *, col_width=None, full=False):
    """Print an ASCII pedigree chart centered on *id*.

    ans: ancestor tree (mapping id -> subtree).
    ans_depth: number of ancestor generations in *ans*.
    id: the focus individual, shown on the center row.
    des, des_depth: descendant data; currently unused by the drawing code
        (see the commented-out ``des_depth`` below) -- TODO confirm.
    col_width: character width of one generation column (defaults to 16).
    full: when True, also print structural rows that carry no label.

    NOTE(review): relies on the module-level ``DRAW_CHARACTERS`` mapping
    and the ``defaultdict`` import defined earlier in the file (outside
    this chunk).
    """
    if not col_width:
        col_width = 16
    # matrix[row][col] holds the id drawn at that compact position.
    matrix = defaultdict(dict)
    matrix[0][0] = id
    matrix_fill_ans(matrix, ans)
    # Spread compact matrix rows onto the final drawing rows, centered
    # around 0 and spaced by powers of two per generation column.
    spread_rows = {}
    top = -(2**ans_depth)
    for row in sorted(matrix):
        for col in sorted(matrix[row]):
            half_diff = 2**(ans_depth - abs(col))
            new_row = top + (2*row + 1) * half_diff
            spread_rows[new_row] = matrix[row][col]
    max_depth = max(ans_depth, 0) # des_depth
    max_range = 2**max_depth - 1
    if full:
        printing_rows = range(-max_range, max_range + 1)
    else:
        printing_rows = sorted(spread_rows)
    for row in printing_rows:
        line = []
        for col in pedigree_structure(row, ans_depth):
            if col in ">|":
                end_char = DRAW_CHARACTERS[col]
                if col in "><" and not full:
                    # If there is no parent in the printing rows, the > doesn't make sense.
                    diff_with_parents = 2**(bin_end_zeros(2**(ans_depth) + row) - 1)
                    if (row - diff_with_parents) not in printing_rows and (row + diff_with_parents) not in printing_rows:
                        end_char = " "
                line.append(" "*(col_width-1) + end_char)
            elif col in "\\/":
                # Branch bend: attach the label (if any) padded with dashes.
                label = spread_rows[row] if row in spread_rows else ""
                line.append(label[:col_width-2].ljust(col_width-1, DRAW_CHARACTERS["-"]) + DRAW_CHARACTERS[col])
            else:
                line.append(col*col_width)
        if row == 0:
            # The focus individual is appended at the end of the center row.
            line.append(spread_rows[row][:col_width-1].ljust(col_width, DRAW_CHARACTERS["-"]))
        print("".join(line))
if __name__ == "__main__":
    # Load the pedigree and draw it centered on the bird id given as the
    # first command-line argument.  ``read_pedigree`` and ``ancestors`` are
    # defined earlier in this file (outside this chunk).
    pedigree = read_pedigree()
    center_bird = sys.argv[1]
    (ans, ans_depth) = ancestors(pedigree, center_bird)
    (des, des_depth) = descendents(pedigree, center_bird)
    # print(ans, ans_depth)
    # print(des, des_depth)
    draw(ans, ans_depth, center_bird, des, des_depth)
| {
"content_hash": "951f0220a29f3701fff39f3be048e835",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 121,
"avg_line_length": 32.2,
"alnum_prop": 0.5518245341614907,
"repo_name": "BertRaeymaekers/scrapbook",
"id": "dd5cbc67e86f751c2a71a6661e449a20c3baaff1",
"size": "5189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "random_projects/pedigree_scripts/drawpedigree.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40923"
},
{
"name": "Roff",
"bytes": "918"
},
{
"name": "Ruby",
"bytes": "19804"
},
{
"name": "Shell",
"bytes": "8315"
},
{
"name": "Visual Basic",
"bytes": "974"
}
],
"symlink_target": ""
} |
class APIException(Exception):
    """Base class for API exceptions.

    Carries a human-readable message in ``text``.
    """

    def __init__(self, text):
        """
        :param str text: error description
        """
        # BUG FIX: the original never called Exception.__init__, leaving
        # ``args`` empty, which breaks pickling and the default repr.
        super(APIException, self).__init__(text)
        self.text = text

    def __str__(self):
        return 'APIException(' + self.text + ')'
| {
"content_hash": "3095435ca1945552b5b85eca416ae7c1",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 48,
"avg_line_length": 22.636363636363637,
"alnum_prop": 0.5060240963855421,
"repo_name": "vt-dev/sdk",
"id": "fea9d23fac13be4fd90becff722fb38bab02fade",
"size": "249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/sdk/exceptions/APIException.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "211981"
},
{
"name": "Makefile",
"bytes": "161"
},
{
"name": "Python",
"bytes": "70613"
}
],
"symlink_target": ""
} |
"""
:mod:`vimeourlparser` --- Vimeo URL parser plugin
=================================================
.. note::
This module requires `lxml library <http://lxml.de/>'_
"""
from __future__ import unicode_literals
__docformat__ = "restructuredtext en"
import urllib2
import re
from lxml import etree
from lxml.etree import XMLSyntaxError
from Skype4Py.enums import cmsReceived, cmsSent
from plugin import Plugin
from utils import retry_on_exception
from output import ChatMessage
def get_video_id(url):
    """
    Parse Vimeo video URL and return its ID, or None if no ID is found.

    >>> assert get_video_id("http://vimeo.com/123") == 123
    >>> assert get_video_id("http://www.vimeo.com/123") == 123
    >>> assert get_video_id("vimeo.com/123/") == 123
    """
    # A plain r"" literal is already unicode here (the module imports
    # unicode_literals) and, unlike the original ur"" form, is also valid
    # syntax on Python 3.  The doctests above were changed from "is 123"
    # to "== 123": int identity only held via CPython small-int interning.
    match = re.search(r"vimeo\.com/(\d+)", url, re.IGNORECASE)
    if match:
        return int(match.group(1))
    return None
class VimeoURLParser(Plugin):
    """
    This plugin monitors received messages and outputs video titles(s) if that
    message contains a valid Vimeo video URL(s).
    """

    # Vimeo simple API endpoint; "{0}" is the numeric video ID.
    _api_url = "http://vimeo.com/api/v2/video/{0}.xml"

    # Matches "vimeo.com/<digits>" anywhere in a message body.  r"" is
    # already unicode here (unicode_literals); ur"" is invalid on Python 3.
    _pattern = re.compile(r"(vimeo\.com/\d+)", re.IGNORECASE)

    _headers = {
        "User-Agent": "Googlebot/2.1 (+http://www.googlebot.com/bot.html)",
        "Accept-Language": "en-US,en;q=0.5",
        "Connection": "Keep-Alive",
    }

    _opener = urllib2.build_opener()
    _opener.addheaders = [(k, v) for k, v in _headers.iteritems()]

    def get_video_title(self, video_id):
        """
        Retrieves Vimeo video title by its ID.

        Returns None when the API response cannot be fetched or parsed.

        >>> plugin = VimeoURLParser()
        >>> plugin.get_video_title(16056709)
        u'METACHAOS'
        """
        # Titles are cached (Plugin-provided cache) so repeated links don't
        # re-query the API.
        cached_title = self._cache.get(video_id)
        if cached_title is not None:
            return cached_title

        url = self._api_url.format(video_id)

        @retry_on_exception((urllib2.URLError, urllib2.HTTPError), tries=2,
                            backoff=0, delay=1)
        def retrieve_xml():
            response = self._opener.open(url)
            buf = response.read()
            try:
                return etree.fromstring(buf)
            except XMLSyntaxError:
                return

        xml = retrieve_xml()

        try:
            title = xml.find("video/title").text
        except AttributeError:
            # xml is None (fetch/parse failed) or the element is missing.
            return None
        else:
            title = unicode(title)
            self._cache.set(video_id, title)
            return title

    def on_message_status(self, message, status):
        """Plugin hook: scan chat messages for Vimeo links, queue title replies."""
        if status not in (cmsReceived, cmsSent):
            return

        # Cheap substring test before running the regex.
        if "vimeo.com/" not in message.Body:
            return

        found = re.findall(self._pattern, message.Body.strip())
        if not found:
            return

        titles = []

        for url in found:
            video_id = get_video_id(url)
            if video_id is None:
                continue
            self._logger.info("Retrieving {0} for {1}".format(
                video_id, message.FromHandle
            ))
            title = self.get_video_title(video_id)
            if title is not None:
                titles.append(title)
            else:
                msg = "Unable to retrieve video title for {0}".format(video_id)
                titles.append(msg)
                msg = "Unable to retrieve {0} for {1}".format(
                    video_id, message.FromHandle
                )
                self._logger.error(msg)

        if not titles:
            return

        # BUG FIX: the original tested "len(titles) is 1", an identity
        # comparison that only works because CPython interns small ints.
        if len(titles) == 1:
            msg = u"[Vimeo] {0}".format("".join(titles))
        else:
            msg = u"[Vimeo]\n{0}".format("\n".join(titles))

        self.output.append(ChatMessage(message.Chat.Name, msg))

        return message, status
if __name__ == "__main__":
    # Run the module's doctests (get_video_id / get_video_title examples)
    # when executed directly.  Note the get_video_title doctest performs a
    # live network request.
    import doctest
    doctest.testmod()
| {
"content_hash": "79fc7af745a559f990a09a423a7dbd01",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 79,
"avg_line_length": 25.444444444444443,
"alnum_prop": 0.548933984073979,
"repo_name": "tetra5/gooby",
"id": "39a285c3d3e7ed6d88ba85a2a47a8595eeed701b",
"size": "3941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gooby/plugins/vimeourlparser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "239065"
}
],
"symlink_target": ""
} |
from urllib import parse
import functools
from aiohttp.web import HTTPForbidden
from aiohttp_session import get_session
from .handlers import login_handler, logout_handler
from .utils import APP_KEY, SESSION_KEY
def setup(app, host, version,
          host_prefix='', host_scheme='https',
          login_route='/login', logout_route='/logout',
          on_success='/', on_logout='/'):
    """Wire CAS authentication into the app.

    :param app: aiohttp app.
    :param str host: CAS host to authenticate against
    :param str version: Version of CAS to use
    :param str host_prefix: Server prefix that CAS runs under
    :param str host_scheme: Scheme to access the CAS host under
    :param str login_route: Route for local login handler
    :param str logout_route: Route for local logout handler
    :param str on_success: Default route for redirect after a successful login
    :param str on_logout: Route for redirect after logout
    """
    # The path component must end with '/' for the CAS root URL to join
    # cleanly with endpoint names later on.
    prefix = host_prefix if host_prefix.endswith('/') else host_prefix + '/'
    cas_root_url = parse.urlunsplit((host_scheme, host, prefix, None, None))

    app[APP_KEY] = dict(
        VERSION=version,
        ROOT_URL=cas_root_url,
        LOGIN_ROUTE=login_route,
        LOGOUT_ROUTE=logout_route,
        ON_SUCCESS=on_success,
        ON_LOGOUT=on_logout,
    )

    # Register the local login/logout endpoints.
    for route, handler in ((login_route, login_handler),
                           (logout_route, logout_handler)):
        app.router.add_route('GET', route, handler)
    return app
def login_required(func, *args, **kwargs):
    """Decorator for request handlers that require an authenticated user.

    If the session holds no CAS user, the requested path is remembered
    under the session's 'redir' key and the login handler runs instead of
    the wrapped handler.

    :param func: handler coroutine to wrap
    """
    @functools.wraps(func)
    async def wrapped(request):
        session = await get_session(request)
        cas_attrs = session.get(SESSION_KEY)
        if cas_attrs and 'user' in cas_attrs:
            # Already authenticated -- run the real handler.
            return await func(request)
        # Not logged in: stash the target path and start the CAS flow.
        session[SESSION_KEY] = {'redir': request.path}
        return await login_handler(request)
    return wrapped
def filter_attrs(filter_fn, *args, **kwargs):
    """Decorator factory gating a handler on the stored CAS attributes.

    The wrapped handler runs only when ``filter_fn(attrs)`` is truthy for
    the attributes stored in the session; otherwise ``HTTPForbidden`` is
    raised (also when no attributes are stored at all).

    I think this will only work after at least one @login_required.

    Example:

        @login_required
        @filter_attrs(lambda x: x['employeeType'] != 'staff')
        async def handler(request):
            return "Hello there!"

    :param filter_fn: predicate evaluated against the session attributes
    """
    def decorator(func):
        @functools.wraps(func)
        async def wrapped(request):
            session = await get_session(request)
            cas_attrs = session.get(SESSION_KEY)
            # Missing attributes or a failing predicate both deny access.
            if cas_attrs and filter_fn(cas_attrs):
                return await func(request)
            raise HTTPForbidden
        return wrapped
    return decorator
| {
"content_hash": "3c59ec65325fe3449568c076d23f37f3",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 83,
"avg_line_length": 33.881188118811885,
"alnum_prop": 0.6285797779076563,
"repo_name": "bard-it/aiohttp_cas",
"id": "52a8b1c74beb25eb28932c36495e647aca30a7be",
"size": "3422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aiohttp_cas/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12304"
}
],
"symlink_target": ""
} |
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RetrieveFriends(Choreography):
    """Choreo wrapper for the RunKeeper "RetrieveFriends" endpoint."""

    def __init__(self, temboo_session):
        """
        Create a new instance of the RetrieveFriends Choreo. A TembooSession
        object, containing a valid set of Temboo credentials, must be supplied.
        """
        super(RetrieveFriends, self).__init__(
            temboo_session, '/Library/RunKeeper/Friends/RetrieveFriends')

    def new_input_set(self):
        # Fresh, empty input set for configuring a run of this choreo.
        return RetrieveFriendsInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result for typed output access.
        return RetrieveFriendsResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Handle representing an asynchronous execution of this choreo.
        return RetrieveFriendsChoreographyExecution(session, exec_id, path)
class RetrieveFriendsInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the
    RetrieveFriends Choreo. The InputSet object is used to specify input
    parameters when executing this Choreo.
    """

    def set_AccessToken(self, value):
        """
        Set the AccessToken input. ((required, string) The Access Token
        retrieved after the final step in the OAuth process.)
        """
        super(RetrieveFriendsInputSet, self)._set_input('AccessToken', value)

    def set_PageSize(self, value):
        """
        Set the PageSize input. ((optional, integer) The number entries to
        return per page. Defaults to 25.)
        """
        super(RetrieveFriendsInputSet, self)._set_input('PageSize', value)

    def set_Page(self, value):
        """
        Set the Page input. ((optional, integer) The page of entries to
        return, used with PageSize to page through results. Defaults to 0,
        the first page.)
        """
        super(RetrieveFriendsInputSet, self)._set_input('Page', value)
class RetrieveFriendsResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the
    RetrieveFriends Choreo. The ResultSet object is used to retrieve the
    results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE: the parameter shadows the builtin `str`; the name is kept
        # for compatibility with generated-code callers.
        return json.loads(str)

    def get_Response(self):
        """
        "Response" output: (json) The response from RunKeeper.
        """
        return self._output.get('Response', None)

    def get_Next(self):
        """
        "Next" output: (integer) The next available page of entries; can be
        passed back into the Page input while paging through entries.
        """
        return self._output.get('Next', None)

    def get_Previous(self):
        """
        "Previous" output: (integer) The previous available page of entries;
        can be passed back into the Page input while paging through entries.
        """
        return self._output.get('Previous', None)
class RetrieveFriendsChoreographyExecution(ChoreographyExecution):
    # Tracks an asynchronous execution of the RetrieveFriends choreo.
    def _make_result_set(self, response, path):
        # Wrap the raw response in the choreo-specific result set.
        return RetrieveFriendsResultSet(response, path)
| {
"content_hash": "6a9e4df2f853536b1eb6a73c51a747d7",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 228,
"avg_line_length": 44.35526315789474,
"alnum_prop": 0.6991990507267873,
"repo_name": "lupyuen/RaspberryPiImage",
"id": "3a548faa5db6066b8823c1ef02b3876b7341a574",
"size": "4232",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "home/pi/GrovePi/Software/Python/others/temboo/Library/RunKeeper/Friends/RetrieveFriends.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "82308"
},
{
"name": "C",
"bytes": "3197439"
},
{
"name": "C#",
"bytes": "33056"
},
{
"name": "C++",
"bytes": "1020255"
},
{
"name": "CSS",
"bytes": "208338"
},
{
"name": "CoffeeScript",
"bytes": "87200"
},
{
"name": "Eagle",
"bytes": "1632170"
},
{
"name": "Go",
"bytes": "3646"
},
{
"name": "Groff",
"bytes": "286691"
},
{
"name": "HTML",
"bytes": "41527"
},
{
"name": "JavaScript",
"bytes": "403603"
},
{
"name": "Makefile",
"bytes": "33808"
},
{
"name": "Objective-C",
"bytes": "69457"
},
{
"name": "Perl",
"bytes": "96047"
},
{
"name": "Processing",
"bytes": "1304"
},
{
"name": "Python",
"bytes": "13358098"
},
{
"name": "Shell",
"bytes": "68795"
},
{
"name": "TeX",
"bytes": "4317"
}
],
"symlink_target": ""
} |
# Directory of sample multi-resolution OpenEXR images from the
# openexr-images distribution.  NOTE(review): `parent` is defined earlier
# in this file (outside this chunk).
imagedir = parent + "/openexr-images-1.5.0/MultiResolution"
files = [ "Bonita.exr", "ColorCodedLevels.exr",
          # FIXME -- we don't know how to deal with RIP-maps -- Kapaa,
          "KernerEnvCube.exr", "KernerEnvLatLong.exr", "MirrorPattern.exr",
          "OrientationCube.exr", "OrientationLatLong.exr",
          "PeriodicPattern.exr", "StageEnvCube.exr", "StageEnvLatLong.exr",
          "WavyLinesCube.exr", "WavyLinesLatLong.exr", "WavyLinesSphere.exr" ]
# Queue a read/write round-trip test for every image.  `command` and
# `rw_command` come from the surrounding test harness (outside this chunk).
for f in files:
    command += rw_command (imagedir, f)
| {
"content_hash": "2b2ce92909ae09d393bb5ef7591a5d44",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 78,
"avg_line_length": 52.7,
"alnum_prop": 0.6603415559772297,
"repo_name": "jeremyselan/oiio",
"id": "526b27177397da9c030ab410c8f975564bf39e22",
"size": "936",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "testsuite/openexr-multires/run.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "158687"
},
{
"name": "C++",
"bytes": "3488403"
},
{
"name": "Python",
"bytes": "104305"
},
{
"name": "TeX",
"bytes": "390893"
}
],
"symlink_target": ""
} |
from spyne.model.complex import ComplexModel
import unittest
from pprint import pprint
from spyne.application import Application
from spyne.const.xml_ns import xsd as _ns_xsd
from spyne.interface.wsdl.wsdl11 import Wsdl11
from spyne.model.complex import Array
from spyne.protocol.xml import XmlDocument
from spyne.protocol.soap.soap11 import Soap11
from spyne.server.wsgi import WsgiApplication
from spyne.service import ServiceBase
from spyne.decorator import rpc
from spyne.model.enum import Enum
from lxml import etree
# Expected enumeration values, in declaration order; used by test_wsdl to
# verify the xs:restriction members emitted in the WSDL.
vals = [
    'Monday',
    'Tuesday',
    'Wednesday',
    'Thursday',
    'Friday',
    'Saturday',
    'Sunday',
]

# spyne Enum with one member per weekday; type_name controls the name of
# the xs:simpleType generated in the WSDL.
DaysOfWeekEnum = Enum(
    'Monday',
    'Tuesday',
    'Wednesday',
    'Thursday',
    'Friday',
    'Saturday',
    'Sunday',
    type_name = 'DaysOfWeekEnum',
)
class TestService(ServiceBase):
    # Minimal service exposing one RPC that accepts and returns the enum.
    @rpc(DaysOfWeekEnum, _returns=DaysOfWeekEnum)
    def get_the_day(self, day):
        # Test fixture: always answers Sunday regardless of input.
        return DaysOfWeekEnum.Sunday
class Test(ComplexModel):
    # Complex type holding up to seven weekday values (one per day).
    days = DaysOfWeekEnum(max_occurs=7)
class TestEnum(unittest.TestCase):
    """WSDL generation and XML (de)serialization tests for spyne Enums.

    Cleanups vs. the original: ``assertEquals`` (a deprecated alias,
    removed in Python 3.12) replaced by ``assertEqual``, and the 2to3
    ``print((x))`` artifacts replaced by plain ``print(x)``.
    """

    def setUp(self):
        self.app = Application([TestService], 'tns', in_protocol=Soap11(),
                               out_protocol=Soap11())
        self.app.transport = 'test'
        self.server = WsgiApplication(self.app)
        self.wsdl = Wsdl11(self.app.interface)
        self.wsdl.build_interface_document('prot://url')

    def test_wsdl(self):
        """The WSDL must expose the enum as an xs:simpleType restriction."""
        wsdl = self.wsdl.get_interface_document()
        elt = etree.fromstring(wsdl)
        simple_type = elt.xpath('//xs:simpleType',
                                namespaces=self.app.interface.nsmap)[0]

        print(etree.tostring(elt, pretty_print=True))
        print(simple_type)

        self.assertEqual(simple_type.attrib['name'], 'DaysOfWeekEnum')
        self.assertEqual(simple_type[0].tag, "{%s}restriction" % _ns_xsd)
        self.assertEqual([e.attrib['value'] for e in simple_type[0]], vals)

    def test_serialize(self):
        """A single enum value must round-trip through an XML element."""
        mo = DaysOfWeekEnum.Monday
        print(repr(mo))

        elt = etree.Element('test')
        XmlDocument().to_parent(None, DaysOfWeekEnum, mo, elt, 'test_namespace')
        elt = elt[0]
        ret = XmlDocument().from_element(None, DaysOfWeekEnum, elt)

        self.assertEqual(mo, ret)

    def test_serialize_complex_array(self):
        """An Array(Enum) round-trips and emits one element per value."""
        days = [
            DaysOfWeekEnum.Monday,
            DaysOfWeekEnum.Tuesday,
            DaysOfWeekEnum.Wednesday,
            DaysOfWeekEnum.Thursday,
            DaysOfWeekEnum.Friday,
            DaysOfWeekEnum.Saturday,
            DaysOfWeekEnum.Sunday,
        ]

        days_xml = [
            ('{tns}DaysOfWeekEnum', 'Monday'),
            ('{tns}DaysOfWeekEnum', 'Tuesday'),
            ('{tns}DaysOfWeekEnum', 'Wednesday'),
            ('{tns}DaysOfWeekEnum', 'Thursday'),
            ('{tns}DaysOfWeekEnum', 'Friday'),
            ('{tns}DaysOfWeekEnum', 'Saturday'),
            ('{tns}DaysOfWeekEnum', 'Sunday'),
        ]

        DaysOfWeekEnumArray = Array(DaysOfWeekEnum)
        DaysOfWeekEnumArray.__namespace__ = 'tns'

        elt = etree.Element('test')
        XmlDocument().to_parent(None, DaysOfWeekEnumArray, days,
                                elt, 'test_namespace')
        elt = elt[0]
        ret = XmlDocument().from_element(None, Array(DaysOfWeekEnum), elt)

        assert days == ret

        print(etree.tostring(elt, pretty_print=True))
        pprint(self.app.interface.nsmap)

        assert days_xml == [(e.tag, e.text) for e in
                            elt.xpath('//tns:DaysOfWeekEnum',
                                      namespaces=self.app.interface.nsmap)]

    def test_serialize_simple_array(self):
        """Enum(max_occurs=7) inside a ComplexModel must round-trip."""
        t = Test(days=[
            DaysOfWeekEnum.Monday,
            DaysOfWeekEnum.Tuesday,
            DaysOfWeekEnum.Wednesday,
            DaysOfWeekEnum.Thursday,
            DaysOfWeekEnum.Friday,
            DaysOfWeekEnum.Saturday,
            DaysOfWeekEnum.Sunday,
        ])

        Test.resolve_namespace(Test, 'tns')

        elt = etree.Element('test')
        XmlDocument().to_parent(None, Test, t, elt, 'test_namespace')
        elt = elt[0]
        print(etree.tostring(elt, pretty_print=True))

        ret = XmlDocument().from_element(None, Test, elt)
        self.assertEqual(t.days, ret.days)
if __name__ == '__main__':
    # Run this test module directly with the standard unittest runner.
    unittest.main()
| {
"content_hash": "1f7fb947dff9b63714d5a3b4c69875d5",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 97,
"avg_line_length": 30.145833333333332,
"alnum_prop": 0.6067726330338632,
"repo_name": "deevarvar/myLab",
"id": "3d8a02e43de15d3c3a9c01d5750a25be9c08f523",
"size": "5137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "baidu_code/soap_mockserver/spyne/test/model/test_enum.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "850"
},
{
"name": "C",
"bytes": "856044"
},
{
"name": "C++",
"bytes": "2988"
},
{
"name": "CSS",
"bytes": "6488"
},
{
"name": "DIGITAL Command Language",
"bytes": "282400"
},
{
"name": "HTML",
"bytes": "119253"
},
{
"name": "JavaScript",
"bytes": "445705"
},
{
"name": "Makefile",
"bytes": "20119"
},
{
"name": "Objective-C",
"bytes": "108"
},
{
"name": "PHP",
"bytes": "2502"
},
{
"name": "Python",
"bytes": "2305843"
},
{
"name": "Roff",
"bytes": "106"
},
{
"name": "Ruby",
"bytes": "478"
},
{
"name": "Shell",
"bytes": "68858"
}
],
"symlink_target": ""
} |
from unittest import mock
from oslo_service import loopingcall
from cinder.tests.unit import test
from cinder.volume.drivers.netapp.dataontap.utils import loopingcalls
class LoopingCallsTestCase(test.TestCase):
    """Unit tests for the NetApp LoopingCalls task collection."""

    def setUp(self):
        super(LoopingCallsTestCase, self).setUp()
        # Two distinct fake tasks so ordering and per-task arguments can
        # be verified independently.
        self.mock_first_looping_task = mock.Mock()
        self.mock_second_looping_task = mock.Mock()

        # Patch FixedIntervalLoopingCall so each add_task() call receives
        # the next fake task instead of constructing a real looping call.
        self.mock_loopingcall = self.mock_object(
            loopingcall,
            'FixedIntervalLoopingCall',
            side_effect=[self.mock_first_looping_task,
                         self.mock_second_looping_task]
        )
        self.loopingcalls = loopingcalls.LoopingCalls()

    def test_add_task(self):
        interval = 3600
        initial_delay = 5
        lc = self.loopingcalls

        lc.add_task(self.mock_first_looping_task, interval)
        lc.add_task(self.mock_second_looping_task, interval, initial_delay)

        # Both tasks are registered, each with its own timing parameters.
        self.assertEqual(2, len(lc.tasks))
        self.assertEqual(interval, lc.tasks[0].interval)
        self.assertEqual(initial_delay, lc.tasks[1].initial_delay)

    def test_start_tasks(self):
        interval = 3600
        initial_delay = 5
        lc = self.loopingcalls

        lc.add_task(self.mock_first_looping_task, interval)
        lc.add_task(self.mock_second_looping_task, interval, initial_delay)

        lc.start_tasks()

        # Default initial delay is 0; the explicit one is passed through.
        self.mock_first_looping_task.start.assert_called_once_with(
            interval, 0)
        self.mock_second_looping_task.start.assert_called_once_with(
            interval, initial_delay)
| {
"content_hash": "96f11770d3e66239f494e42807ad5d03",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 74,
"avg_line_length": 33.7,
"alnum_prop": 0.6540059347181009,
"repo_name": "mahak/cinder",
"id": "213fd58a00d6e1f747199176a56eed527819de2f",
"size": "2316",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/volume/drivers/netapp/dataontap/utils/test_loopingcalls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "259"
},
{
"name": "Mako",
"bytes": "976"
},
{
"name": "Python",
"bytes": "25078356"
},
{
"name": "Shell",
"bytes": "6456"
},
{
"name": "Smarty",
"bytes": "67595"
}
],
"symlink_target": ""
} |
from django.db import models
from django.contrib.auth.models import User
class Activity(models.Model):
    """A scheduled to-do item owned by a user."""

    # on_delete was the implicit CASCADE default before Django 2.0 and is
    # mandatory afterwards; spelling it out preserves behavior and keeps
    # the model compatible with modern Django.
    owner = models.ForeignKey(User, on_delete=models.CASCADE)
    title = models.CharField(max_length=20)
    description = models.CharField(max_length=140)
    # Estimated time to complete; optional.
    approx_time = models.IntegerField(null=True)
    due_date = models.DateField()
    priority = models.IntegerField(default=0)
class Frees(models.Model):
    """A block of free time between startTime and finishTime."""
    # NOTE(review): finishTime >= startTime is presumably expected but is
    # not enforced here -- confirm against callers.
    startTime = models.DateTimeField()
    finishTime = models.DateTimeField()
| {
"content_hash": "a4efa0a8a53b27046be59e11ee97fd02",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 50,
"avg_line_length": 29.5625,
"alnum_prop": 0.7378435517970402,
"repo_name": "Victoria-Sardelli/hackBU16",
"id": "a2e1f9f1d3136cf5f2c073573cc67a2c7b3e49a8",
"size": "473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chalk/scheduler/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "27025"
},
{
"name": "HTML",
"bytes": "14012"
},
{
"name": "JavaScript",
"bytes": "457470"
},
{
"name": "Python",
"bytes": "14172"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Creates the Gifs table: a GIF URL associated with a numeric
    # low/high boundary pair (boundary semantics are defined by the app
    # code, not visible here).

    dependencies = [
        ('pingpong', '0002_auto_20150130_1509'),
    ]

    operations = [
        migrations.CreateModel(
            name='Gifs',
            fields=[
                # Standard auto-increment primary key.
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('url', models.URLField(blank=True)),
                ('low_boundary', models.IntegerField()),
                ('high_boundary', models.IntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| {
"content_hash": "41d18d8e71e2849e9f079eb2662c7ad6",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 114,
"avg_line_length": 27.44,
"alnum_prop": 0.5306122448979592,
"repo_name": "simpleenergy/pingpong",
"id": "b9b732f3f852f25048aaf029fcaecec5d44748c3",
"size": "710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pingpong/migrations/0003_gifs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "146703"
}
],
"symlink_target": ""
} |
"""
@brief test log(time=1s)
You should indicate a time in seconds. The program ``run_unittests.py``
will sort all test files by increasing time and run them.
"""
import unittest
from pyquickhelper.pycode import ExtTestCase
from python3_module_template.subproject.myexampleb import onefunction
class TestExample(ExtTestCase):
    """Third example of a test."""

    def test_onefunction(self):
        # BUG FIX: the original used assertTrue(onefunction(3, 4), 7),
        # which passes 7 as the failure *message* and never verifies the
        # sum; assertEqual performs the intended comparison.
        self.assertEqual(onefunction(3, 4), 7)
        with self.assertRaises(TypeError):
            onefunction(3.3, 4)
if __name__ == "__main__":
    # Run this test module directly with the standard unittest runner.
    unittest.main()
| {
"content_hash": "b35dd5fe6a98c5207cc9c93e9613059f",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 23.958333333333332,
"alnum_prop": 0.688695652173913,
"repo_name": "sdpython/python3_module_template",
"id": "3b9333e46fe51a66a9b02a8d621c990d44c50e9b",
"size": "575",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_unittests/ut_example/test_example_function.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "401"
},
{
"name": "Jupyter Notebook",
"bytes": "10379"
},
{
"name": "Python",
"bytes": "21087"
}
],
"symlink_target": ""
} |
from datetime import datetime
from alertaclient.utils import DateTime
class Alert:
    """In-memory representation of an alerta alert.

    Only ``resource`` and ``event`` are mandatory; every other field falls
    back to the defaults used by the alerta API.
    """

    def __init__(self, resource, event, **kwargs):
        """
        Args:
            resource: the resource under alarm (required, non-empty).
            event: the event name (required, non-empty).
            **kwargs: any other alert field (severity, correlate, status,
                service, group, value, text, tags, attributes, origin,
                event_type/type, create_time, timeout, raw_data, customer,
                plus the server-maintained bookkeeping fields).

        Raises:
            ValueError: when resource/event is missing or an attribute key
                contains '.' or '$'.
        """
        if not resource:
            raise ValueError('Missing mandatory value for "resource"')
        if not event:
            raise ValueError('Missing mandatory value for "event"')
        # Scan the attribute keys once (the original looked the dict up
        # twice and crashed on attributes=None).
        attributes = kwargs.get('attributes') or dict()
        if any('.' in key or '$' in key for key in attributes):
            raise ValueError('Attribute keys must not contain "." or "$"')

        self.id = kwargs.get('id', None)
        self.resource = resource
        self.event = event
        self.environment = kwargs.get('environment', None) or ''
        self.severity = kwargs.get('severity', None)
        self.correlate = kwargs.get('correlate', None) or list()
        # An alert always correlates with its own event.
        if self.correlate and event not in self.correlate:
            self.correlate.append(event)
        self.status = kwargs.get('status', None) or 'unknown'
        self.service = kwargs.get('service', None) or list()
        self.group = kwargs.get('group', None) or 'Misc'
        self.value = kwargs.get('value', None)
        self.text = kwargs.get('text', None) or ''
        self.tags = kwargs.get('tags', None) or list()
        self.attributes = attributes
        self.origin = kwargs.get('origin', None)
        # Accept both 'event_type' and the wire name 'type'.
        self.event_type = kwargs.get('event_type', kwargs.get('type', None)) or 'exceptionAlert'
        self.create_time = kwargs.get('create_time', None) or datetime.utcnow()
        self.timeout = kwargs.get('timeout', None)
        self.raw_data = kwargs.get('raw_data', None)
        self.customer = kwargs.get('customer', None)

        # Server-maintained fields, populated when parsing API responses.
        self.duplicate_count = kwargs.get('duplicate_count', None)
        self.repeat = kwargs.get('repeat', None)
        self.previous_severity = kwargs.get('previous_severity', None)
        self.trend_indication = kwargs.get('trend_indication', None)
        self.receive_time = kwargs.get('receive_time', None) or datetime.utcnow()
        self.last_receive_id = kwargs.get('last_receive_id', None)
        self.last_receive_time = kwargs.get('last_receive_time', None)
        self.history = kwargs.get('history', None) or list()

    def __repr__(self):
        return 'Alert(id={!r}, environment={!r}, resource={!r}, event={!r}, severity={!r}, status={!r}, customer={!r})'.format(
            self.id, self.environment, self.resource, self.event, self.severity, self.status, self.customer)

    @classmethod
    def parse(cls, json):
        """Build an Alert from a decoded JSON document (API wire format).

        Raises:
            ValueError: when a list/object/integer field has the wrong
                JSON type.
        """
        if not isinstance(json.get('correlate', []), list):
            raise ValueError('correlate must be a list')
        if not isinstance(json.get('service', []), list):
            raise ValueError('service must be a list')
        if not isinstance(json.get('tags', []), list):
            raise ValueError('tags must be a list')
        if not isinstance(json.get('attributes', {}), dict):
            raise ValueError('attributes must be a JSON object')
        # Simplified from the original nested conditional expression:
        # a missing timeout is allowed; a present one must be an int.
        timeout = json.get('timeout', None)
        if timeout is not None and not isinstance(timeout, int):
            raise ValueError('timeout must be an integer')

        return Alert(
            id=json.get('id', None),
            resource=json.get('resource', None),
            event=json.get('event', None),
            environment=json.get('environment', None),
            severity=json.get('severity', None),
            correlate=json.get('correlate', list()),
            status=json.get('status', None),
            service=json.get('service', list()),
            group=json.get('group', None),
            value=json.get('value', None),
            text=json.get('text', None),
            tags=json.get('tags', list()),
            attributes=json.get('attributes', dict()),
            origin=json.get('origin', None),
            event_type=json.get('type', None),
            create_time=DateTime.parse(json.get('createTime')),
            timeout=timeout,
            raw_data=json.get('rawData', None),
            customer=json.get('customer', None),
            duplicate_count=json.get('duplicateCount', None),
            repeat=json.get('repeat', None),
            previous_severity=json.get('previousSeverity', None),
            trend_indication=json.get('trendIndication', None),
            receive_time=DateTime.parse(json.get('receiveTime')),
            last_receive_id=json.get('lastReceiveId', None),
            last_receive_time=DateTime.parse(json.get('lastReceiveTime')),
            history=json.get('history', None)
        )

    def get_id(self, short=False):
        """Return the alert id, truncated to 8 characters when *short*."""
        return self.id[:8] if short else self.id

    def tabular(self, timezone=None):
        """Return a flat dict of wire-named fields for tabular display.

        Datetimes are converted to *timezone* local time via DateTime;
        list fields are joined with commas where the display expects text.
        """
        return {
            'id': self.get_id(short=True),
            'lastReceiveTime': DateTime.localtime(self.last_receive_time, timezone),
            'severity': self.severity,
            'status': self.status,
            'duplicateCount': self.duplicate_count,
            'customer': self.customer,
            'environment': self.environment,
            'service': ','.join(self.service),
            'resource': self.resource,
            'group': self.group,
            'event': self.event,
            'correlate': self.correlate,
            'value': self.value,
            'text': self.text,
            'tags': ','.join(self.tags),
            'attributes': self.attributes,
            'origin': self.origin,
            'type': self.event_type,
            'createTime': DateTime.localtime(self.create_time, timezone),
            'timeout': self.timeout,
            'rawData': self.raw_data,
            'repeat': self.repeat,
            'previousSeverity': self.previous_severity,
            'trendIndication': self.trend_indication,
            'receiveTime': DateTime.localtime(self.receive_time, timezone),
            'lastReceiveId': self.last_receive_id,
            'history': self.history
        }
| {
"content_hash": "e5d04edc8c692daf2e5ee80f21303a66",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 127,
"avg_line_length": 46.7734375,
"alnum_prop": 0.5844329380324036,
"repo_name": "alerta/python-alerta",
"id": "5ea741ae52b5f3ecc1898f710aed2a565b83086d",
"size": "5987",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "alertaclient/models/alert.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "106941"
}
],
"symlink_target": ""
} |
from __future__ import division, print_function
import paho.mqtt.client as mqtt
import base64, sys, math
from hashlib import md5
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
    """On a successful connect, record the firmware checksum and subscribe
    to the device's OTA status, OTA enabled-flag and firmware topics.

    On failure, report the result code and drop the connection.
    """
    if rc != 0:
        print("Connection Failed with result code {}".format(rc))
        client.disconnect()
        return

    print("Connected with result code {}".format(rc))

    # Checksum of the firmware we intend to push; stored in userdata so
    # the message callback can compare it with the device's checksum.
    userdata.update({'md5': md5(userdata['firmware']).hexdigest()})

    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    topic_templates = (
        "{base_topic}{device_id}/$implementation/ota/status",
        "{base_topic}{device_id}/$implementation/ota/enabled",
        "{base_topic}{device_id}/$fw/#",
    )
    for template in topic_templates:
        client.subscribe(template.format(**userdata))

    # Wait for device info to come in and invoke the on_message callback where update will continue
    print("Waiting for device info...")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    """paho-mqtt PUBLISH callback driving the OTA state machine.

    Reacts to three topic families:
      * .../$implementation/ota/status -- progress / not-modified / forbidden codes
      * .../$fw/checksum               -- device-reported firmware md5
      * .../ota/enabled                -- whether the device accepts OTA at all
    Once the device's checksum, OTA-enabled flag, and a differing md5 have all
    been observed (and nothing has been published yet), the firmware payload
    is published to the device's ota/firmware/<md5> topic.
    """
    # decode string for python2/3 compatibility
    msg.payload = msg.payload.decode()
    if msg.topic.endswith('$implementation/ota/status'):
        # status payload looks like "<code> [<progress>/<total>]"
        status = int(msg.payload.split()[0])
        if userdata.get("published"):
            if status == 206: # in progress
                # state in progress, print progress bar
                progress, total = [int(x) for x in msg.payload.split()[1].split('/')]
                bar_width = 30
                bar = int(bar_width*(progress/total))
                print("\r[", '+'*bar, ' '*(bar_width-bar), "] ", msg.payload.split()[1], end='', sep='')
                if (progress == total):
                    print()
                sys.stdout.flush()
            elif status == 304: # not modified
                print("Device firmware already up to date with md5 checksum: {}".format(userdata.get('md5')))
                client.disconnect()
            elif status == 403: # forbidden
                print("Device ota disabled, aborting...")
                client.disconnect()
    elif msg.topic.endswith('$fw/checksum'):
        checksum = msg.payload
        if userdata.get("published"):
            # after publishing, a matching checksum means the flash succeeded
            if checksum == userdata.get('md5'):
                print("Device back online. Update Successful!")
            else:
                print("Expecting checksum {}, got {}, update failed!".format(userdata.get('md5'), checksum))
            client.disconnect()
        else:
            if checksum != userdata.get('md5'): # save old md5 for comparison with new firmware
                userdata.update({'old_md5': checksum})
            else:
                print("Device firmware already up to date with md5 checksum: {}".format(checksum))
                client.disconnect()
    elif msg.topic.endswith('ota/enabled'):
        if msg.payload == 'true':
            userdata.update({'ota_enabled': True})
        else:
            print("Device ota disabled, aborting...")
            client.disconnect()
    # publish exactly once, only when OTA is enabled and the device's
    # current firmware differs from what we are about to send
    if ( not userdata.get("published") ) and ( userdata.get('ota_enabled') ) and \
       ( 'old_md5' in userdata.keys() ) and ( userdata.get('md5') != userdata.get('old_md5') ):
        # push the firmware binary
        userdata.update({"published": True})
        topic = "{base_topic}{device_id}/$implementation/ota/firmware/{md5}".format(**userdata)
        print("Publishing new firmware with checksum {}".format(userdata.get('md5')))
        client.publish(topic, userdata['firmware'])
def main(broker_host, broker_port, broker_username, broker_password, base_topic, device_id, firmware):
    """Set up the MQTT client and block forever while the OTA callbacks run.

    Arguments:
        broker_host / broker_port -- broker address
        broker_username / broker_password -- optional credentials
        base_topic -- homie base topic (ends with '/')
        device_id -- target homie device id
        firmware -- firmware image as a bytearray
    """
    # Wire up the client with the module-level OTA callbacks.
    mqtt_client = mqtt.Client()
    mqtt_client.on_connect = on_connect
    mqtt_client.on_message = on_message
    # Credentials are optional; only apply them when both are present.
    if broker_username and broker_password:
        mqtt_client.username_pw_set(broker_username, broker_password)
    # Stash everything the callbacks need in the client's userdata dict.
    mqtt_client.user_data_set({
        "base_topic": base_topic,
        "device_id": device_id,
        "firmware": firmware
    })
    print("Connecting to mqtt broker {} on port {}".format(broker_host, broker_port))
    mqtt_client.connect(broker_host, broker_port, 60)
    # Blocking loop: handles network traffic, dispatches callbacks, reconnects.
    mqtt_client.loop_forever()
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        # fixed typos in user-facing help text: scirpt->script,
        # implemenation->implementation, ESP8226->ESP8266
        description='ota firmware update script for ESP8266 implementation of the Homie mqtt IoT convention.')
    # ensure base topic always ends with a '/'
    def base_topic_arg(s):
        """argparse type: coerce to str and guarantee a trailing '/'."""
        s = str(s)
        if not s.endswith('/'):
            s = s + '/'
        return s
    # specify arguments
    parser.add_argument('-l', '--broker-host', type=str, required=False,
                        help='host name or ip address of the mqtt broker', default="127.0.0.1")
    parser.add_argument('-p', '--broker-port', type=int, required=False,
                        help='port of the mqtt broker', default=1883)
    parser.add_argument('-u', '--broker-username', type=str, required=False,
                        help='username used to authenticate with the mqtt broker')
    parser.add_argument('-d', '--broker-password', type=str, required=False,
                        help='password used to authenticate with the mqtt broker')
    parser.add_argument('-t', '--base-topic', type=base_topic_arg, required=False,
                        help='base topic of the homie devices on the broker', default="homie/")
    parser.add_argument('-i', '--device-id', type=str, required=True,
                        help='homie device id')
    parser.add_argument('firmware', type=argparse.FileType('rb'),
                        help='path to the firmware to be sent to the device')
    # workaround for http://bugs.python.org/issue9694
    parser._optionals.title = "arguments"
    # get and validate arguments
    args = parser.parse_args()
    # read the contents of firmware into a mutable byte buffer
    fw_buffer = args.firmware.read()
    args.firmware.close()
    # bytearray(bytes) copies directly; no need for an empty-extend dance
    firmware = bytearray(fw_buffer)
    # Invoke the business logic
    main(args.broker_host, args.broker_port, args.broker_username,
         args.broker_password, args.base_topic, args.device_id, firmware)
| {
"content_hash": "836f37d1e95a2d5365d575a65354c647",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 109,
"avg_line_length": 43.111111111111114,
"alnum_prop": 0.6105215281989085,
"repo_name": "euphi/homie-esp8266",
"id": "2b0f1f5b7ee16ac5abd736db4a16502b472d9226",
"size": "6619",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "scripts/ota_updater/ota_updater.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "89"
},
{
"name": "C++",
"bytes": "141123"
},
{
"name": "HTML",
"bytes": "175625"
},
{
"name": "Makefile",
"bytes": "222"
},
{
"name": "Python",
"bytes": "12036"
}
],
"symlink_target": ""
} |
import os
from sqs import (make_SQS_connection, get_queue, queue_size, )
from cloudwatch import (make_CW_connection, update_metric, )
from threading import Timer
# Define AWS credentials.
# NOTE: os.environ[...] raises KeyError at import time if either variable
# is unset, which fails fast before any AWS call is attempted.
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
REGION = 'us-west-2'
# Set queue name variables (SQS queues polled for size)
FULL_COMPOSITE_QUEUE = 'snapsat_composite_queue'
PREVIEW_COMPOSITE_QUEUE = 'snapsat_preview_queue'
# Set metric name variables (CloudWatch metrics the sizes are published to)
FULL_COMPOSITE_METRIC = 'number_jobs_full_queue'
PREVIEW_COMPOSITE_METRIC = 'number_jobs_preview_queue'
# Set metric namespace
NAMESPACE = 'Snapsat'
# Set size check intervals (seconds between successive size checks)
FULL_INTERVAL = 10
PREVIEW_INTERVAL = 10
# Create SQS connection -- note this is a module-import side effect
SQSconn = make_SQS_connection(REGION,
                              AWS_ACCESS_KEY_ID,
                              AWS_SECRET_ACCESS_KEY)
# Create CW connection -- also created at import time
CWconn = make_CW_connection(REGION,
                            AWS_ACCESS_KEY_ID,
                            AWS_SECRET_ACCESS_KEY)
# Monitor size of queue
def monitor_queue(SQSconn, CWconn, queue_name, metric_name):
    """Read the current size of one SQS queue and publish it as a CloudWatch metric."""
    current_size = queue_size(get_queue(SQSconn, queue_name))
    update_metric(CWconn, NAMESPACE, metric_name, current_size)
# Create full queue size check timer function
def full_queue_timer(SQSconn, CWconn, queue_name, metric_name, interval):
    """Publish the queue size metric, then re-schedule this same check.

    Each call reports the queue size once, then arms a threading.Timer that
    invokes this function again after `interval` seconds, producing a
    periodic check.  Timer.start() returns None, so so does this function.
    """
    monitor_queue(SQSconn, CWconn, queue_name, metric_name)
    return Timer(interval,
                 full_queue_timer,
                 args=[SQSconn, CWconn, queue_name, metric_name, interval]
                 ).start()
# Create preview queue size check timer function
def preview_queue_timer(SQSconn, CWconn, queue_name, metric_name, interval):
    """Publish the preview queue size metric, then re-schedule this same check.

    Each call reports the queue size once, then arms a threading.Timer that
    invokes this function again after `interval` seconds.
    Bug fix: the Timer previously re-scheduled full_queue_timer (a copy-paste
    error), so after the first tick the preview queue stopped being monitored
    with its own arguments; it now re-schedules itself.
    """
    monitor_queue(SQSconn, CWconn, queue_name, metric_name)
    return Timer(interval,
                 preview_queue_timer,
                 args=[SQSconn, CWconn, queue_name, metric_name, interval]
                 ).start()
# Check queue sizes every FULL_INTERVAL / PREVIEW_INTERVAL seconds (10s each)
def main():
    """Start both self-rescheduling queue-size monitors.

    Each timer function reports once immediately and then re-arms itself,
    so this returns quickly while the Timer threads keep running.
    """
    full_queue_timer(SQSconn,
                     CWconn,
                     FULL_COMPOSITE_QUEUE,
                     FULL_COMPOSITE_METRIC,
                     FULL_INTERVAL)
    preview_queue_timer(SQSconn,
                        CWconn,
                        PREVIEW_COMPOSITE_QUEUE,
                        PREVIEW_COMPOSITE_METRIC,
                        PREVIEW_INTERVAL)
if __name__ == '__main__':
    main()
| {
"content_hash": "ae9a95107d8b43329a8f07efb0e1eb29",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 76,
"avg_line_length": 31.2987012987013,
"alnum_prop": 0.6236514522821577,
"repo_name": "recombinators/autoscaling",
"id": "be4cf5a3dd8f9d79d126b7d96822ede188157470",
"size": "2410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "checker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6698"
}
],
"symlink_target": ""
} |
'''
Problem List Actions class. Extends Actions
Created on Mar 7, 2012
@author: pbradley
@copyright PwC
@license http://www.apache.org/licenses/LICENSE-2.0
'''
import time
import TestHelper
from Actions import Actions
import logging
class PLActions (Actions):
'''
This class extends the Actions class with methods specific to actions performed
through the Roll and Scroll interface for the Problem List package.
'''
def __init__(self, VistAconn, scheduling=None, user=None, code=None):
    """Initialize by delegating to the Actions base class.

    Arguments:
        VistAconn -- connection object used for the roll-and-scroll session
        scheduling, user, code -- passed through to Actions unchanged
    """
    Actions.__init__(self, VistAconn, scheduling, user, code)
def signon (self):
    ''' This provides a signon via ^XUP or ^ZU depending on the value of acode'''
    if self.acode is None:
        # No access code: programmer-mode entry, set DUZ=1 and run ^XUP,
        # then go straight to the Problem List management menu option.
        self.VistA.write('S DUZ=1,DUZ(0)="@" D ^XUP')
        self.VistA.wait('OPTION NAME:')
        self.VistA.write('GMPL MGT MENU')
    else:
        # Normal signon through ^ZU using the stored access/verify codes.
        self.VistA.write('D ^ZU')
        self.VistA.wait('ACCESS CODE:')
        self.VistA.write(self.acode)
        self.VistA.wait('VERIFY CODE:')
        self.VistA.write(self.vcode)
        # accept the default at the '//' prompt
        self.VistA.wait('//')
        self.VistA.write('')
        self.VistA.wait('Option:')
        self.VistA.write('Problem List')
# def signoff(self):
# super(Actions,self).signoff(self.VistA, self.acode)
def write(self, string):
    """Send a raw string to the VistA terminal session (thin pass-through)."""
    self.VistA.write(string)
def addcsv(self, ssn, pfile):
    '''Add a list of problems to a patient's record.

    Arguments:
        ssn -- patient identifier entered at the PATIENT NAME prompt
        pfile -- path to a CSV file; each row supplies clinic, icd, icd10,
                 snomed, comments, onset date, status, acute/chronic and
                 service-connected fields for one problem
    '''
    preader = TestHelper.CSVFileReader()
    prec = preader.getfiledata(pfile)
    # One full add-problem dialog per CSV row, sorted by row key.
    for key in sorted(prec):
        problem_data = prec[key]
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('AD')
        self.VistA.wait('Clinic')
        self.VistA.write(problem_data['clinic'].strip())
        while True:
            index = self.VistA.multiwait(['Select Item','PROBLEM:',"No items available"])
            if index == 0:
                # a selection list is offered; ask to add a new problem instead
                self.VistA.write('AD')
            else:
                self.VistA.write('?')
                # try each code system in turn until the term is accepted
                probID =[problem_data['icd'].strip(),
                         problem_data['icd10'].strip(),
                         problem_data['snomed'].strip()]
                valIndex = 0
                while True:
                    index = self.VistA.multiwait(['Ok','PROBLEM:'])
                    if index == 1:
                        self.VistA.write(probID[valIndex])
                        valIndex += 1;
                    elif index == 0:
                        break
                    else:
                        self.VistA.write('?')
                self.VistA.write('Yes')
                # if self.acode is not None:
                #     self.VistA.wait('//'); self.VistA.write('')
                index = self.VistA.multiwait(['COMMENT','already an ACTIVE problem'])
                if index == 0:
                    # fill in the remaining problem detail prompts from the row
                    self.VistA.write(problem_data['comment1'].strip())
                    self.VistA.wait('ANOTHER COMMENT')
                    self.VistA.write(problem_data['comment2'].strip())
                    self.VistA.wait('DATE OF ONSET')
                    self.VistA.write(problem_data['onsetdate'].strip())
                    self.VistA.wait('STATUS')
                    self.VistA.write(problem_data['status'].strip())
                    self.VistA.wait('hronic')
                    self.VistA.write(problem_data['acutechronic'].strip())
                    rval = self.VistA.multiwait(['service-connected condition',
                                                 'uit w/o saving'])
                    if rval == 0:
                        self.VistA.write(problem_data['service'].strip())
                        self.VistA.wait('uit w/o saving?')
                        self.VistA.write('Save')
                    elif rval == 1:
                        self.VistA.write('Save')
                    break
                else:
                    # duplicate active problem: back out of this entry
                    self.VistA.write("")
                    break
        self.VistA.wait('PROBLEM')
        self.VistA.write('')
        self.VistA.wait('Select Action')
        self.VistA.write('QUIT')
        self.VistA.wait('Print a new problem list')
        self.VistA.write('N')
def addbyprobnum(self, ssn, clinic, comment, onsetdate, status, acutechronic,
                 service, probnum, icd=None,icd10=None,snomed=None, evalue=None, verchknum=None):
    ''' Add a problem using clinic or user with assigned selection list.

    The problem is picked by its number on the selection list (probnum).
    icd/icd10/snomed/evalue are unused here; they are kept for signature
    parity with the other add* methods.  If verchknum is given, also verify
    that the entering user cannot verify that problem themselves.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('AD')
    self.VistA.wait('Clinic')
    self.VistA.write(clinic)
    # the prompt may be a selection-list item prompt or a PROBLEM prompt
    index = self.VistA.multiwait(["Select I(TEM|tem)","PROBLEM:"])
    self.VistA.write(probnum)
    index = self.VistA.multiwait(['COMMENT','already an ACTIVE problem'])
    if index == 0:
        self.VistA.write(comment)
        self.VistA.wait('ANOTHER COMMENT')
        self.VistA.write('')
        self.VistA.wait('DATE OF ONSET')
        self.VistA.write(onsetdate)
        self.VistA.wait('STATUS')
        self.VistA.write(status)
        self.VistA.wait('hronic')
        self.VistA.write(acutechronic)
        rval = self.VistA.multiwait(['service-connected condition', 'uit w/o saving'])
        if rval == 0:
            self.VistA.write(service)
            self.VistA.wait('uit w/o saving')
            self.VistA.write('Save')
        elif rval == 1:
            self.VistA.write('Save')
        #
    else:
        # duplicate active problem: accept the default and move on
        self.VistA.write('')
    self.VistA.multiwait(["PROBLEM:","Select Item"])
    self.VistA.write('')
    self.VistA.wait('Select Action')
    # optionally, check to make sure user entering the data can't also verify it
    if verchknum is not None:
        self.VistA.write('$')
        self.VistA.wait('Select Problem')
        self.VistA.write(verchknum)
        self.VistA.wait('does not require verification')
        self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def add(self, ssn, clinic, comment, onsetdate, status, acutechronic,
        service, probnum=None, icd=None,icd10=None,snomed=None, evalue=None, verchknum=None):
    ''' Add a problem using clinic or user with assigned selection list.

    Tries icd, then icd10, then snomed at successive PROBLEM prompts until
    the term is accepted.  If verchknum is given, also verify that the
    entering user cannot verify that problem themselves.
    probnum/evalue are unused in this variant.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('AD')
    self.VistA.wait('Clinic')
    self.VistA.write(clinic)
    index = self.VistA.multiwait(["Select Item","PROBLEM:"])
    if (index == 0):
        # a selection list appeared; ask to add a new problem instead
        self.VistA.write('AD')
        self.VistA.wait('PROBLEM:')
    # try each code system in turn until one is accepted ('Ok')
    probList = [icd, icd10,snomed]
    probIndex = 0
    self.VistA.write('?')
    while True:
        index = self.VistA.multiwait(['PROBLEM:','Ok',"Select Item"])
        if index==0:
            self.VistA.write(probList[probIndex])
            probIndex += 1
        elif index == 1:
            break
        elif index == 2:
            self.VistA.write('AD')
        else:
            self.VistA.write('?')
    self.VistA.write('YES')
    index = self.VistA.multiwait(['COMMENT','already an ACTIVE problem'])
    if index == 0:
        self.VistA.write(comment)
        self.VistA.wait('ANOTHER COMMENT')
        self.VistA.write('')
        self.VistA.wait('DATE OF ONSET')
        self.VistA.write(onsetdate)
        self.VistA.wait('STATUS')
        self.VistA.write(status)
        self.VistA.wait('hronic')
        self.VistA.write(acutechronic)
        rval = self.VistA.multiwait(['service-connected condition', 'uit w/o saving'])
        if rval == 0:
            self.VistA.write(service)
            self.VistA.wait('uit w/o saving')
            self.VistA.write('Save')
        elif rval == 1:
            self.VistA.write('Save')
        #
    else:
        self.VistA.write('')
    # drain prompts until we are back at the Select Action menu
    while True:
        index = self.VistA.multiwait(["PROBLEM:","Select Item",'Select Action'])
        if index == 2:
            break
        self.VistA.write('')
    # optionally, check to make sure user entering the data can't also verify it
    if verchknum is not None:
        self.VistA.write('$')
        self.VistA.wait('Select Problem')
        self.VistA.write(verchknum)
        self.VistA.wait('does not require verification')
        self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def addspec(self, ssn, clinic, comment, onsetdate, status, acutechronic,
            service, icd, prompt='yes', uselex='yes', screendups='yes', isdup=None, prob=None, vlist=None):
    ''' Add problems with checks for the PL site parameters.

    Arguments mirror add(); additionally:
        prompt -- 'yes' if the "Print a new problem list" prompt is expected
        uselex -- 'yes' if the lexicon "Ok?" confirmation is expected
        screendups / isdup -- both 'yes' when the duplicate-problem warning
            for `prob` should be confirmed
        vlist -- optional list of prompts to drain; the last entry stops it
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('AD')
    self.VistA.wait('Clinic')
    self.VistA.write(clinic)
    self.VistA.wait('PROBLEM')
    self.VistA.write(icd)
    # Bug fix: was `if uselex is 'yes':` -- identity comparison with a string
    # literal only works by CPython interning accident; use equality.
    if uselex == 'yes':
        self.VistA.wait('Ok?')
        self.VistA.write('YES')
    if screendups == isdup == 'yes':
        # confirm through the duplicate-problem warning
        self.VistA.wait('>>> ' + prob)
        self.VistA.wait(' is already an')
        self.VistA.wait('Are you sure you want to continue')
        self.VistA.write('Yes')
    self.VistA.wait('COMMENT')
    self.VistA.write(comment)
    self.VistA.wait('ANOTHER COMMENT')
    self.VistA.write('')
    self.VistA.wait('DATE OF ONSET')
    self.VistA.write(onsetdate)
    self.VistA.wait('STATUS')
    self.VistA.write(status)
    self.VistA.wait('hronic')
    self.VistA.write(acutechronic)
    rval = self.VistA.multiwait(['service-connected condition', 'uit w/o saving'])
    if rval == 0:
        self.VistA.write(service)
        self.VistA.wait('uit w/o saving')
        self.VistA.write('Save')
    elif rval == 1:
        self.VistA.write('Save')
    self.VistA.wait('PROBLEM')
    self.VistA.write('')
    if vlist is not None:
        # drain expected prompts until the final vlist entry appears
        while True:
            index = self.VistA.multiwait(vlist)
            if index == len(vlist)-1:
                break
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    if prompt == 'yes':
        self.VistA.wait('Print a new problem list')
        self.VistA.write('N')
def dataentry(self, ssn, provider, clinic, problem, comment, onsetdate, status, acutechronic,
              service, probnum=None, icd=None, evalue=None):
    '''Add a problem (via data entry) using description or selection list.

    probnum controls how the problem is chosen:
        None    -- no selection list; free-text `problem` is entered
        'skip'  -- a selection list exists but is bypassed; `icd` is entered
        other   -- the selection-list item number to pick
    evalue is unused (only referenced by commented-out code).
    '''
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Provider:')
    self.VistA.write(provider)
    self.VistA.wait('Select Action')
    self.VistA.write('AD')
    self.VistA.wait('Clinic')
    self.VistA.write(clinic)
    if probnum == 'skip': # SL exists but don't use
        self.VistA.wait('Select Item')
        self.VistA.write('AD')
        self.VistA.wait('PROBLEM')
        self.VistA.write(icd)
    elif probnum is None : # SL doesn't exist
        self.VistA.wait('PROBLEM')
        self.VistA.write(problem)
    else : # Use SL
        self.VistA.wait('Select Item')
        self.VistA.write(probnum)
    # if clinic == '':
    #     self.VistA.wait(evalue); self.VistA.write('')
    self.VistA.wait('COMMENT')
    self.VistA.write(comment)
    self.VistA.wait('ANOTHER COMMENT')
    self.VistA.write('')
    self.VistA.wait('DATE OF ONSET')
    self.VistA.write(onsetdate)
    self.VistA.wait('STATUS')
    self.VistA.write(status)
    self.VistA.wait('hronic')
    self.VistA.write(acutechronic)
    rval = self.VistA.multiwait(['service-connected condition', 'uit w/o saving'])
    if rval == 0:
        self.VistA.write(service)
        self.VistA.wait('uit w/o saving')
        self.VistA.write('Save')
    elif rval == 1:
        self.VistA.write('Save')
    self.VistA.wait('PROBLEM:')
    self.VistA.write('')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def editsimple(self, ssn, probnum, itemnum, chgval,icd10='',snomed=''):
    '''Simple edit of problem, items 1,2,4,5 or 6 only.

    Arguments:
        probnum -- which problem on the patient's list to edit
        itemnum -- which edit item (1,2,4,5 or 6) to change
        chgval  -- new value for that item
        icd10/snomed -- fallback codes offered if the system asks for
                        "A suitable term" after the change
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('ED')
    self.VistA.wait('Select Problem')
    self.VistA.write(probnum) # which patient problem
    self.VistA.wait('Select Item')
    self.VistA.write(itemnum) # select 1, 2,4,5,or6
    self.VistA.wait(':')
    self.VistA.write(chgval)
    valIndex=0
    valList = [icd10,snomed]
    # answer whatever follow-up prompts appear until back at the item menu
    while True:
        rval = self.VistA.multiwait(['Select Item', 'Ok','A suitable term','STOP or Select'])
        if rval == 0:
            self.VistA.write('SC')
            break
        elif rval == 1:
            self.VistA.write('Yes')
        elif rval == 2:
            self.VistA.write(valList[valIndex])
            valIndex +=1
        elif rval == 3:
            self.VistA.write('1')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def editinactivate (self, ssn, probnum, resdate):
    '''Inactivate a problem.

    Sets the problem's STATUS edit item (item 3) to INACTIVE with the
    given resolution date, then saves.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('ED')
    self.VistA.wait('Select Problem')
    self.VistA.write(probnum) # which patient problem
    self.VistA.wait('Select Item')
    self.VistA.write('3') # STATUS
    self.VistA.wait('STATUS')
    self.VistA.write('INACTIVE')
    self.VistA.wait('DATE RESOLVED')
    self.VistA.write(resdate)
    self.VistA.wait('Select Item')
    self.VistA.write('SC')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def editactivate (self, ssn, probnum, acutechronic):
    '''Activate a problem.

    Sets the problem's STATUS edit item (item 3) to ACTIVE, answers the
    acute/chronic prompt, then saves.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('ED')
    self.VistA.wait('Select Problem')
    self.VistA.write(probnum) # which patient problem
    self.VistA.wait('Select Item')
    self.VistA.write('3') # STATUS
    self.VistA.wait('STATUS')
    self.VistA.write('ACTIVE')
    self.VistA.wait('hronic')
    self.VistA.write(acutechronic)
    self.VistA.wait('Select Item')
    self.VistA.write('SC')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def verify(self, ssn, probnum, itemnum, evalue, view='AT'):
    '''Verify a problem exists.

    Opens the problem's edit item `itemnum` and waits for the expected
    value(s) in `evalue` (a multiwait pattern list) before backing out.
    `view` selects the list view (default 'AT').
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('VW')
    self.VistA.wait('Select Item')
    self.VistA.write(view)
    self.VistA.wait('Select Action')
    self.VistA.write('ED')
    self.VistA.wait('Select Problem')
    self.VistA.write(probnum) # which patient problem
    self.VistA.wait('Select Item')
    self.VistA.write(itemnum) # which item to verify?
    self.VistA.multiwait(evalue)
    # '^' backs out of the edit prompt without changing anything
    self.VistA.write('^')
    self.VistA.wait('Select Item')
    self.VistA.write('QUIT')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
def comcm (self, ssn, probnum, comment):
    '''Comment on an Active problem.

    Uses the CM action to attach a single comment to problem `probnum`.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('CM')
    self.VistA.wait('Select Problem')
    self.VistA.write(probnum) # which patient problem
    self.VistA.wait('COMMENT')
    self.VistA.write(comment)
    self.VistA.wait('ANOTHER COMMENT')
    self.VistA.write('')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def rem (self, ssn):
    '''Remove the first problem on the list (Active or Inactive).

    Switches to the BO (both) view so inactive problems are visible, then
    removes problem 1 with a fixed "testing" removal reason.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action')
    self.VistA.write('VW')
    self.VistA.wait('Select Item')
    self.VistA.write('BO')
    self.VistA.wait('Select Action')
    self.VistA.write('RM')
    self.VistA.wait('Select Problem')
    self.VistA.write('1')
    self.VistA.wait('Are you sure')
    self.VistA.write('YES')
    self.VistA.wait('REASON FOR REMOVAL')
    self.VistA.write('testing')
    self.VistA.wait('Select Action')
    self.VistA.write('QUIT')
    self.VistA.wait('Print a new problem list')
    self.VistA.write('N')
def rem_all (self, ssn):
    '''Remove every problem on the list (Active or Inactive), one per pass.

    Repeats the single-problem removal dialog until the RM action no longer
    offers a "Select Problem" prompt (i.e. the list is empty).
    '''
    rval = 0
    # Bug fix: was `while rval is not 1:` -- identity comparison with an int
    # literal relies on CPython small-int caching; use value inequality.
    while rval != 1:
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('VW')
        self.VistA.wait('Select Item')
        self.VistA.write('BO')
        self.VistA.wait('Select Action')
        self.VistA.write('RM')
        rval = self.VistA.multiwait(['Select Problem', 'Select Action'])
        if rval == 0:
            # a problem is still available: remove problem 1 and loop again
            self.VistA.write('1')
            self.VistA.wait('Are you sure')
            self.VistA.write('YES')
            self.VistA.wait('REASON FOR REMOVAL')
            self.VistA.write('testing')
            self.VistA.wait('Select Action')
            self.VistA.write('QUIT')
            self.VistA.wait('Print a new problem list')
            self.VistA.write('N')
        elif rval == 1:
            # no problems left: back out of the menu
            self.VistA.write('QUIT')
            r2val = self.VistA.multiwait(['Print a new problem list', 'Problem List Mgt Menu'])
            if r2val == 0:
                self.VistA.write('N')
            elif r2val == 1:
                self.VistA.write('?')
            else:
                # sentinel: unexpected prompt -- force the harness to time out
                self.VistA.wait('SHOULDNOTGETHERE')
        else:
            self.VistA.wait('SHOULDNOTGETHERE')
def replace (self, ssn, probnum):
    '''Replace Removed Problem.

    Restores a previously removed problem `probnum` back onto the
    patient's problem list.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Replace Removed Problem')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select the problem')
    self.VistA.write(probnum)
    self.VistA.wait('Are you sure you want to do this?')
    self.VistA.write('YES')
    self.VistA.wait('to continue')
    self.VistA.write('')
def checkempty (self, ssn):
    '''Verify that patient problem list is empty.

    An empty list makes "Add New Problems" the default action; waiting for
    that exact default prompt is what asserts emptiness.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Patient Problem List')
    self.VistA.wait('PATIENT NAME')
    self.VistA.write(ssn)
    self.VistA.wait('Select Action: Add New Problems//')
    self.VistA.write('QUIT')
def createsellist (self, listname, clinic):
    '''Create a Selection List.

    Builds a new problem selection list and, when the build dialog does not
    ask for a clinic directly, assigns the list to `clinic` afterwards via
    the SS (assign) action.
    '''
    needAssignedToClinic = False
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Build')
    self.VistA.wait('Select LIST NAME:')
    self.VistA.write(listname)
    self.VistA.wait('new PROBLEM SELECTION LIST')
    self.VistA.write('Yes')
    # newer dialogs ask for CLASS instead of CLINIC; remember to assign later
    index = self.VistA.multiwait(['PROBLEM SELECTION LIST CLINIC:','PROBLEM SELECTION LIST CLASS'])
    if index == 0:
        self.VistA.write(clinic)
    else:
        needAssignedToClinic = True
        self.VistA.write("Local")
    self.VistA.wait('Select Action:')
    # assign to clinic
    if (needAssignedToClinic) and (clinic):
        self.VistA.write("SS")
        self.VistA.wait("Enter selection")
        self.VistA.write('2') # Assign to hospital location
        self.VistA.wait("HOSPITAL LOCATION NAME")
        self.VistA.write(clinic)
        self.VistA.wait("Selection List")
        self.VistA.write(listname)
        self.VistA.wait("Enter selection")
        self.VistA.write('')
        self.VistA.wait('Select Action:')
    self.VistA.write('SV')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def createcat (self, listname, catname):
    '''Create a Category.

    Creates a new problem selection category `catname` and adds it to the
    selection list `listname`.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Build Problem Selection List')
    self.VistA.wait('Select LIST NAME:')
    self.VistA.write(listname)
    self.VistA.wait('Select Action')
    self.VistA.write('EC')
    self.VistA.wait('Select CATEGORY NAME:')
    self.VistA.write(catname)
    self.VistA.wait('new PROBLEM SELECTION CATEGORY')
    self.VistA.write('Yes')
    # some dialogs also ask for the category CLASS before continuing
    index = self.VistA.multiwait(['Select Item','PROBLEM SELECTION CATEGORY CLASS'])
    if index == 1:
        self.VistA.write("Local")
        self.VistA.wait('Select Item')
    self.VistA.write('SV')
    self.VistA.wait('Select Action')
    self.VistA.write('AD')
    self.VistA.wait('CATEGORY NAME')
    self.VistA.write(catname)
    index = self.VistA.multiwait(['HEADER','part of this list'])
    if index == 0:
        self.VistA.write('')
        self.VistA.wait('SEQUENCE')
        self.VistA.write('')
        self.VistA.wait('CATEGORY NAME')
    self.VistA.write('')
    self.VistA.wait('Select Action')
    self.VistA.write('SV')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def catad (self, listname, catname, icd, snomed, spec='', dtext='', seqnum=''):
    '''Add a Problem (ICD) to a Category.

    Enters `icd` at the PROBLEM prompt of category `catname`; if the term
    is not directly accepted, falls back to `snomed`.  `spec` answers an
    optional Specialty Subset prompt; `dtext` and `seqnum` fill in the
    display text and sequence prompts.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Build')
    self.VistA.wait('Select LIST NAME:')
    self.VistA.write(listname)
    self.VistA.wait('Select Action')
    self.VistA.write('EC')
    self.VistA.wait('Select CATEGORY NAME:')
    self.VistA.write(catname)
    self.VistA.wait('Select Item')
    self.VistA.write('AD')
    index = self.VistA.multiwait(['PROBLEM','Select Specialty Subset'])
    if index == 1:
        self.VistA.write(spec)
        self.VistA.wait('PROBLEM')
    self.VistA.write(icd)
    # three outcomes: term accepted (Ok), a pick list (STOP or Select),
    # or the lexicon asks for a suitable term -- then try the snomed code
    index = self.VistA.multiwait(['Ok', 'STOP or Select', 'A suitable term'])
    if index == 0:
        self.VistA.write('')
        self.VistA.wait('DISPLAY TEXT')
        self.VistA.write(dtext)
        self.VistA.wait('ICD CODE')
        self.VistA.write(icd)
        self.VistA.wait('...OK')
        self.VistA.write('Yes')
        self.VistA.wait('SEQUENCE')
        self.VistA.write(seqnum)
        self.VistA.wait('PROBLEM')
        self.VistA.write('')
    elif index == 1:
        self.VistA.write('1')
        self.VistA.wait('DISPLAY TEXT')
        self.VistA.write(dtext)
        self.VistA.wait('ICD CODE')
        self.VistA.write(icd)
        self.VistA.wait('...OK')
        self.VistA.write('Yes')
        self.VistA.wait('SEQUENCE')
        self.VistA.write(seqnum)
        self.VistA.wait('PROBLEM')
        self.VistA.write('')
    elif index == 2:
        self.VistA.write(snomed)
        index = self.VistA.multiwait(['Ok', 'STOP or Select', 'A suitable term'])
        if index == 0:
            self.VistA.write('')
            self.VistA.wait('DISPLAY TEXT')
            self.VistA.write(dtext)
            self.VistA.multiwait(['... Ok','... Yes'])
            self.VistA.write('Yes')
            self.VistA.wait('SEQUENCE')
            self.VistA.write(seqnum)
            self.VistA.wait('PROBLEM')
        elif index == 1:
            self.VistA.write('1')
            self.VistA.wait('DISPLAY TEXT')
            self.VistA.write(dtext)
            self.VistA.multiwait(['... Ok','... Yes'])
            self.VistA.write('Yes')
            self.VistA.wait('SEQUENCE')
            self.VistA.write(seqnum)
            self.VistA.wait('PROBLEM')
        self.VistA.write('')
    self.VistA.wait('Select Item')
    self.VistA.write('SV')
    self.VistA.wait('Select Action')
    self.VistA.write('SV')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def sellistad (self, listname, catname, hdrname='', seqnum=''):
    '''Add a Category to a Selection List.

    `hdrname` and `seqnum` answer the optional HEADER and SEQUENCE prompts
    when the category is not yet part of the list.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Build')
    self.VistA.wait('Select LIST NAME:')
    self.VistA.write(listname)
    self.VistA.wait('Select Action')
    self.VistA.write('AD')
    self.VistA.wait('Select CATEGORY NAME:')
    self.VistA.write(catname)
    # 'part of this list' means the category is already attached: skip detail prompts
    index = self.VistA.multiwait(['HEADER','part of this list'])
    if index == 0:
        self.VistA.write(hdrname)
        self.VistA.wait('SEQUENCE')
        self.VistA.write(seqnum)
        self.VistA.wait('Select CATEGORY NAME')
    self.VistA.write('')
    self.VistA.wait('Select Action')
    self.VistA.write('SV')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def sellistss (self, listname, clinic, username):
    '''Assign a Selection List to a User.

    Uses the SS action on the build screen to attach list `listname` to
    `clinic` and user `username`.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Build')
    self.VistA.wait('Select LIST NAME:')
    self.VistA.write(listname)
    self.VistA.wait('Select Action')
    self.VistA.write('SS')
    self.VistA.wait('CLINIC:')
    self.VistA.write(clinic)
    self.VistA.wait('Select USER')
    self.VistA.write(username)
    self.VistA.wait('ANOTHER ONE')
    self.VistA.write('')
    self.VistA.wait('Are you ready')
    self.VistA.write('Yes')
    self.VistA.wait('Select Action')
    self.VistA.write('SV')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def sellistgal (self, listname, username):
    '''Assign a Selection List to a User (via the Assign menu).

    Handles two dialog variants: a numbered sub-menu ("System...") where
    option 1 assigns by NEW PERSON, and the direct LIST NAME prompt.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Assign')
    index = self.VistA.multiwait(["System",'Enter Selection','Select LIST NAME:'])
    if index == 0:
        # menu variant: choose option 1 (assign to a user), then fill prompts
        self.VistA.write("1")
        self.VistA.wait("NEW PERSON NAME")
        self.VistA.write(username)
        self.VistA.wait("Selection List")
        self.VistA.write(listname)
        self.VistA.wait("Enter selection")
        self.VistA.write('')
    else:
        # direct variant: list name first, then user
        self.VistA.write(listname)
        self.VistA.wait('Select USER')
        self.VistA.write(username)
        self.VistA.wait('ANOTHER ONE')
        self.VistA.write('')
        self.VistA.wait('Are you ready')
        self.VistA.write('Yes')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def sellistrfu (self, listname, username):
    '''De-Assign a Selection List from a User.

    Handles two dialog variants: when 'Remove' re-prompts at the Create
    menu, falls back to the Assign sub-menu and deletes ('@') the user's
    list assignment; otherwise de-assigns directly by list name.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Select Create Problem Selection Lists')
    self.VistA.write('Remove')
    index = self.VistA.multiwait(['Select LIST NAME:','Select Create Problem'])
    if index == 1:
        # fallback: use the Assign menu and delete the assignment with '@'
        self.VistA.write('Assign')
        self.VistA.wait('Enter selection')
        self.VistA.write('1')
        self.VistA.wait('NEW PERSON')
        self.VistA.write(username)
        self.VistA.wait('Selection List')
        self.VistA.write('@')
        self.VistA.wait('Enter selection')
        self.VistA.write('')
    else:
        self.VistA.write(listname)
        self.VistA.wait('Select USER')
        self.VistA.write(username)
        self.VistA.wait('ANOTHER ONE')
        self.VistA.write('')
        self.VistA.wait('Are you ready')
        self.VistA.write('Yes')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
def sellistrm (self, listname, catnum='1'):
    ''' Remove Category from a Selection List.

    Removes category number `catnum` (defaults to the first) from list
    `listname` and saves.
    '''
    self.VistA.wait('Problem List Mgt Menu')
    self.VistA.write('Create Problem Selection Lists')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('Build')
    self.VistA.wait('Select LIST NAME:')
    self.VistA.write(listname)
    self.VistA.wait('Select Action')
    self.VistA.write('RM')
    self.VistA.wait('Select Category')
    self.VistA.write(catnum)
    self.VistA.wait('Are you sure you want to remove')
    self.VistA.write('Yes')
    self.VistA.wait('Select Action')
    self.VistA.write('SV')
    self.VistA.wait('Create Problem Selection Lists')
    self.VistA.write('')
    def catdl (self, listname, catname):
        ''' Delete an entire Category from a Selection List.

        listname -- selection list containing the category;
        catname  -- CATEGORY NAME entry to delete.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Create Problem Selection Lists')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('Build')
        self.VistA.wait('Select LIST NAME:')
        self.VistA.write(listname)
        self.VistA.wait('Select Action')
        # EC = edit category; from there DL deletes the whole category.
        self.VistA.write('EC')
        self.VistA.wait('Select CATEGORY NAME')
        self.VistA.write(catname)
        self.VistA.wait('Select Item')
        self.VistA.write('DL')
        self.VistA.wait('Are you sure you want to delete the entire')
        self.VistA.write('Yes')
        self.VistA.wait('Select CATEGORY NAME')
        self.VistA.write('')
        self.VistA.wait('Select Action')
        # SV = save the list after the deletion.
        self.VistA.write('SV')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('')
    def sellistdl (self, listname, clinic):
        '''Delete a Selection List.

        If a clinic is given, first remove ('@') the list's hospital-location
        assignment, since an assigned list cannot be deleted cleanly.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Create Problem Selection Lists')
        self.VistA.wait('Create Problem Selection Lists')
        # First remove assignments
        if clinic:
            self.VistA.write("ASSIGN")
            index = self.VistA.multiwait(["Enter selection","Select LIST NAME" ])
            if index == 0:
                self.VistA.write('2') # Assign to hospital location
                self.VistA.wait("HOSPITAL LOCATION NAME")
                self.VistA.write(clinic)
                self.VistA.wait("Selection List")
                # '@' deletes the clinic's selection-list assignment.
                self.VistA.write("@")
                self.VistA.wait("Enter selection")
                self.VistA.write('')
                self.VistA.wait('Create Problem Selection Lists')
            else:
                self.VistA.write('')
                self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('Delete')
        self.VistA.wait('Select LIST NAME:')
        self.VistA.write(listname)
        self.VistA.wait('Are you sure you want to delete this list')
        self.VistA.write('Yes')
        # Some configurations show an extra "press RETURN to continue" pause.
        index = self.VistA.multiwait(['to continue','Create Problem Selection Lists'])
        if index == 0:
            self.VistA.write('')
            self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('')
    def createibform (self, clinic, formname, groupname, plist, icd10list):
        '''Create an IB (Integrated Billing) Encounter Form for a clinic.

        clinic    -- clinic to set up the form for
        formname  -- name of the new blank form
        groupname -- header of the selection group added to the form
        plist     -- problems to add as selections
        icd10list -- ICD-10 codes, parallel to plist, used when a problem
                     lookup needs disambiguation
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('')
        self.VistA.wait('Core Applications')
        self.VistA.write('IB')
        self.VistA.wait('Integrated Billing Master Menu')
        self.VistA.write('Encounter Forms')
        self.VistA.wait('Encounter Forms')
        self.VistA.write('Edit Encounter Forms')
        self.VistA.wait('Edit Encounter Forms')
        self.VistA.write('Clinic Setup')
        self.VistA.wait('WHICH CLINIC?')
        self.VistA.write(clinic)
        self.VistA.wait('Select Action:')
        self.VistA.write('Create Blank Form')
        self.VistA.wait('New Form Name')
        # '\r' answers the follow-up prompts (size/defaults) in one burst.
        self.VistA.write(formname + '\r\r\r0\r\r\rTest Form\r1')
        self.VistA.wait('Select Action')
        self.VistA.write('Edit Form')
        self.VistA.wait('Select Action')
        self.VistA.write('Add Toolkit')
        self.VistA.wait('Select Action')
        self.VistA.write('Add Tool Kit Block')
        self.VistA.wait('Select TOOL KIT BLOCK:')
        self.VistA.write('8')
        self.VistA.wait('STARTING ROW:')
        self.VistA.write('\r\r\r')
        self.VistA.wait('Select Action')
        self.VistA.write('Fast Selection Edit')
        self.VistA.wait('Select Action:')
        self.VistA.write('Group Add')
        self.VistA.wait('HEADER')
        self.VistA.write(groupname + '\r1\r\r')
        # Add each problem to the group, disambiguating by ICD-10 if needed.
        for pitem in plist:
            self.VistA.wait('Select Action')
            self.VistA.write('Add Selection')
            self.VistA.wait('Select PROBLEM:')
            self.VistA.write(pitem)
            index = self.VistA.multiwait(['Select PROBLEM','Ok'])
            if index == 0:
                self.VistA.write(icd10list[plist.index(pitem)])
                self.VistA.wait('Ok')
            self.VistA.write('\rGroup1\r\r^')
            index = self.VistA.multiwait(['NARRATIVE','Select Action'])
            if index == 0:
                self.VistA.write('TEST')
            else:
                self.VistA.write('?')
        self.VistA.wait('Select Action')
        self.VistA.write('QUIT\rYES')
        self.VistA.wait('Select Action')
        self.VistA.write('QUIT\r\r\r')
        self.VistA.wait('Integrated Billing Master Menu')
        self.VistA.write('Problem List')
def checkOutOfOrder (self, menuName):
'''Remove Category from a Selection List'''
self.VistA.wait('Problem List Mgt Menu')
self.VistA.write('Create Problem Selection Lists')
self.VistA.wait('Create Problem Selection Lists')
self.VistA.write('?');
index = self.VistA.multiwait(['SNOMED CT','Select Problem Selection Lists'])
self.VistA.write('')
if index == 0:
return False
else:
return True
    def sellistib (self, formname, listname, clinic):
        '''Copy a Problem Selection List from an IB Encounter Form.

        formname -- existing IB encounter form to copy from
        listname -- name given to the new selection list
        clinic   -- clinic the new list is associated with
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Create Problem Selection Lists')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('Copy Selection List from IB Encounter')
        self.VistA.wait('Select a FORM:')
        self.VistA.write(formname)
        self.VistA.wait('LIST NAME')
        self.VistA.write(listname)
        self.VistA.wait('CLINIC')
        self.VistA.write(clinic)
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('')
    def versellist(self, ssn, clinic, vlist):
        '''Verify a clinic selection list, content and order.

        ssn    -- patient lookup value
        clinic -- clinic whose selection list is shown when adding a problem
        vlist  -- expected prompts in order; the LAST entry marks the end of
                  the list display (earlier entries are consumed in order by
                  successive multiwait calls)
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('AD')
        self.VistA.wait('Clinic')
        self.VistA.write(clinic)
        # 'PROBLEM:' prepended as index 0: seeing it means the list display
        # ended and the free-text problem prompt appeared instead.
        vlist = ["PROBLEM:"] + vlist
        while True:
            index = self.VistA.multiwait(vlist)
            if (index == len(vlist)-1):
                self.VistA.wait('Select Item')
                self.VistA.write('Quit')
                break
            if index == 0:
                self.VistA.write('')
                break
        self.VistA.wait('Select Action')
        self.VistA.write('Quit')
    def verplist(self, ssn, vlist):
        '''Verify a patient problem list, content and order.

        vlist -- expected strings in display order; the LAST entry is the
        sentinel that terminates the matching loop.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        while True:
            index = self.VistA.multiwait(vlist)
            if index == len(vlist)-1:
                break
        self.VistA.wait('Select Action')
        self.VistA.write('Quit')
    def verlistpats(self, vlist):
        '''Run "List Patients with Problem List data" and verify its output.

        vlist -- expected strings in display order; the LAST entry is the
        sentinel that terminates the matching loop.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('List Patients with Problem List data')
        self.VistA.wait('//')
        self.VistA.write('')
        while True:
            index = self.VistA.multiwait(vlist)
            if index == len(vlist)-1:
                break
        self.VistA.wait('to exit:')
        self.VistA.write('')
    def verpatsrch(self, prob, icd10,snomed, vlist):
        '''Search for patients having a selected problem and verify the report.

        prob/icd10/snomed -- answers supplied, in order, to successive
        PROBLEM prompts during the search dialog.
        vlist -- expected report lines; the LAST entry is the sentinel.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Search for Patients having selected Problem')
        probList = [prob,icd10,snomed]
        probIndex =0
        while True:
            index = self.VistA.multiwait(['Ok','PROBLEM'])
            if index == 1:
                self.VistA.write(probList[probIndex])
                probIndex += 1
            elif index == 0:
                break
            else:
                # NOTE(review): unreachable -- multiwait over 2 options can
                # only return 0 or 1.
                self.VistA.write('?')
        self.VistA.write('')
        self.VistA.wait('Select STATUS:')
        self.VistA.write('')
        self.VistA.wait('DEVICE:')
        self.VistA.write('')
        while True:
            index = self.VistA.multiwait(vlist)
            if index == len(vlist)-1:
                break
        self.VistA.wait('to exit:')
        self.VistA.write('')
        self.VistA.wait('PROBLEM:')
        self.VistA.write('')
    def detview (self, ssn, probnum, vlist1, vlist2):
        '''Check the Detailed View (DT) of one patient problem.

        probnum -- number of the problem to inspect.
        vlist1/vlist2 -- expected strings for the two display pages, each
        terminated by its last (sentinel) entry.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('DT')
        self.VistA.wait('Select Problem')
        self.VistA.write(probnum) # which patient problem
        # Page through the first detail display.
        while True:
            index = self.VistA.multiwait(vlist1)
            if index == len(vlist1)-1:
                break
            self.VistA.wait('Select Action')
            self.VistA.write('')
        # Page through the second detail display.
        while True:
            index = self.VistA.multiwait(vlist2)
            if index == len(vlist2)-1:
                break
            self.VistA.wait('Select Action')
            self.VistA.write('')
        self.VistA.wait('Select Action')
        self.VistA.write('')
    def verifyproblem(self, ssn, problem):
        '''Verify a transcribed ('$'-flagged) problem and re-check it.

        Confirms the problem shows the unverified '$' mark, verifies it with
        the '$' action, then repeats the action to confirm the problem no
        longer requires verification.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('1')
        self.VistA.wait('PATIENT NAME:')
        self.VistA.write(ssn)
        self.VistA.wait('$') # check for $ verify mark
        self.VistA.wait(problem) # check for $ verify mark
        self.VistA.wait('Select Action:')
        self.VistA.write('DT')
        self.VistA.wait('Select Problem')
        self.VistA.write('')
        self.VistA.wait('CLERK')
        self.VistA.write('q')
        self.VistA.wait('Select Action:')
        # '$' action verifies the transcribed problem.
        self.VistA.write('$')
        self.VistA.wait('Select Problem')
        self.VistA.write('')
        self.VistA.wait('Select Action:')
        self.VistA.write('DT')
        self.VistA.wait('Select Problem')
        self.VistA.write('')
        self.VistA.wait('Select Action:')
        self.VistA.write('Q')
        # verify again and confirm previous verification worked
        self.VistA.wait('Select Action:')
        self.VistA.write('$')
        self.VistA.wait('Select Problem')
        self.VistA.write('')
        self.VistA.wait('does not require verification')
        self.VistA.wait('Select Action:')
        self.VistA.write('Q')
    def selectnewpatient(self, ssn1, name1, ss2, name2):
        '''Check the Select New Patient (SP) action.

        Opens patient ssn1 (expecting name1 on screen), switches to patient
        ss2 with SP (expecting name2), then quits back to the menu.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn1)
        self.VistA.wait(name1)
        self.VistA.write('SP')
        self.VistA.wait('PATIENT NAME:')
        self.VistA.write(ss2)
        self.VistA.wait(name2)
        self.VistA.write('Q')
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('')
    def printproblemlist(self, ssn, vlist):
        '''Check the Print (PP) action of the patient problem list.

        Prints all problems to the HOME device and matches the expected
        vlist strings in order; the LAST entry is the sentinel.
        '''
        self.VistA.wait("Problem List Mgt Menu")
        self.VistA.write('Patient Problem List')
        self.VistA.wait('NAME:')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action:')
        self.VistA.write('PP')
        # Prompt text matched loosely ('ll problems?' matches 'All problems?').
        self.VistA.wait('ll problems?')
        self.VistA.write('A')
        self.VistA.wait('DEVICE:')
        self.VistA.write('HOME')
        while True:
            index = self.VistA.multiwait(vlist)
            if index == len(vlist)-1:
                break
        self.VistA.wait('exit:')
        self.VistA.write('^')
        self.VistA.wait('Select Action')
        self.VistA.write('')
    def resequencecat(self, listname, catnames):
        '''Test the re-sequence (SQ) action in the category build list.

        Moves category 1 to sequence 3 and then category 2 to sequence 1,
        checking after each move (via wait_re) that the two catnames appear
        in the expected order on screen.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Create Problem')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('Build')
        self.VistA.wait('LIST NAME:')
        self.VistA.write(listname)
        self.VistA.wait('Select Action:')
        self.VistA.write('SQ')
        self.VistA.wait('Select Category')
        self.VistA.write('1')
        self.VistA.wait('SEQUENCE')
        self.VistA.write('3')
        # After the move, catnames[1] should be listed before catnames[0].
        self.VistA.wait_re(catnames[1])
        self.VistA.wait_re(catnames[0])
        self.VistA.write('SQ')
        self.VistA.wait('Select Category')
        self.VistA.write('2')
        self.VistA.wait('SEQUENCE')
        self.VistA.write('1')
        # Second move restores the original order.
        self.VistA.wait_re(catnames[0])
        self.VistA.wait_re(catnames[1])
        self.VistA.wait('Select Action:')
        self.VistA.write('VW')
        self.VistA.wait('<1>')
        self.VistA.write('')
        self.VistA.wait('Save')
        self.VistA.write('Yes')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('')
    def categorydisp(self, listname, catname):
        '''Test the category display (CD) action.

        Renames category 1's header to the upper-cased catname, confirms the
        change is shown, then renames it back to the original catname and
        saves the list.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Create Problem')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('Build')
        self.VistA.wait('LIST NAME')
        self.VistA.write(listname)
        self.VistA.wait('Select Action:')
        self.VistA.write('CD')
        self.VistA.wait('Category')
        self.VistA.write('1')
        self.VistA.wait('HEADER:')
        self.VistA.write(catname.upper())
        self.VistA.wait('AUTOMATICALLY')
        self.VistA.write('Yes')
        # Display must now show the upper-cased header.
        self.VistA.wait(catname.upper())
        self.VistA.write('CD')
        self.VistA.wait('Category')
        self.VistA.write('1')
        self.VistA.wait('HEADER:')
        self.VistA.write(catname)
        self.VistA.wait('AUTOMATICALLY')
        self.VistA.write('Yes')
        self.VistA.wait('Select Action')
        self.VistA.write('SV')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('')
    def changesellist(self, list1, list2, category=None):
        '''Change the list being built (CL action) from list1 to list2.

        category -- when given, a category expected to be displayed on
        list2; when None, list2 is expected to be empty.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Create Problem')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('Build')
        self.VistA.wait('LIST NAME:')
        self.VistA.write(list1)
        self.VistA.wait('Select Action:')
        self.VistA.write('CL')
        self.VistA.wait('LIST NAME:')
        self.VistA.write(list2)
        self.VistA.wait_re(list2)
        if category is None:
            self.VistA.wait('No items available.')
        else:
            self.VistA.wait(category)
        self.VistA.write('')
        self.VistA.wait('Create Problem Selection Lists')
        self.VistA.write('')
    def editpart1(self, ssn, probnum, itemnum, chgval):
        '''Start a simple edit of a problem (items 1,2,4,5 or 6 only).

        Opens the edit dialog and selects the item, leaving the session at
        the item's value prompt; editpart2 completes the edit.
        NOTE(review): chgval is unused here -- the new value is supplied by
        editpart2; kept for signature symmetry with editpart2/badeditpart1.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('ED')
        self.VistA.wait('Select Problem')
        self.VistA.write(probnum) # which patient problem
        self.VistA.wait('Select Item')
        self.VistA.write(itemnum) # select 1, 2,4,5,or6
    def editpart2(self, ssn, probnum, itemnum, chgval, icd10='',snomed=''):
        ''' Complete an edit started by editpart1 (used for the lock test).

        Supplies chgval, then icd10, then snomed to successive lookup
        prompts, accepts the term, and quits without printing a chart copy.
        NOTE(review): ssn, probnum and itemnum are unused here -- they are
        consumed by editpart1; kept for signature symmetry.
        '''
        self.VistA.wait(':')
        self.VistA.write('')
        probList=[chgval,icd10,snomed]
        probIndex = 0
        while True:
            rval = self.VistA.multiwait(['Select Item', 'Ok','A suitable term'])
            if rval == 0:
                # Back at the item menu: SC saves the change.
                self.VistA.write('SC')
                break
            elif rval == 1:
                self.VistA.write('Yes')
            elif rval == 2:
                self.VistA.write(probList[probIndex])
                probIndex += 1
        self.VistA.wait('Select Action')
        self.VistA.write('QUIT')
        self.VistA.wait('Print a new problem list')
        self.VistA.write('N')
    def badeditpart1(self, ssn, probnum, itemnum, chgval,icd10):
        ''' Attempt an edit expected to hit an "edited by another user" lock.

        Selects the problem and, if the lock message does not appear at
        once, supplies icd10 and waits for it before quitting.
        NOTE(review): itemnum and chgval are unused (the item-selection
        steps are commented out below); kept for signature compatibility.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('ED')
        self.VistA.wait('Select Problem')
        self.VistA.write(probnum) # which patient problem
        # self.VistA.wait('Select Item')
        # self.VistA.write(itemnum)
        index = self.VistA.multiwait(['Select Problem', 'edited by another user'])
        if index == 0:
            self.VistA.write(icd10)
            self.VistA.wait('edited by another user')
        self.VistA.write('QUIT')
    def editPLsite(self, ver, prompt, uselex, order, screendups):
        '''Edit the Problem List site parameters.

        ver        -- VERIFY TRANSCRIBED PROBLEMS setting
        prompt     -- PROMPT FOR CHART COPY setting
        uselex     -- USE CLINICAL LEXICON setting
        order      -- DISPLAY ORDER setting
        screendups -- SCREEN DUPLICATE ENTRIES setting
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Edit PL Site Parameters')
        self.VistA.wait('VERIFY TRANSCRIBED PROBLEMS:')
        self.VistA.write(ver)
        self.VistA.wait('PROMPT FOR CHART COPY:')
        self.VistA.write(prompt)
        self.VistA.wait('USE CLINICAL LEXICON:')
        self.VistA.write(uselex)
        self.VistA.wait('DISPLAY ORDER:')
        self.VistA.write(order)
        self.VistA.wait('SCREEN DUPLICATE ENTRIES:')
        self.VistA.write(screendups)
    def checkVerplsetting(self, ssn):
        ''' Check that the Verify ('$') action is disabled by site settings.

        With verification turned off, entering '$' must be rejected with
        "$ is not a valid selection".
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('$')
        self.VistA.wait('$ is not a valid selection')
        self.VistA.wait('Select Action')
        self.VistA.write('Q')
    def checkRMsellist(self, ssn, clinic):
        '''Verify behaviour when adding a problem via a clinic whose
        selection list has been removed.

        Expects either "No items available" after the retrieval message, or
        a direct fall-through to the free-text PROBLEM prompt.
        '''
        self.VistA.wait('Problem List Mgt Menu')
        self.VistA.write('Patient Problem List')
        self.VistA.wait('PATIENT NAME')
        self.VistA.write(ssn)
        self.VistA.wait('Select Action')
        self.VistA.write('AD')
        self.VistA.wait('Clinic')
        self.VistA.write(clinic)
        index = self.VistA.multiwait(['Retrieving list of problems ...',"PROBLEM:"])
        if index == 0:
            self.VistA.wait('No items available. Returning to Problem List ...')
        else:
            self.VistA.write('')
        self.VistA.wait('Select Action')
        self.VistA.write('Q')
| {
"content_hash": "e4fbee5b0a42809f6055d9e55ea4d55f",
"timestamp": "",
"source": "github",
"line_count": 1281,
"max_line_length": 109,
"avg_line_length": 39.40671350507416,
"alnum_prop": 0.5709984152139461,
"repo_name": "OSEHRA/VistA",
"id": "bfdc4839f8ae569b7bc59048efef9037aecee673",
"size": "51263",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Testing/Functional/RAS/lib/PLActions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6315"
},
{
"name": "Brightscript",
"bytes": "297"
},
{
"name": "CMake",
"bytes": "120463"
},
{
"name": "CSS",
"bytes": "132661"
},
{
"name": "Genshi",
"bytes": "72951258"
},
{
"name": "HTML",
"bytes": "2296661"
},
{
"name": "JavaScript",
"bytes": "2341060"
},
{
"name": "M",
"bytes": "483901"
},
{
"name": "PHP",
"bytes": "6750"
},
{
"name": "Pascal",
"bytes": "17825658"
},
{
"name": "Python",
"bytes": "1475872"
},
{
"name": "Ruby",
"bytes": "12147"
},
{
"name": "Shell",
"bytes": "98820"
}
],
"symlink_target": ""
} |
"""Test output of various incantations of example.sh
"""
import unittest
import subprocess
from textwrap import dedent
def run(script, *args):
    """Execute *script* with *args* and return its stripped stdout."""
    command = [script]
    command.extend(args)
    if hasattr(subprocess, 'check_output'):
        # python 2.7+ / 3.x
        return subprocess.check_output(command, universal_newlines=True).strip()
    # python 2.6 fallback: check_output does not exist yet
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    return proc.communicate()[0].strip()
def strip(val):
    """Dedent *val* and trim leading/trailing whitespace."""
    cleaned = dedent(val)
    return cleaned.strip()
class TestEverything(unittest.TestCase):
    """End-to-end checks of example.sh output.

    Requires ./example.sh and ./argparse.bash to be present and executable
    in the current working directory.
    """
    def test01(self):
        # Default invocation: only the two required positionals.
        output = run('./example.sh', 'infile', 'outfile')
        expected = strip("""
        required infile: infile
        required outfile: outfile
        the answer: 42
        do the thing? no, do not do it
        arg with multiple values: []
        """)
        self.assertEqual(output, expected)
    def test02(self):
        # -m accepts multiple values (nargs-style).
        output = run('./example.sh', 'infile', 'outfile', '-m', 'one fish', 'two fish')
        self.assertTrue('arg with multiple values: [one fish] [two fish]' in output)
    def test03(self):
        # -d flips the boolean flag.
        output = run('./example.sh', 'infile', 'outfile', '-d')
        self.assertTrue('yes, do it' in output)
    def test04(self):
        # -a overrides the default answer.
        output = run('./example.sh', 'infile', 'outfile', '-a', '0')
        self.assertTrue('the answer: 0' in output)
    def test05(self):
        # argparse.bash prints a script template
        output = run('./argparse.bash')
        self.assertTrue(output.startswith('#!/usr/bin/env bash'))
# Allow running the suite directly: python test.py
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "1b28fc3cae2e797a95fd084ec2067f80",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 87,
"avg_line_length": 26.466666666666665,
"alnum_prop": 0.6013853904282116,
"repo_name": "nhoffman/argparse-bash",
"id": "41fc947ba405beec13b32d66f6645a93b8026053",
"size": "1611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1611"
},
{
"name": "Shell",
"bytes": "3189"
}
],
"symlink_target": ""
} |
"""
===============================
Univariate Feature Selection
===============================
An example showing univariate feature selection.
Noisy (non informative) features are added to the iris data and
univariate feature selection is applied. For each feature, we plot the
p-values for the univariate feature selection and the corresponding
weights of an SVM. We can see that univariate feature selection
selects the informative features and that these have larger SVM weights.
In the total set of features, only the 4 first ones are significant. We
can see that they have the highest score with univariate feature
selection. The SVM attributes small weights to these features, but these
weight are non zero. Applying univariate feature selection before the SVM
increases the SVM weight attributed to the significant features, and will
thus improve classification.
"""
print __doc__  # Python 2 print statement: show the module docstring above
import numpy as np
import pylab as pl
from sklearn import datasets, svm
from sklearn.feature_selection import SelectPercentile, f_classif
###############################################################################
# import some data to play with
# The IRIS dataset
iris = datasets.load_iris()
# Some noisy data not correlated
# NOTE(review): the noise is unseeded, so scores differ between runs.
E = np.random.normal(size=(len(iris.data), 35))
# Add the noisy data to the informative features
x = np.hstack((iris.data, E))
y = iris.target
###############################################################################
pl.figure(1)
pl.clf()
x_indices = np.arange(x.shape[-1])
###############################################################################
# Univariate feature selection with F-test for feature scoring
# We use the default selection function: the 10% most significant features
selector = SelectPercentile(f_classif, percentile=10)
selector.fit(x, y)
# Normalised -log10(p-value): higher bar = more significant feature.
scores = -np.log10(selector.scores_)
scores /= scores.max()
pl.bar(x_indices - .45, scores, width=.3,
        label=r'Univariate score ($-Log(p_{value})$)',
        color='g')
###############################################################################
# Compare to the weights of an SVM
clf = svm.SVC(kernel='linear')
clf.fit(x, y)
# Squared coefficients summed over classes, normalised to [0, 1].
svm_weights = (clf.coef_ ** 2).sum(axis=0)
svm_weights /= svm_weights.max()
pl.bar(x_indices - .15, svm_weights, width=.3, label='SVM weight',
        color='r')
pl.title("Comparing feature selection")
pl.xlabel('Feature number')
pl.yticks(())
pl.axis('tight')
pl.legend(loc='upper right')
pl.show()
| {
"content_hash": "f28f3aae34ada6b6471a3fa2a9dd4388",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 33.013513513513516,
"alnum_prop": 0.6299631600491199,
"repo_name": "sgenoud/scikit-learn",
"id": "adfa850297d9ffca34fd76de2e29c57a7b9d5ff5",
"size": "2443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/plot_feature_selection.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "7396960"
},
{
"name": "C++",
"bytes": "408753"
},
{
"name": "JavaScript",
"bytes": "4736"
},
{
"name": "Objective-C",
"bytes": "4595"
},
{
"name": "Python",
"bytes": "3013862"
},
{
"name": "Shell",
"bytes": "687"
}
],
"symlink_target": ""
} |
"""
This is a Python PID module heavily inspired by the Arduino PID Library
written by Brett Beauregard.
http://brettbeauregard.com/blog/2011/04/improving-the-beginners-pid-introduction/
"""
import time
from enum import Enum
class Direction(Enum):
    """Controller action: direct (+output drives +input) or reverse
    (+output drives -input)."""
    direct = 1
    reverse = 2


class Mode(Enum):
    """Controller operating mode: automatic computes, manual is a no-op."""
    automatic = 1
    manual = 2


class PID(object):
    def __init__(self, kp, ki, kd, set_point, controller_direction):
        """
        The parameters specified here are those for for which we can't set up
        reliable defaults, so we need to have the user set them.
        :param kp: Proportional Tuning Parameter
        :param ki: Integral Tuning Parameter
        :param kd: Derivative Tuning Parameter
        :param set_point: The value that we want the process to be.
        :param controller_direction: Direction.direct or Direction.reverse.
        """
        self.kp = kp
        self.ki = ki
        self.kd = kd
        self.direction = controller_direction
        self.i_term = 0
        self.out_max = 0
        self.out_min = 0
        self.last_input = 0
        self.output = 0
        self.input = 0
        self.set_point = set_point
        self.mode = Mode.automatic
        self.set_output_limits(-40, 40)
        self.sample_time = 3000  # period between computations, in ms
        # Legacy attribute kept for backward compatibility; it is now kept
        # in sync by set_controller_direction (see BUG FIX note there).
        self.controller_direction = Direction.direct
        self.set_controller_direction(controller_direction)
        self.set_tunings(self.kp, self.ki, self.kd)
        self.last_time = self.now()

    @staticmethod
    def now():
        """
        Static method to make it easy to obtain the current time
        in milliseconds.
        :return: Current time in milliseconds.
        """
        return int(round(time.time() * 1000))

    def compute(self, input):
        """
        This, as they say, is where the magic happens. This function should
        be called every time "void loop()" executes. The function will decide
        for itself whether a new PID Output needs to be computed.
        :param input: Input value for the PID controller.
        :return: (output, True) when a new output was computed,
                 (0, False) when nothing was done (manual mode, or the
                 sample period has not yet elapsed).
        """
        if self.mode is Mode.manual:
            return 0, False
        delta_time = self.now() - self.last_time
        if delta_time >= self.sample_time:
            error = self.set_point - input
            # Integrate on the pre-scaled ki and clamp to avoid windup.
            self.i_term += (self.ki * error)
            if self.i_term > self.out_max:
                self.i_term = self.out_max
            elif self.i_term < self.out_min:
                self.i_term = self.out_min
            # Derivative on measurement (not on error) avoids derivative kick.
            delta_input = input - self.last_input
            self.output = self.kp * error + self.i_term - self.kd * delta_input
            if self.output > self.out_max:
                self.output = self.out_max
            elif self.output < self.out_min:
                self.output = self.out_min
            self.last_input = input
            self.last_time = self.now()
            return self.output, True
        else:
            return 0, False

    def set_tunings(self, kp, ki, kd):
        """
        This function allows the controller's dynamic performance to be
        adjusted. It's called automatically from the constructor,
        but tunings can also be adjusted on the fly during normal operation.
        Negative tunings are rejected (no change is made).
        :param kp: Proportional Tuning Parameter
        :param ki: Integral Tuning Parameter
        :param kd: Derivative Tuning Parameter
        """
        # BUG FIX: the original tested `ki < 0` twice and never validated kd.
        if kp < 0 or ki < 0 or kd < 0:
            return
        sample_time_in_sec = self.sample_time / 1000
        self.kp = kp
        self.ki = ki * sample_time_in_sec
        self.kd = kd / sample_time_in_sec
        if self.direction is Direction.reverse:
            # BUG FIX: consult self.direction (kept current by
            # set_controller_direction; the old code read a stale
            # self.controller_direction that was never updated) and negate
            # the *scaled* gains (the old code negated the raw arguments,
            # discarding the sample-time scaling).
            self.kp = 0 - self.kp
            self.ki = 0 - self.ki
            self.kd = 0 - self.kd

    def set_sample_time(self, sample_time):
        """
        Sets the period, in milliseconds, at which the calculation is
        performed, rescaling ki/kd so behaviour is unchanged.
        :param sample_time: The period, in milliseconds,
                            at which the calculation is performed.
        """
        if sample_time > 0:
            ratio = sample_time / self.sample_time
            self.ki *= ratio
            self.kd /= ratio
            self.sample_time = sample_time

    def set_output_limits(self, min, max):
        """
        Clamp range for the controller output (and the integral term).
        :param min: Minimum output value from the PID controller
        :param max: Maximum output value from the PID controller
        """
        if min >= max:
            return
        self.out_min = min
        self.out_max = max
        if self.mode == Mode.automatic:
            # Re-clamp the current output and integral to the new range.
            if self.output > self.out_max:
                self.output = self.out_max
            elif self.output < self.out_min:
                self.output = self.out_min
            if self.i_term > self.out_max:
                self.i_term = self.out_max
            elif self.i_term < self.out_min:
                self.i_term = self.out_min

    def set_mode(self, mode):
        """
        Allows the controller Mode to be set to manual or automatic.
        When the transition from manual to auto occurs, the controller is
        automatically initialized for bumpless transfer.
        :param mode: The mode of the PID controller.
        """
        if self.mode is Mode.manual and mode is Mode.automatic:
            self.initialize()
        self.mode = mode

    def initialize(self):
        """
        Does all the things that need to happen to ensure a smooth transfer
        from manual to automatic mode.
        """
        self.i_term = self.output
        self.last_input = self.input
        if self.i_term > self.out_max:
            self.i_term = self.out_max
        elif self.i_term < self.out_min:
            self.i_term = self.out_min

    def set_controller_direction(self, direction):
        """
        The PID will either be connected to a DIRECT acting process
        (+Output leads to +Input) or a REVERSE acting process
        (+Output leads to -Input.). We need to know which one,
        because otherwise we may increase the output when we should be
        decreasing. This is called from the constructor.
        :param direction: The direction of the PID controller.
        """
        if self.mode is Mode.automatic and direction is not self.direction:
            # Flip the sign of the working gains when the direction changes.
            self.kp = 0 - self.kp
            self.ki = 0 - self.ki
            self.kd = 0 - self.kd
        self.direction = direction
        # BUG FIX: keep the legacy attribute in sync so that any code that
        # still reads controller_direction sees the true direction.
        self.controller_direction = direction

    def set_set_point(self, set_point):
        """Update the target value the process should reach."""
        self.set_point = set_point
"content_hash": "3d00593d97809abb8971593f095b7753",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 81,
"avg_line_length": 31.23076923076923,
"alnum_prop": 0.5850478122283396,
"repo_name": "johngroves/SCOUT",
"id": "7d4190de58d33245a70e0acbcbe6bef2059dac9a",
"size": "6902",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scout/Logic/pid.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "4677"
},
{
"name": "C++",
"bytes": "5982"
},
{
"name": "Python",
"bytes": "25712"
}
],
"symlink_target": ""
} |
__author__ = 'nb254'
from collections import Counter
import time
import csv
import pandas as pd
import math
from datetime import date
from dateutil.relativedelta import relativedelta
import Features as features
import TagAnalysis as tga
import util
import TimeFeatures as tmpf
NUM_OF_ANS = 4  # a user must exceed this answer count to be "active" (see prepareData)
HOUR = 60 * 60  # seconds; accepted-answer latency threshold for "responsive" answerers
def dataCut(data, time_cutoff):
    """Return the rows of `data` asked after `time_cutoff`.

    Sorts newest-first by 'TimeAsked' and keeps rows up to the first one
    older than the cutoff.
    NOTE(review): the slice starts at 1, silently dropping the newest row,
    and returns an empty frame when no row is older than the cutoff --
    TODO confirm both are intended.
    """
    data_sorted = data.sort(['TimeAsked'], ascending=False)
    data_entries = 0
    for (i, timestamp) in enumerate(data_sorted['TimeAsked']):
        time_answered = tmpf.parseTime(timestamp)
        if time_answered < time_cutoff:
            data_entries = i
            print data_entries, 'questions to cut'
            break
    data_cut = data_sorted[1:data_entries]
    return data_cut
def activeAnswerers(answersUserIds, num_of_answers):
    """Return user ids with strictly more than `num_of_answers` answers."""
    answer_counts = Counter(answersUserIds)
    return [user for user, count in answer_counts.items()
            if count > num_of_answers]
def respAnswerers(data, ANSWER_TIME):
    """Count accepted answers per user that arrived in under ANSWER_TIME seconds.

    Rows with a NaN 'AnswererId' are skipped.  Returns a Counter keyed by
    integer answerer id.
    """
    fast_answerers = []
    for _, row in data.iterrows():
        seconds = int(row['SecondsToAcceptedAnswer'])
        answerer = row['AnswererId']
        if seconds < ANSWER_TIME and not math.isnan(answerer):
            fast_answerers.append(int(answerer))
    return Counter(fast_answerers)
def tags(data, answersUserIds, quest_tags):
    """Build '<tag>_<answerer_id>' keys for questions answered by the given users.

    quest_tags is positionally aligned with the rows of `data`; rows whose
    'AnswererId' is NaN or not in answersUserIds contribute nothing.
    Double quotes are stripped from tag names.
    """
    tagged = []
    for position, (_, row) in enumerate(data.iterrows()):
        answerer = row['AnswererId']
        if math.isnan(answerer):
            continue
        if answerer in answersUserIds:
            suffix = "_" + str(int(answerer))
            for raw_tag in quest_tags[position]:
                tagged.append(raw_tag.replace('"', '') + suffix)
    return tagged
def answerersPerTag(active_tags):
    """Count distinct answerers per tag.

    active_tags holds '<tag>_<answerer_id>' keys (as produced by tags());
    duplicates denote repeated answers by the same user on the same tag and
    are counted once.  Returns a Counter mapping tag -> number of distinct
    answerers.
    """
    unique_pairs = set(active_tags)
    # BUG FIX: split on the *last* underscore only; the original replaced
    # every underscore and kept the first token, corrupting tag names that
    # themselves contain underscores (e.g. 'my_tag_7' -> 'my').
    return Counter(pair.rsplit("_", 1)[0] for pair in unique_pairs)
def avScores(data, ans_per_tag, quest_tags):
    """Compute per-question average answerer counts and ratios.

    ans_per_tag -- list of three Counters (tag -> answerer count) for the
    [active, all, responsive] answerer groups; quest_tags is positionally
    aligned with the rows of `data`.  Each output row is
    [QuestionId, UserId, AnswererId, Tags, TimeAsked,
     active_avg, active/all_ratio, responsive_avg, responsive/all_ratio].
    NOTE(review): when av_score[1] (the 'all' average) is 0, the ratios
    carry over the values from the previous question -- TODO confirm this
    is intended rather than resetting them to 0.
    """
    av_scores = []
    i = 0
    act_pas_ratio = 0
    res_pas_ratio = 0
    #count average scores for each question
    tags = list(ans_per_tag)
    for index, row in data.iterrows():
        av_score = [0, 0, 0]
        for tag in quest_tags[i]:
            #tag = tag.replace("'", '')
            #print tag
            for j in range(0, len(av_score)):
                if tag in tags[j]:
                    av_score[j] = float(av_score[j] + ans_per_tag[j][tag])
        av_score[:] = [x / len(quest_tags[i]) for x in av_score] # divide each element by the length
        if av_score[1] != 0:
            act_pas_ratio = float(av_score[0] / av_score[1])
            res_pas_ratio = float(av_score[2] / av_score[1])
        av_scores.append([row['QuestionId'],
                          row['UserId'],
                          row['AnswererId'],
                          row['Tags'],
                          row['TimeAsked'],
                          av_score[0], act_pas_ratio,
                          av_score[2], res_pas_ratio])
        i = i + 1
    return av_scores
def prepareData(data, NUM_OF_ANS, TIME, not_saved=True):
    """Build (or reload from CSV caches) the three answerer groups and the
    per-question tag lists.

    Returns ([active, all, responsive], quest_tags).
    NOTE(review): this function references a module-level DIR that is only
    defined inside the commented-out driver block at the bottom of this
    file -- calling it as-is raises NameError; TODO define DIR or pass it
    as a parameter.
    """
    if not_saved:
        answersUserIds = data['AnswererId'].fillna(0.0).astype(int)
        all_answerers = activeAnswerers(answersUserIds, 0)
        util.saveElemsToCSV(DIR + 'temp/all_answerers.csv', all_answerers, header='')
        act_answerers = activeAnswerers(answersUserIds, NUM_OF_ANS)
        util.saveElemsToCSV(DIR + 'temp/act_answerers.csv', act_answerers, header='')
        resp_users_ans = respAnswerers(data, TIME)
        util.writeDict(DIR + 'temp/resp_users_ans.csv', resp_users_ans)
        # the list of tags for all questions
        quest_tags = tga.tagList(data)
        util.saveElemsToCSV(DIR + 'temp/quest_tags.csv', quest_tags, header='')
    else:
        all_answerers = util.openListFromCSV(DIR + 'temp/all_answerers.csv')
        all_answerers = [int(i) for i in all_answerers]
        act_answerers = util.openListFromCSV(DIR + 'temp/act_answerers.csv')
        act_answerers = [int(i) for i in act_answerers]
        resp_users_ans = util.openDictfromCSV(DIR + 'temp/resp_users_ans.csv')
        resp_users_ans = [int(i) for i in resp_users_ans]
        quest_tags = util.openListFromCSV(DIR + 'temp/quest_tags.csv')
    answerers = [act_answerers, all_answerers, resp_users_ans]
    return answerers, quest_tags
def numSubsAns(data, answerers, quest_tags):
    """Compute per-question average answerer scores for each answerer group.

    answerers -- list of (up to three) answerer-id groups; for each group
    the per-tag answerer counts are computed, then avScores aggregates
    them per question.
    """
    ans_per_tag = [0, 0, 0]
    for group_index, group in enumerate(answerers):
        group_tags = tags(data, group, quest_tags)
        ans_per_tag[group_index] = answerersPerTag(group_tags)
    return avScores(data, ans_per_tag, quest_tags)
'''
DIR = '/mnt/nb254_data/learning/data/'
filename = 'DATA_MERGED.csv'
#filename = 'DATAtoy1.csv'
filename_res = 'temp/useractivity.csv'
data = pd.read_csv(DIR + filename)
t_old, t_new = tmpf.findTimeSpan(data)
print 'from', t_old, 'to', t_new
#from 2008-07-31 21:42:52 to 2014-09-14 00:13:09
time_cutoff = time.strptime('2014-03-14 00:13:09', "%Y-%m-%d %H:%M:%S")
#datacut = dataCut(data, time_cutoff)
#datacut.to_csv(DIR + 'temp/datacut.csv')
datacut = pd.read_csv(DIR + 'temp/datacut.csv')
answerers, quest_tags = prepareData(datacut, NUM_OF_ANS=NUM_OF_ANS, TIME=HOUR, not_saved=True)
results = numSubsAns(datacut, answerers, quest_tags)
header = features.KEYS_US_ACT + features.USER_ACTIVITY
util.saveStatToCSV(DIR + filename_res, results, header, one_row=False)
'''
| {
"content_hash": "074ef11813a2c79a0981dd187959f7e0",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 105,
"avg_line_length": 37.14556962025316,
"alnum_prop": 0.6321349463281649,
"repo_name": "Nik0l/UTemPro",
"id": "99fe7886e258fc0672028129b0448dfedf3f3673",
"size": "5916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UserStats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "199683"
},
{
"name": "R",
"bytes": "1644"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import time
import _socket, poplib, imaplib
import frappe
from frappe import _
from frappe.utils import extract_email_id, convert_utc_to_user_timezone, now, cint, cstr, strip
from frappe.utils.scheduler import log
from email_reply_parser import EmailReplyParser
from email.header import decode_header
from frappe.utils.file_manager import get_random_filename
# Fine-grained exception types raised while pulling mail, so callers can
# distinguish size, timeout and login-limit failures from generic
# validation errors.
class EmailSizeExceededError(frappe.ValidationError): pass
class EmailTimeoutError(frappe.ValidationError): pass
class TotalSizeExceededError(frappe.ValidationError): pass
class LoginLimitExceeded(frappe.ValidationError): pass
class EmailServer:
	"""Wrapper for a POP3/IMAP server used to pull incoming emails.

	Subclasses are expected to override :meth:`setup`, :meth:`check_mails`
	and :meth:`process_message`."""
	def __init__(self, args=None):
		self.setup(args)

	def setup(self, args=None):
		# override in subclass; by default just store the settings dict
		self.settings = args or frappe._dict()

	def check_mails(self):
		# override in subclass; return False to skip pulling entirely
		return True

	def process_message(self, mail):
		# override in subclass to handle a single retrieved message
		pass

	def connect(self):
		"""Connect to **Email Account**."""
		# Dispatch on the configured protocol (IMAP vs POP3).
		if cint(self.settings.use_imap):
			return self.connect_imap()
		else:
			return self.connect_pop()

	def connect_imap(self):
		"""Connect to IMAP. Returns True on success, raises otherwise."""
		try:
			if cint(self.settings.use_ssl):
				self.imap = Timed_IMAP4_SSL(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
			else:
				self.imap = Timed_IMAP4(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
			self.imap.login(self.settings.username, self.settings.password)
			# connection established!
			return True
		except _socket.error:
			# Invalid mail server -- due to refusing connection
			frappe.msgprint(_('Invalid Mail Server. Please rectify and try again.'))
			raise
		except Exception, e:
			frappe.msgprint(_('Cannot connect: {0}').format(str(e)))
			raise

	def connect_pop(self):
		# this method returns True once a POP3 connection is established;
		# returns False on a temporary system problem so the caller can retry
		try:
			if cint(self.settings.use_ssl):
				self.pop = Timed_POP3_SSL(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
			else:
				self.pop = Timed_POP3(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
			self.pop.user(self.settings.username)
			self.pop.pass_(self.settings.password)
			# connection established!
			return True
		except _socket.error:
			# Invalid mail server -- due to refusing connection
			frappe.msgprint(_('Invalid Mail Server. Please rectify and try again.'))
			raise
		except poplib.error_proto, e:
			if self.is_temporary_system_problem(e):
				return False
			else:
				frappe.msgprint(_('Invalid User Name or Support Password. Please rectify and try again.'))
				raise

	def get_messages(self):
		"""Returns new email messages in a list."""
		if not self.check_mails():
			return # nothing to do
		frappe.db.commit()
		if not self.connect():
			return []
		try:
			# track if errors arised
			self.errors = False
			self.latest_messages = []
			email_list = self.get_new_mails()
			num = num_copy = len(email_list)
			# WARNING: Hard coded max no. of messages to be popped
			if num > 20: num = 20
			# size limits
			self.total_size = 0
			self.max_email_size = cint(frappe.local.conf.get("max_email_size"))
			self.max_total_size = 5 * self.max_email_size
			for i, message_meta in enumerate(email_list):
				# do not pull more than NUM emails
				if (i+1) > num:
					break
				try:
					self.retrieve_message(message_meta, i+1)
				except (TotalSizeExceededError, EmailTimeoutError, LoginLimitExceeded):
					break
			# WARNING: Mark as read - message number 101 onwards from the pop list
			# This is to avoid having too many messages entering the system
			num = num_copy
			if not cint(self.settings.use_imap):
				if num > 100 and not self.errors:
					for m in xrange(101, num+1):
						self.pop.dele(m)
		except Exception, e:
			# a login-limit error means the mailbox is simply busy; anything
			# else is a real failure and is propagated
			if self.has_login_limit_exceeded(e):
				pass
			else:
				raise
		finally:
			# no matter the exception, pop should quit if connected
			if cint(self.settings.use_imap):
				self.imap.logout()
			else:
				self.pop.quit()
		return self.latest_messages

	def get_new_mails(self):
		"""Return list of new mails (IMAP UIDs of unseen messages, or the
		POP3 LIST response lines)."""
		if cint(self.settings.use_imap):
			self.imap.select("Inbox")
			response, message = self.imap.uid('search', None, "UNSEEN")
			email_list = message[0].split()
		else:
			email_list = self.pop.list()[1]
		return email_list

	def retrieve_message(self, message_meta, msg_num=None):
		"""Fetch one raw message into self.latest_messages.

		Size/timeout/login-limit errors set self.errors and re-raise to
		abort the outer loop; any other error is logged and, for POP3, the
		message is deleted from the server either way."""
		incoming_mail = None
		try:
			self.validate_message_limits(message_meta)
			if cint(self.settings.use_imap):
				status, message = self.imap.uid('fetch', message_meta, '(RFC822)')
				self.latest_messages.append(message[0][1])
			else:
				msg = self.pop.retr(msg_num)
				self.latest_messages.append(b'\n'.join(msg[1]))
		except (TotalSizeExceededError, EmailTimeoutError):
			# propagate this error to break the loop
			self.errors = True
			raise
		except Exception, e:
			if self.has_login_limit_exceeded(e):
				self.errors = True
				raise LoginLimitExceeded, e
			else:
				# log performs rollback and logs error in scheduler log
				log("receive.get_messages", self.make_error_msg(msg_num, incoming_mail))
				self.errors = True
				frappe.db.rollback()
				if not cint(self.settings.use_imap):
					self.pop.dele(msg_num)
		else:
			if not cint(self.settings.use_imap):
				self.pop.dele(msg_num)

	def has_login_limit_exceeded(self, e):
		# matches the server's textual error response
		return "-ERR Exceeded the login limit" in strip(cstr(e.message))

	def is_temporary_system_problem(self, e):
		messages = (
			"-ERR [SYS/TEMP] Temporary system problem. Please try again later.",
			"Connection timed out",
		)
		for message in messages:
			if message in strip(cstr(e.message)) or message in strip(cstr(getattr(e, 'strerror', ''))):
				return True
		return False

	def validate_message_limits(self, message_meta):
		"""Raise if this message (or the running total) exceeds the
		configured size limits. Only effective for POP3, where
		``message_meta`` is a ``"msgnum size"`` LIST line."""
		# throttle based on email size
		if not self.max_email_size:
			return
		m, size = message_meta.split()
		size = cint(size)
		if size < self.max_email_size:
			self.total_size += size
			if self.total_size > self.max_total_size:
				raise TotalSizeExceededError
		else:
			raise EmailSizeExceededError

	def make_error_msg(self, msg_num, incoming_mail):
		"""Build a log message for a failed retrieval, including the
		message headers when they can still be fetched."""
		error_msg = "Error in retrieving email."
		if not incoming_mail:
			try:
				# retrieve headers
				incoming_mail = Email(b'\n'.join(self.pop.top(msg_num, 5)[1]))
			except:
				pass
		if incoming_mail:
			error_msg += "\nDate: {date}\nFrom: {from_email}\nSubject: {subject}\n".format(
				date=incoming_mail.date, from_email=incoming_mail.from_email, subject=incoming_mail.subject)
		return error_msg
class Email:
	"""Wrapper for an email: parses a raw RFC822 message into subject,
	content (text/html), date and attachments."""
	def __init__(self, content):
		"""Parses headers, content, attachments from given raw message.

		:param content: Raw message."""
		import email, email.utils
		import datetime
		self.raw = content
		self.mail = email.message_from_string(self.raw)
		self.text_content = ''
		self.html_content = ''
		self.attachments = []
		self.cid_map = {}
		self.parse()
		self.set_content_and_type()
		self.set_subject()
		self.from_email = extract_email_id(self.mail["From"])
		self.from_real_name = email.utils.parseaddr(self.mail["From"])[0]
		if self.mail["Date"]:
			# convert the Date header to the user's timezone
			utc = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
			utc_dt = datetime.datetime.utcfromtimestamp(utc)
			self.date = convert_utc_to_user_timezone(utc_dt).strftime('%Y-%m-%d %H:%M:%S')
		else:
			self.date = now()

	def parse(self):
		"""Walk and process multi-part email."""
		for part in self.mail.walk():
			self.process_part(part)

	def set_subject(self):
		"""Parse and decode `Subject` header."""
		import email.header
		_subject = email.header.decode_header(self.mail.get("Subject", "No Subject"))
		self.subject = _subject[0][0] or ""
		if _subject[0][1]:
			self.subject = self.subject.decode(_subject[0][1])
		else:
			# assume that the encoding is utf-8
			self.subject = self.subject.decode("utf-8")
		if not self.subject:
			self.subject = "No Subject"

	def set_content_and_type(self):
		# prefer the HTML part; fall back to the reply-stripped text part
		self.content, self.content_type = '[Blank Email]', 'text/plain'
		if self.html_content:
			self.content, self.content_type = self.html_content, 'text/html'
		else:
			self.content, self.content_type = EmailReplyParser.parse_reply(self.text_content), 'text/plain'

	def process_part(self, part):
		"""Parse email `part` and set it to `text_content`, `html_content` or `attachments`."""
		content_type = part.get_content_type()
		charset = part.get_content_charset()
		if not charset: charset = self.get_charset(part)
		if content_type == 'text/plain':
			self.text_content += self.get_payload(part, charset)
		if content_type == 'text/html':
			self.html_content += self.get_payload(part, charset)
		if part.get_filename():
			self.get_attachment(part, charset)

	def get_charset(self, part):
		"""Detect charset, falling back to chardet sniffing when the part
		does not declare one."""
		charset = part.get_content_charset()
		if not charset:
			import chardet
			charset = chardet.detect(str(part))['encoding']
		return charset

	def get_payload(self, part, charset):
		# decode the payload; on an unknown charset return it raw
		try:
			return unicode(part.get_payload(decode=True),str(charset),"ignore")
		except LookupError:
			return part.get_payload()

	def get_attachment(self, part, charset):
		"""Collect an attachment part into self.attachments and record its
		Content-Id (for inline images) in self.cid_map."""
		fcontent = part.get_payload(decode=True)
		if fcontent:
			content_type = part.get_content_type()
			fname = part.get_filename()
			if fname:
				try:
					fname = cstr(decode_header(fname)[0][0])
				except:
					# undecodable filename: fall back to a random one
					fname = get_random_filename(content_type=content_type)
			else:
				fname = get_random_filename(content_type=content_type)
			self.attachments.append({
				'content_type': content_type,
				'fname': fname,
				'fcontent': fcontent,
			})
			cid = (part.get("Content-Id") or "").strip("><")
			if cid:
				self.cid_map[fname] = cid

	def save_attachments_in_doc(self, doc):
		"""Save email attachments in given document."""
		from frappe.utils.file_manager import save_file, MaxFileSizeReachedError
		saved_attachments = []
		for attachment in self.attachments:
			try:
				file_data = save_file(attachment['fname'], attachment['fcontent'],
					doc.doctype, doc.name, is_private=1)
				saved_attachments.append(file_data)
				if attachment['fname'] in self.cid_map:
					# re-key the Content-Id under the stored file's name
					self.cid_map[file_data.name] = self.cid_map[attachment['fname']]
			except MaxFileSizeReachedError:
				# WARNING: bypass max file size exception
				pass
			except frappe.DuplicateEntryError:
				# same file attached twice??
				pass
		return saved_attachments

	def get_thread_id(self):
		"""Extract thread ID from `[]` in the subject, e.g. ``[ABC/123]``."""
		import re
		l = re.findall('(?<=\[)[\w/-]+', self.subject)
		return l and l[0] or None
class TimerMixin(object):
	"""Mixin that adds a cumulative timeout to poplib/imaplib classes.

	``_super`` is set by the concrete subclass to the wrapped base class;
	the base's methods are called unbound (``self._super.method(self, ...)``)
	so the mixin works identically for all four protocol classes."""
	def __init__(self, *args, **kwargs):
		self.timeout = kwargs.pop('timeout', 0.0)
		self.elapsed_time = 0.0
		self._super.__init__(self, *args, **kwargs)
		if self.timeout:
			# set per operation timeout to one-fifth of total pop timeout
			self.sock.settimeout(self.timeout / 5.0)

	def _getline(self, *args, **kwargs):
		# accumulate time spent reading lines; abort once the total
		# exceeds the configured timeout
		start_time = time.time()
		ret = self._super._getline(self, *args, **kwargs)
		self.elapsed_time += time.time() - start_time
		if self.timeout and self.elapsed_time > self.timeout:
			raise EmailTimeoutError
		return ret

	def quit(self, *args, **kwargs):
		# reset the clock so a reused object starts fresh
		self.elapsed_time = 0.0
		return self._super.quit(self, *args, **kwargs)
# Concrete timed protocol classes: TimerMixin must come first in the MRO,
# and _super names the wrapped stdlib class for the mixin's unbound calls.
class Timed_POP3(TimerMixin, poplib.POP3):
	_super = poplib.POP3

class Timed_POP3_SSL(TimerMixin, poplib.POP3_SSL):
	_super = poplib.POP3_SSL

class Timed_IMAP4(TimerMixin, imaplib.IMAP4):
	_super = imaplib.IMAP4

class Timed_IMAP4_SSL(TimerMixin, imaplib.IMAP4_SSL):
	_super = imaplib.IMAP4_SSL
| {
"content_hash": "bcd20dcd320e939151488b5755668557",
"timestamp": "",
"source": "github",
"line_count": 408,
"max_line_length": 98,
"avg_line_length": 28.110294117647058,
"alnum_prop": 0.687767024152062,
"repo_name": "gangadhar-kadam/helpdesk-frappe",
"id": "af8715815278ead85583e5b6caf96f37f966e7b5",
"size": "11570",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "frappe/email/receive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "280404"
},
{
"name": "HTML",
"bytes": "1327803"
},
{
"name": "JavaScript",
"bytes": "1092869"
},
{
"name": "Python",
"bytes": "1232416"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
"""
All functions for the ascidata module
@author: Martin Kuemmel, Jonas Haase
@organization: Space Telescope - European Coordinating Facility (ST-ECF)
@license: Gnu Public Licence
@contact: mkuemmel@eso.org
@since: 2005/09/13
$LastChangedBy: jhaase $
$LastChangedDate: 2007-07-11 08:45:32Z $
$HeadURL: http://astropy.scipy.org/svn/astrolib/trunk/asciidata/Lib/asciifunction.py $
"""
__version__ = "Version 1.1 $LastChangedRevision: 234 $"
from .asciidata import *
def open(filename, null=None, delimiter=None, comment_char=None):
    """
    Open an ASCII table from a file.

    NOTE: this module-level name shadows the builtin open(); it is kept
    for backwards compatibility since callers use it as asciidata.open().

    @param filename: the filename to create the AsciiData from
    @type filename: string
    @param null: string to be interpreted as NULL
    @type null: string
    @param delimiter: string to be used as delimiter
    @type delimiter: string
    @param comment_char: string to be used as comment_char
    @type comment_char: string
    @return: the created AsciiData instance
    @rtype: AsciiData
    """
    return AsciiData(filename=filename,
                     null=null,
                     delimiter=delimiter,
                     comment_char=comment_char)
def create(ncols, nrows, null=None, delimiter=None):
    """
    Create an empty AsciiData table of the given size.

    The table carries a header comment but no per-column info
    (columnInfo=0), in contrast to createSEx().

    @param ncols: number of columns to be created
    @type ncols: integer
    @param nrows: number of rows to be created
    @type nrows: integer
    @param null: string to be interpreted as NULL
    @type null: string
    @param delimiter: string to be used as delimiter
    @type delimiter: string
    @return: the created AsciiData instance
    @rtype: AsciiData
    """
    return AsciiData(ncols=ncols,
                     nrows=nrows,
                     null=null,
                     delimiter=delimiter,
                     columnInfo=0,
                     headerComment=1)
def createSEx(ncols, nrows, null=None, delimiter=None):
    """
    Create an empty table in the SExtractor catalogue style.

    Identical to create() except that per-column info is written
    (columnInfo=1), as SExtractor catalogues require.

    @param ncols: number of columns to be created
    @type ncols: integer
    @param nrows: number of rows to be created
    @type nrows: integer
    @param null: string to be interpreted as NULL
    @type null: string
    @param delimiter: string to be used as delimiter
    @type delimiter: string
    @return: the created AsciiData instance
    @rtype: AsciiData
    """
    return AsciiData(ncols=ncols,
                     nrows=nrows,
                     null=null,
                     delimiter=delimiter,
                     columnInfo=1,
                     headerComment=1)
| {
"content_hash": "4e41a8c7557754b908111fff28f29558",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 86,
"avg_line_length": 31.946666666666665,
"alnum_prop": 0.6940734557595993,
"repo_name": "davidharvey1986/pyRRG",
"id": "3aee5a153a86c567570263df9c835fda5931bbdc",
"size": "2396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/asciidata/asciifunction.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "8321"
},
{
"name": "Python",
"bytes": "5803472"
},
{
"name": "Shell",
"bytes": "3862"
}
],
"symlink_target": ""
} |
"""
Given the root node of a binary search tree (BST) and a value. You need to
find the node in the BST that the node's value equals the given value. Return
the subtree rooted with that node. If such node doesn't exist, you should return
NULL.
For example,
Given the tree:
4
/ \
2 7
/ \
1 3
And the value to search: 2
You should return this subtree:
2
/ \
1 3
In the example above, if we want to search the value 5, since there is no node
with value 5, we should return NULL.
Note that an empty tree is represented by NULL, therefore you would see the
expected output (serialized tree format) as [], not null.
"""
# Definition for a binary tree node.
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = self.right = None


class Solution:
    def searchBST(self, root: TreeNode, val: int) -> TreeNode:
        """Return the subtree rooted at the node whose value equals ``val``,
        or None when no such node exists in the BST."""
        node = root
        while node is not None and node.val != val:
            # BST invariant: smaller values live left, larger values right.
            node = node.right if node.val < val else node.left
        return node
| {
"content_hash": "22460b7ae4fd7b5499a441981dd386d3",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 80,
"avg_line_length": 23.895833333333332,
"alnum_prop": 0.6224934612031386,
"repo_name": "algorhythms/LeetCode",
"id": "a99a6a63e4d5315da14d5f0616bd8e4713278e53",
"size": "1166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "700 Search in a Binary Search Tree.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1444167"
}
],
"symlink_target": ""
} |
"""The Categorical distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util.tf_export import tf_export
def _broadcast_cat_event_and_params(event, params, base_dtype):
  """Broadcasts the event or distribution parameters.

  Args:
    event: integer (or safely castable float) `Tensor` of category indices.
    params: `Tensor` of per-category parameters whose last dimension is the
      number of categories.
    base_dtype: the distribution's dtype, used only in the error message.

  Returns:
    Tuple `(event, params)` where the two have been broadcast against each
    other over the batch dimensions (`params` keeps its trailing
    category dimension).
  """
  if event.dtype.is_integer:
    pass
  elif event.dtype.is_floating:
    # When `validate_args=True` we've already ensured int/float casting
    # is closed.
    event = math_ops.cast(event, dtype=dtypes.int32)
  else:
    raise TypeError("`value` should have integer `dtype` or "
                    "`self.dtype` ({})".format(base_dtype))
  # Broadcast dynamically unless the static shapes prove it unnecessary.
  shape_known_statically = (
      params.shape.ndims is not None and
      params.shape[:-1].is_fully_defined() and
      event.shape.is_fully_defined())
  if not shape_known_statically or params.shape[:-1] != event.shape:
    # Multiplying by ones of the other's shape performs the broadcast.
    params *= array_ops.ones_like(event[..., array_ops.newaxis],
                                  dtype=params.dtype)
    params_shape = array_ops.shape(params)[:-1]
    event *= array_ops.ones(params_shape, dtype=event.dtype)
    if params.shape.ndims is not None:
      # Propagate whatever static batch shape is known.
      event.set_shape(tensor_shape.TensorShape(params.shape[:-1]))
  return event, params
@tf_export("distributions.Categorical")
class Categorical(distribution.Distribution):
  """Categorical distribution.

  The Categorical distribution is parameterized by either probabilities or
  log-probabilities of a set of `K` classes. It is defined over the integers
  `{0, 1, ..., K}`.

  The Categorical distribution is closely related to the `OneHotCategorical` and
  `Multinomial` distributions.  The Categorical distribution can be intuited as
  generating samples according to `argmax{ OneHotCategorical(probs) }` itself
  being identical to `argmax{ Multinomial(probs, total_count=1) }`.

  #### Mathematical Details

  The probability mass function (pmf) is,

  ```none
  pmf(k; pi) = prod_j pi_j**[k == j]
  ```

  #### Pitfalls

  The number of classes, `K`, must not exceed:
  - the largest integer representable by `self.dtype`, i.e.,
    `2**(mantissa_bits+1)` (IEEE 754),
  - the maximum `Tensor` index, i.e., `2**31-1`.

  In other words,

  ```python
  K <= min(2**31-1, {
    tf.float16: 2**11,
    tf.float32: 2**24,
    tf.float64: 2**53 }[param.dtype])
  ```

  Note: This condition is validated only when `self.validate_args = True`.

  #### Examples

  Creates a 3-class distribution with the 2nd class being most likely.

  ```python
  dist = Categorical(probs=[0.1, 0.5, 0.4])
  n = 1e4
  empirical_prob = tf.cast(
      tf.histogram_fixed_width(
        dist.sample(int(n)),
        [0., 2],
        nbins=3),
      dtype=tf.float32) / n
  # ==> array([ 0.1005,  0.5037,  0.3958], dtype=float32)
  ```

  Creates a 3-class distribution with the 2nd class being most likely.
  Parameterized by [logits](https://en.wikipedia.org/wiki/Logit) rather than
  probabilities.

  ```python
  dist = Categorical(logits=np.log([0.1, 0.5, 0.4])
  n = 1e4
  empirical_prob = tf.cast(
      tf.histogram_fixed_width(
        dist.sample(int(n)),
        [0., 2],
        nbins=3),
      dtype=tf.float32) / n
  # ==> array([0.1045,  0.5047, 0.3908], dtype=float32)
  ```

  Creates a 3-class distribution with the 3rd class being most likely.
  The distribution functions can be evaluated on counts.

  ```python
  # counts is a scalar.
  p = [0.1, 0.4, 0.5]
  dist = Categorical(probs=p)
  dist.prob(0)  # Shape []

  # p will be broadcast to [[0.1, 0.4, 0.5], [0.1, 0.4, 0.5]] to match counts.
  counts = [1, 0]
  dist.prob(counts)  # Shape [2]

  # p will be broadcast to shape [3, 5, 7, 3] to match counts.
  counts = [[...]] # Shape [5, 7, 3]
  dist.prob(counts)  # Shape [5, 7, 3]
  ```

  """

  def __init__(
      self,
      logits=None,
      probs=None,
      dtype=dtypes.int32,
      validate_args=False,
      allow_nan_stats=True,
      name="Categorical"):
    """Initialize Categorical distributions using class log-probabilities.

    Args:
      logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities
        of a set of Categorical distributions. The first `N - 1` dimensions
        index into a batch of independent distributions and the last dimension
        represents a vector of logits for each class. Only one of `logits` or
        `probs` should be passed in.
      probs: An N-D `Tensor`, `N >= 1`, representing the probabilities
        of a set of Categorical distributions. The first `N - 1` dimensions
        index into a batch of independent distributions and the last dimension
        represents a vector of probabilities for each class. Only one of
        `logits` or `probs` should be passed in.
      dtype: The type of the event samples (default: int32).
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    with ops.name_scope(name, values=[logits, probs]) as name:
      # Exactly one of logits/probs is given; derive the other.
      self._logits, self._probs = distribution_util.get_logits_and_probs(
          logits=logits,
          probs=probs,
          validate_args=validate_args,
          multidimensional=True,
          name=name)
      if validate_args:
        self._logits = distribution_util.embed_check_categorical_event_shape(
            self._logits)
      # Prefer static shape information; fall back to graph ops when the
      # rank / dimensions are not known at graph-construction time.
      logits_shape_static = self._logits.get_shape().with_rank_at_least(1)
      if logits_shape_static.ndims is not None:
        self._batch_rank = ops.convert_to_tensor(
            logits_shape_static.ndims - 1,
            dtype=dtypes.int32,
            name="batch_rank")
      else:
        with ops.name_scope(name="batch_rank"):
          self._batch_rank = array_ops.rank(self._logits) - 1
      logits_shape = array_ops.shape(self._logits, name="logits_shape")
      if logits_shape_static[-1].value is not None:
        self._event_size = ops.convert_to_tensor(
            logits_shape_static[-1].value,
            dtype=dtypes.int32,
            name="event_size")
      else:
        with ops.name_scope(name="event_size"):
          self._event_size = logits_shape[self._batch_rank]
      if logits_shape_static[:-1].is_fully_defined():
        self._batch_shape_val = constant_op.constant(
            logits_shape_static[:-1].as_list(),
            dtype=dtypes.int32,
            name="batch_shape")
      else:
        with ops.name_scope(name="batch_shape"):
          self._batch_shape_val = logits_shape[:-1]
    super(Categorical, self).__init__(
        dtype=dtype,
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._logits,
                       self._probs],
        name=name)

  @property
  def event_size(self):
    """Scalar `int32` tensor: the number of classes."""
    return self._event_size

  @property
  def logits(self):
    """Vector of coordinatewise logits."""
    return self._logits

  @property
  def probs(self):
    """Vector of coordinatewise probabilities."""
    return self._probs

  def _batch_shape_tensor(self):
    return array_ops.identity(self._batch_shape_val)

  def _batch_shape(self):
    return self.logits.get_shape()[:-1]

  def _event_shape_tensor(self):
    # events are scalar class indices
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    return tensor_shape.scalar()

  def _sample_n(self, n, seed=None):
    # multinomial() wants 2-D logits; flatten batch dims when needed.
    if self.logits.get_shape().ndims == 2:
      logits_2d = self.logits
    else:
      logits_2d = array_ops.reshape(self.logits, [-1, self.event_size])
    sample_dtype = dtypes.int64 if self.dtype.size > 4 else dtypes.int32
    draws = random_ops.multinomial(
        logits_2d, n, seed=seed, output_dtype=sample_dtype)
    # Move the sample dimension to the front: [n] + batch_shape.
    draws = array_ops.reshape(
        array_ops.transpose(draws),
        array_ops.concat([[n], self.batch_shape_tensor()], 0))
    return math_ops.cast(draws, self.dtype)

  def _cdf(self, k):
    k = ops.convert_to_tensor(k, name="k")
    if self.validate_args:
      k = distribution_util.embed_check_integer_casting_closed(
          k, target_dtype=dtypes.int32)
    k, probs = _broadcast_cat_event_and_params(
        k, self.probs, base_dtype=self.dtype.base_dtype)

    # batch-flatten everything in order to use `sequence_mask()`.
    batch_flattened_probs = array_ops.reshape(probs,
                                              (-1, self._event_size))
    batch_flattened_k = array_ops.reshape(k, [-1])

    # CDF(k) = sum of probs for classes strictly below k.
    to_sum_over = array_ops.where(
        array_ops.sequence_mask(batch_flattened_k, self._event_size),
        batch_flattened_probs,
        array_ops.zeros_like(batch_flattened_probs))
    batch_flattened_cdf = math_ops.reduce_sum(to_sum_over, axis=-1)
    # Reshape back to the shape of the argument.
    return array_ops.reshape(batch_flattened_cdf, array_ops.shape(k))

  def _log_prob(self, k):
    k = ops.convert_to_tensor(k, name="k")
    if self.validate_args:
      k = distribution_util.embed_check_integer_casting_closed(
          k, target_dtype=dtypes.int32)
    k, logits = _broadcast_cat_event_and_params(
        k, self.logits, base_dtype=self.dtype.base_dtype)

    # log pmf(k) = log softmax(logits)[k], via the fused cross-entropy op.
    return -nn_ops.sparse_softmax_cross_entropy_with_logits(labels=k,
                                                            logits=logits)

  def _entropy(self):
    return -math_ops.reduce_sum(
        nn_ops.log_softmax(self.logits) * self.probs, axis=-1)

  def _mode(self):
    # The mode is the class with the largest logit.
    ret = math_ops.argmax(self.logits, axis=self._batch_rank)
    ret = math_ops.cast(ret, self.dtype)
    ret.set_shape(self.batch_shape)
    return ret
@kullback_leibler.RegisterKL(Categorical, Categorical)
def _kl_categorical_categorical(a, b, name=None):
  """Calculate the batched KL divergence KL(a || b) with a and b Categorical.

  Args:
    a: instance of a Categorical distribution object.
    b: instance of a Categorical distribution object.
    name: (optional) Name to use for created operations.
      default is "kl_categorical_categorical".

  Returns:
    Batchwise KL(a || b)
  """
  with ops.name_scope(name, "kl_categorical_categorical",
                      values=[a.logits, b.logits]):
    # KL(a || b) = sum_k softmax(a)_k * (log softmax(a)_k - log softmax(b)_k)
    delta_log_probs1 = (nn_ops.log_softmax(a.logits) -
                        nn_ops.log_softmax(b.logits))
    return math_ops.reduce_sum(nn_ops.softmax(a.logits) * delta_log_probs1,
                               axis=-1)
| {
"content_hash": "d2265c029d865487ddb590b0bcdf4add",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 80,
"avg_line_length": 35.611111111111114,
"alnum_prop": 0.6470792165019934,
"repo_name": "kobejean/tensorflow",
"id": "fbbacf252100f30e9418a20343eb48e0a8583435",
"size": "12227",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/distributions/categorical.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "2867"
},
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "341894"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "49273038"
},
{
"name": "CMake",
"bytes": "195712"
},
{
"name": "Dockerfile",
"bytes": "36400"
},
{
"name": "Go",
"bytes": "1253646"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "836009"
},
{
"name": "Jupyter Notebook",
"bytes": "2604741"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52734"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99243"
},
{
"name": "PHP",
"bytes": "1357"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "41122917"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "466896"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
} |
def factorial(n):
    """Return n! (the product 1*2*...*n) for a non-negative integer n.

    Returns 1 for n == 0.  Raises ValueError for negative n — the previous
    implementation silently returned 1 in that case, masking bad input.
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    factorial_total = 1
    for k in range(2, n + 1):
        factorial_total *= k
    return factorial_total
# Script entry: read an integer, compute its factorial and report it.
n = int(input("enter in a number"))
result = factorial(n)
print("factorial of entered number is " + repr(result))
| {
"content_hash": "3fd5fbfda0a708d1406975741d07b761",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 51,
"avg_line_length": 14.8,
"alnum_prop": 0.5641891891891891,
"repo_name": "r-martin-/Code_College",
"id": "14d6474e4e8e05cdf526dff86d5b947f8e46bc99",
"size": "296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Exercise_code/100_exercises/factorial_Q2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "66432"
},
{
"name": "Shell",
"bytes": "6994"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.