text stringlengths 4 1.02M | meta dict |
|---|---|
from setuptools import setup
import elifedbtools
# Long description shown on PyPI comes straight from the README.
with open('README.rst') as fp:
    readme = fp.read()

# Runtime dependencies are single-sourced from requirements.txt.
with open('requirements.txt') as f:
    install_requires = f.read().splitlines()

setup(name='elifedbtools',
      # Version is single-sourced from the package's own __version__.
      version=elifedbtools.__version__,
      description='Tools for article and production data storage',
      long_description=readme,
      packages=['elifedbtools'],
      license = 'MIT',
      install_requires=install_requires,
      url='https://github.com/elifesciences/elife-db-tools',
      maintainer='eLife Sciences Publications Ltd.',
      maintainer_email='py@elifesciences.org',
      classifiers=[
          'Development Status :: 2 - Pre-Alpha',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
      ],
      )
| {
"content_hash": "1ad30563a57eebd3e0aa32f8ba5d4b73",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 64,
"avg_line_length": 31.482758620689655,
"alnum_prop": 0.6506024096385542,
"repo_name": "elifesciences/elife-db-tools",
"id": "fd992bb118deacf004d70e87da1694b37155fc5c",
"size": "913",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "296"
},
{
"name": "Python",
"bytes": "2120"
}
],
"symlink_target": ""
} |
import re
import copy
import sys
import spec_tables
import sackgrp
def name_to_index(string, name_table):
    """Look up *string* in *name_table*.

    Args:
        string: Name to search for.
        name_table: Sequence of names to search.

    Returns:
        ``[1, index]`` when *string* is found at ``index``;
        ``[0, 0]`` when it is absent (including an empty table).
    """
    # enumerate replaces the original hand-rolled counter.
    for index, name in enumerate(name_table):
        if name == string:
            return [1, index]
    return [0, 0]
def name_to_index_or_die(string, name_table):
    """Return the index of *string* in *name_table*; exit the process if absent."""
    found, idx = name_to_index(string, name_table)
    if found:
        return idx
    # Lookup failed: report and abort, matching the original behavior.
    print("spec scan failure on \"%s\"." % string)
    sys.exit(1)
class spec_t:
    """Group element identified by an integer ``code``.

    The code indexes into the tables held by the ``spec_tables`` module:
    products come from ``spec_tables.mul_table``, inverses from
    ``spec_tables.inv_table``, and names from ``spec_tables.name_table``.
    """

    def __init__(self, argcode):
        self.code = argcode

    def __mul__(self, other):
        # Group product via the precomputed Cayley (multiplication) table.
        return spec_t(spec_tables.mul_table[self.code][other.code])

    def __eq__(self, other):
        return self.code == other.code

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        return self.code < other.code

    def __le__(self, other):
        return self.code <= other.code

    def __gt__(self, other):
        return self.code > other.code

    def __ge__(self, other):
        return self.code >= other.code

    def inv(self):
        # Group inverse via the precomputed inverse table.
        return spec_t(spec_tables.inv_table[self.code])

    def scan(self, string):
        # Re-point this element at the named entry, exiting on a bad name.
        self.code = name_to_index_or_die(string, spec_tables.name_table)

    def __str__(self):
        return spec_tables.name_table[self.code]

    def __repr__(self):
        return self.__str__()
def params_from_string(params_string):
    """Parse a parameter string; currently an identity pass-through."""
    return params_string
def from_string(value_string, params_string):
    """Build a spec_t from its element name; params are parsed but unused."""
    params_from_string(params_string)  # kept for interface symmetry; result discarded
    return spec_t(name_to_index_or_die(value_string, spec_tables.name_table))
def install_table(cayley_table_with_names):
    """Install a group's lookup tables into the spec_tables module.

    Given a Cayley table whose entries are element *names* (its first row
    doubles as the name table), rebuild spec_tables.name_table,
    spec_tables.mul_table (as integer codes), and spec_tables.inv_table.

    Args:
        cayley_table_with_names: Square list of lists of element names.
    """
    spec_tables.mul_table = []
    spec_tables.inv_table = []
    spec_tables.name_table = []
    n = len(cayley_table_with_names)
    # Populate the name table
    spec_tables.name_table = copy.copy(cayley_table_with_names[0])
    # Populate the mul table.
    #
    # I should do some checking on the cayley_table_with_names -- the user
    # might have given me input which is non-square, or even ragged.
    # Fill it with zeroes, so the matrix has the correct size and may be
    # indexed.
    row = [1] * n
    for i in range(0, n):
        spec_tables.mul_table.append(copy.copy(row))
    # Now put real data in.
    for i in range(0, n):
        for j in range(0, n):
            spec_tables.mul_table[i][j] = name_to_index_or_die(cayley_table_with_names[i][j], spec_tables.name_table)
    # Populate the inv table.
    # I am being crass here. I'm assuming the Cayley table is good before I
    # start. The good news is that the is-group functions don't use the inv
    # table.
    G = []
    for i in range(0, n):
        G.append(spec_t(i))
    [found, e] = sackgrp.find_id(G)
    # If no identity exists, inv_table is deliberately left empty.
    if (found):
        for i in range(0, n):
            x = G[i]
            for j in range(0, n):
                y = G[j]
                z = x*y
                if (z.code == e.code):
                    # j is (an) inverse of i; record it.
                    spec_tables.inv_table.append(j)
                    # NOTE(review): this `continue` only advances the inner
                    # loop; a `break` after finding i's inverse may have been
                    # intended -- harmless for true groups, where inverses
                    # are unique. Confirm intent.
                    continue
# ================================================================
import unittest
if __name__ == '__main__':
    # Self-test scaffolding: one empty stub per public function/method above.
    # All stubs currently pass vacuously; they mark what remains to be tested.
    class test_cases(unittest.TestCase):
        def test_name_to_index(self):
            pass # to be implemented
        def test_name_to_index_or_die(self):
            pass # to be implemented
        def test___init__(self):
            pass # to be implemented
        def test___mul__(self):
            pass # to be implemented
        def test___eq__(self):
            pass # to be implemented
        def test___ne__(self):
            pass # to be implemented
        def test___lt__(self):
            pass # to be implemented
        def test___le__(self):
            pass # to be implemented
        def test___gt__(self):
            pass # to be implemented
        def test___ge__(self):
            pass # to be implemented
        def test_inv(self):
            pass # to be implemented
        def test_scan(self):
            pass # to be implemented
        def test___str__(self):
            pass # to be implemented
        def test___repr__(self):
            pass # to be implemented
        def test_params_from_string(self):
            pass # to be implemented
        def test_from_string(self):
            pass # to be implemented
        def test_install_table(self):
            pass # to be implemented
    # ----------------------------------------------------------------
    unittest.main()
| {
"content_hash": "375b59c7cfaf65a228e087478dde9f11",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 117,
"avg_line_length": 26.238095238095237,
"alnum_prop": 0.5269963702359347,
"repo_name": "johnkerl/sack",
"id": "8e570ca3704fd601b42231d03cdcabc7e8cf5e0f",
"size": "4678",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "spec_tm.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "185167"
},
{
"name": "Shell",
"bytes": "718"
}
],
"symlink_target": ""
} |
import scrapy
from locations.items import GeojsonPointItem
import json
class MerrillLynchSpider(scrapy.Spider):
    """Spider that collects Merrill Lynch branch locations from fa.ml.com."""

    name = 'merrilllynch'
    allowed_domains = ['ml.com']
    start_urls = ('https://fa.ml.com/',)

    def parse_branch(self, response):
        """Turn one branch-search JSON response into GeojsonPointItems."""
        results = json.loads(response.body_as_unicode())
        for branch in results["Results"]:
            yield GeojsonPointItem(
                ref=branch["UniqueId"],
                name=branch["Company"],
                addr_full=branch["Address1"].strip(),
                city=branch["City"],
                state=branch["Region"],
                country=branch["Country"],
                postcode=branch["PostalCode"],
                lat=float(branch["GeoLat"]),
                lon=float(branch["GeoLon"]),
                website=branch["XmlData"]["parameters"].get("Url"),
                extras={
                    # Address2 may be an empty string; normalize that to None.
                    'unit': branch.get("Address2") or None
                },
            )

    def parse(self, response):
        """Issue one branch-search POST per state abbreviation on the page."""
        abbrevs = response.xpath(
            '//section[@class="state-view"]//li/a/@data-state-abbrev').extract()
        search_url = 'https://fa.ml.com/locator/api/InternalSearch'
        for abbrev in abbrevs:
            # Key order matches the original dict so the JSON body is identical.
            body = json.dumps({
                "Locator": "MER-WM-Offices",
                "Region": abbrev,
                "Company": None,
                "ProfileTypes": "Branch",
                "DoFuzzyNameSearch": 0,
                "SearchRadius": 100,
            })
            yield scrapy.Request(
                search_url,
                method='POST',
                body=body,
                headers={'Content-Type': 'application/json'},
                callback=self.parse_branch,
            )
| {
"content_hash": "1d6a7e5d0831d22894af0dc2ddd774fc",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 100,
"avg_line_length": 35.88235294117647,
"alnum_prop": 0.48688524590163934,
"repo_name": "iandees/all-the-places",
"id": "5c716f9606f2d61dee2c38655c64700041d068ce",
"size": "1854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "locations/spiders/merrilllynch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2134"
},
{
"name": "Python",
"bytes": "116132"
},
{
"name": "Shell",
"bytes": "4477"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from ... import core
from ... import layers
from ... import framework
def append_cast_op(i, o, prog):
    """
    Append a cast op in a given Program to cast input `i` to data type `o.dtype`.

    Args:
        i (Variable): The input Variable.
        o (Variable): The output Variable.
        prog (Program): The Program to append cast op.
    """
    cast_attrs = {"in_dtype": i.dtype, "out_dtype": o.dtype}
    prog.global_block().append_op(
        type="cast", inputs={"X": i}, outputs={"Out": o}, attrs=cast_attrs)
def _rename_arg(op, old_name, new_name):
    """
    If an op has old_name input and output, rename these input
    args new_name.

    Args:
        op (Operator): Current operator.
        old_name (str): The old name of input args.
        new_name (str): The new name of input args.
    """
    desc = op.desc
    # The descriptor is sometimes handed back wrapped in a tuple; unwrap it.
    if isinstance(desc, tuple):
        desc = desc[0]
    desc._rename_input(old_name, new_name)
    desc._rename_output(old_name, new_name)
def _dtype_to_str(dtype):
    """
    Convert specific variable type to its corresponding string.

    Args:
        dtype (VarType): Variable type.

    Returns:
        str: 'fp16' for FP16 variables, 'fp32' for everything else.
    """
    return 'fp16' if dtype == core.VarDesc.VarType.FP16 else 'fp32'
def _insert_cast_op(block, op, idx, src_dtype, dest_dtype):
    """
    Insert cast op and rename args of input and output.

    Args:
        block (Program): The block in which the operator is.
        op (Operator): The operator to insert cast op.
        idx (int): The index of current operator.
        src_dtype (VarType): The input variable dtype of cast op.
        dest_dtype (VarType): The output variable dtype of cast op.

    Returns:
        num_cast_op (int): The number of cast ops that have been inserted.
    """
    num_cast_ops = 0
    # Only tensor-like variable types can be cast.
    valid_types = [
        core.VarDesc.VarType.LOD_TENSOR, core.VarDesc.VarType.SELECTED_ROWS,
        core.VarDesc.VarType.LOD_TENSOR_ARRAY
    ]
    for in_name in op.input_names:
        # batch_norm running in fp32 only has its 'X' input cast; the other
        # inputs (scale/bias/stats) are left untouched.
        if src_dtype == core.VarDesc.VarType.FP32 and op.type == 'batch_norm':
            if in_name != 'X':
                continue
        for in_var_name in op.input(in_name):
            in_var = block.var(in_var_name)
            if in_var.type not in valid_types:
                continue
            if in_var.dtype == src_dtype:
                # Create a casted twin of the input and insert the cast op
                # immediately before the current op.
                out_var = block.create_var(
                    name=in_var.name + \
                        '.cast_' + _dtype_to_str(dest_dtype),
                    dtype=dest_dtype,
                    persistable=False,
                    stop_gradient=False)
                block._insert_op(
                    idx,
                    type="cast",
                    inputs={"X": in_var},
                    outputs={"Out": out_var},
                    attrs={
                        "in_dtype": in_var.dtype,
                        "out_dtype": out_var.dtype
                    })
                num_cast_ops += 1
                # Re-point the op's arg at the casted variable.
                _rename_arg(op, in_var.name, out_var.name)
            else:
                if op.has_attr('in_dtype'):
                    op._set_attr('in_dtype', dest_dtype)
    # When casting fp32 -> fp16, also retag the op's outputs as fp16
    # (batch_norm: only its 'Y' output).
    if src_dtype == core.VarDesc.VarType.FP32:
        for out_name in op.output_names:
            if op.type == 'batch_norm' and out_name != 'Y':
                continue
            for out_var_name in op.output(out_name):
                out_var = block.var(out_var_name)
                if out_var.type not in valid_types:
                    continue
                if out_var.dtype == core.VarDesc.VarType.FP32:
                    out_var.desc.set_dtype(core.VarDesc.VarType.FP16)
        if op.has_attr('out_dtype'):
            op._set_attr('out_dtype', core.VarDesc.VarType.FP16)
    return num_cast_ops
def find_true_prev_op(ops, cur_op, var_name):
    """
    Find the true prev op that outputs var_name variable.

    Args:
        ops (list): A list of ops.
        cur_op (Operator): Current operator which has var_name variable.
        var_name (string): Variable name.

    Returns:
        The unique predecessor op, or None when no op before cur_op
        outputs var_name.

    Raises:
        ValueError: If more than one predecessor outputs var_name.
    """
    producers = []
    for candidate in ops:
        # Only ops scheduled before cur_op can be true predecessors.
        if candidate == cur_op:
            break
        for out_name in candidate.output_names:
            for out_var_name in candidate.output(out_name):
                if out_var_name == var_name:
                    producers.append(candidate)
    if not producers:
        return None
    if len(producers) != 1:
        raise ValueError("There must be only one previous op "
                         "that outputs {0} variable".format(var_name))
    return producers[0]
def rewrite_program(main_prog, amp_lists):
    """
    Traverse all ops in current block and insert cast op according to
    which set current op belongs to.

    1. When an op belongs to the black list, add it to black set
    2. When an op belongs to the white list, add it to white set
    3. When an op belongs to the gray list. If one
       of its inputs is the output of black set op or black list op,
       add it to black set. If all of its previous ops are not black
       op and one of its inputs is the output of white set op or
       white list op, add it to white set.
    4. When an op isn't in the lists, add it to black op set.
    5. Add necessary cast ops to make sure that black set op will be
       computed in fp32 mode, while white set op will be computed in
       fp16 mode.

    Args:
        main_prog (Program): The main program for training.
        amp_lists: Object holding black_list/white_list/gray_list op-type sets.
    """
    block = main_prog.global_block()
    ops = block.ops
    white_op_set = set()
    black_op_set = set()
    # Pass 1: classify every op into the black or white set.
    for op in ops:
        if op.type in amp_lists.black_list:
            black_op_set.add(op)
        elif op.type in amp_lists.white_list:
            white_op_set.add(op)
        elif op.type in amp_lists.gray_list:
            # Gray ops inherit their color from the ops that feed them.
            is_black_op = False
            is_white_op = False
            for in_name in op.input_names:
                # if this op has inputs
                if in_name:
                    for in_var_name in op.input(in_name):
                        in_var = block.var(in_var_name)
                        # this in_var isn't the output of other op
                        if in_var.op is None:
                            continue
                        elif in_var.op is op:
                            # The var records this op as its producer, so the
                            # real producer must be searched among earlier ops.
                            prev_op = find_true_prev_op(ops, op, in_var_name)
                            if prev_op is None:
                                continue
                        else:
                            prev_op = in_var.op
                        # if it's one of inputs
                        if prev_op in black_op_set or \
                                prev_op.type in amp_lists.black_list:
                            is_black_op = True
                        elif prev_op in white_op_set or \
                                prev_op.type in amp_lists.white_list:
                            is_white_op = True
            if is_black_op:
                black_op_set.add(op)
            elif is_white_op:
                white_op_set.add(op)
            else:
                pass
        else:
            # For numerical safe, we apply fp32 computation on ops that
            # are not determined which list they should stay.
            black_op_set.add(op)

    # Pass 2: insert the cast ops. The while loop re-reads len(ops) because
    # _insert_cast_op grows the op list in place.
    idx = 0
    while idx < len(ops):
        op = ops[idx]
        num_cast_ops = 0
        if op in black_op_set:
            num_cast_ops = _insert_cast_op(block, op, idx,
                                           core.VarDesc.VarType.FP16,
                                           core.VarDesc.VarType.FP32)
        elif op in white_op_set:
            num_cast_ops = _insert_cast_op(block, op, idx,
                                           core.VarDesc.VarType.FP32,
                                           core.VarDesc.VarType.FP16)
        else:
            pass
        # Skip over the casts that were just inserted before this op.
        idx += num_cast_ops + 1
def update_role_var_grad(main_prog, params_grads):
    """
    Update op_role_var attr for some ops to make sure the gradients
    transferred across gpus is FP16.

    1. Check whether the op that outputs gradient is cast or not.
    2. If op is cast and gradient is FP32, remove the op_role_var
       and find the prev op which outputs FP16 gradient
    3. Update the op_role_var of the prev op.

    Args:
        main_prog (Program): The main program for training.
        params_grads (list): A list of params and grads.
    """
    block = main_prog.global_block()
    BACKWARD = core.op_proto_and_checker_maker.OpRole.Backward
    OPTIMIZE = core.op_proto_and_checker_maker.OpRole.Optimize
    for p, g in params_grads:
        op = g.op
        # Only fp32 gradients produced by a cast op need fixing up.
        if g.dtype == core.VarDesc.VarType.FP32 and op.type == 'cast':
            role = op.attr('op_role')
            if role & int(BACKWARD) and op.has_attr('op_role_var'):
                op.desc.remove_attr("op_role_var")
            else:
                raise ValueError("The cast op {0} must be in BACKWARD role "
                                 "and have op_role_var attr.".format(op))
            # The cast's input is the fp16 gradient we actually want tagged.
            fp16_grad_name = op.input(op.input_names[0])[0]
            op_for_fp16_grad = find_true_prev_op(block.ops, op, fp16_grad_name)
            op_role_var_attr_name = \
                core.op_proto_and_checker_maker.kOpRoleVarAttrName()
            attr_val = [p.name, fp16_grad_name]
            if op_for_fp16_grad.has_attr(op_role_var_attr_name):
                attr_val.extend(op_for_fp16_grad.attr(op_role_var_attr_name))
            # Move the (param, fp16 grad) pairing onto the producing op.
            op_for_fp16_grad._set_attr(op_role_var_attr_name, attr_val)
            # maximize the allreduce overlap
            op._set_attr('op_role', OPTIMIZE)
def update_loss_scaling(is_overall_finite, prev_loss_scaling, num_good_steps,
                        num_bad_steps, incr_every_n_steps,
                        decr_every_n_nan_or_inf, incr_ratio, decr_ratio):
    """
    Update loss scaling according to overall gradients. If all gradients is
    finite after incr_every_n_steps, loss scaling will increase by incr_ratio.
    Otherwise, loss scaling will decrease by decr_ratio after
    decr_every_n_nan_or_inf steps and each step some gradients are infinite.

    Args:
        is_overall_finite (Variable): A boolean variable indicates whether
                                      all gradients are finite.
        prev_loss_scaling (Variable): Previous loss scaling.
        num_good_steps (Variable): A variable accumulates good steps in which
                                   all gradients are finite.
        num_bad_steps (Variable): A variable accumulates bad steps in which
                                  some gradients are infinite.
        incr_every_n_steps (Variable): A variable represents increasing loss
                                       scaling every n consecutive steps with
                                       finite gradients.
        decr_every_n_nan_or_inf (Variable): A variable represents decreasing
                                            loss scaling every n accumulated
                                            steps with nan or inf gradients.
        incr_ratio(float): The multiplier to use when increasing the loss
                           scaling.
        decr_ratio(float): The less-than-one-multiplier to use when decreasing
                           loss scaling.
    """
    zero_steps = layers.fill_constant(shape=[1], dtype='int32', value=0)
    with layers.Switch() as switch:
        # Branch 1: every gradient this step was finite.
        with switch.case(is_overall_finite):
            should_incr_loss_scaling = layers.less_than(incr_every_n_steps,
                                                        num_good_steps + 1)
            with layers.Switch() as switch1:
                with switch1.case(should_incr_loss_scaling):
                    new_loss_scaling = prev_loss_scaling * incr_ratio
                    loss_scaling_is_finite = layers.isfinite(new_loss_scaling)
                    with layers.Switch() as switch2:
                        with switch2.case(loss_scaling_is_finite):
                            layers.assign(new_loss_scaling, prev_loss_scaling)
                        with switch2.default():
                            # Increase would overflow: keep the old scaling.
                            pass
                    # Either way the streak counters restart.
                    layers.assign(zero_steps, num_good_steps)
                    layers.assign(zero_steps, num_bad_steps)
                with switch1.default():
                    # Not enough consecutive good steps yet; keep counting.
                    layers.increment(num_good_steps)
                    layers.assign(zero_steps, num_bad_steps)
        # Branch 2: some gradient was nan/inf this step.
        with switch.default():
            should_decr_loss_scaling = layers.less_than(decr_every_n_nan_or_inf,
                                                        num_bad_steps + 1)
            with layers.Switch() as switch3:
                with switch3.case(should_decr_loss_scaling):
                    new_loss_scaling = prev_loss_scaling * decr_ratio
                    static_loss_scaling = \
                        layers.fill_constant(shape=[1],
                                             dtype='float32',
                                             value=1.0)
                    less_than_one = layers.less_than(new_loss_scaling,
                                                     static_loss_scaling)
                    with layers.Switch() as switch4:
                        # Never let the loss scaling drop below 1.0.
                        with switch4.case(less_than_one):
                            layers.assign(static_loss_scaling,
                                          prev_loss_scaling)
                        with switch4.default():
                            layers.assign(new_loss_scaling, prev_loss_scaling)
                    layers.assign(zero_steps, num_good_steps)
                    layers.assign(zero_steps, num_bad_steps)
                with switch3.default():
                    layers.assign(zero_steps, num_good_steps)
                    layers.increment(num_bad_steps)
| {
"content_hash": "36a43053ad0e8ebf5ceb6d4284ab2589",
"timestamp": "",
"source": "github",
"line_count": 336,
"max_line_length": 81,
"avg_line_length": 41.270833333333336,
"alnum_prop": 0.5210211292997764,
"repo_name": "tensor-tang/Paddle",
"id": "05dfe27303505903533d6404de0e6ffe51a661ad",
"size": "14480",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/contrib/mixed_precision/fp16_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32490"
},
{
"name": "C++",
"bytes": "10161819"
},
{
"name": "CMake",
"bytes": "290828"
},
{
"name": "Cuda",
"bytes": "1183095"
},
{
"name": "Dockerfile",
"bytes": "10002"
},
{
"name": "Python",
"bytes": "7082088"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Shell",
"bytes": "200906"
}
],
"symlink_target": ""
} |
"""
test_make_webpage_rdf.py -- given a uri and optional attributes,
make RDF for a webpage entity
Version 0.1 MC 2013-12-121
-- Initial version.
"""
__author__ = "Michael Conlon"
__copyright__ = "Copyright 2013, University of Florida"
__license__ = "BSD 3-Clause license"
__version__ = "0.1"
import vivotools as vt
from datetime import datetime
print datetime.now(),"Start"
print vt.make_webpage_rdf("http://google.com/article_name")
print vt.make_webpage_rdf(None)
print vt.make_webpage_rdf("http://google.com/article_name",rank="2")
print vt.make_webpage_rdf("http://google.com/article_name",\
harvested_by="Test Harvest",\
link_anchor_text="Home Page",\
rank="8",\
uri_type="http://vivo.ufl.edu/ontology/vivo-ufl/SomeType")
print vt.make_webpage_rdf("http://google.com/article_name")
print datetime.now(),"Finish"
| {
"content_hash": "0e0f618a344c600b14fdaac4ac643977",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 68,
"avg_line_length": 31.107142857142858,
"alnum_prop": 0.6796785304247991,
"repo_name": "mconlon17/vivo-1.6-upgrade",
"id": "b6ee8496e882c73e0d9f1ca687b1c6fdfb89f83e",
"size": "871",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/test_make_webpage_rdf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "68639"
},
{
"name": "TeX",
"bytes": "551"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
import settings
import argparse
import sys
import hashlib
import os
import logging
from source import HttpSource
from source import FileSource
if sys.version_info[0] < 3:
from ConfigParser import SafeConfigParser
else:
from configparser import SafeConfigParser
CONFIG = 'config.ini'
REPORT_FILE = 'verification_report.txt'
def getKey(item):
    """Sort-key helper: return the first element of *item*."""
    first = item[0]
    return first
def check_sources(origSource, newSource):
    """Walk every resource in *newSource* and compare it against *origSource*.

    For each resource: fetch and sort the RDF triples from both systems,
    report resources missing from the original system and triple mismatches,
    and for binary resources also compare SHA1 digests via check_binaries.

    Args:
        origSource: Source treated as the reference system.
        newSource: Iterable source whose resources are being verified.

    Returns:
        dict with counts: 'rec_count', 'missing', 'desc_mismatch',
        'bin_mismatch'.
    """
    num_missing = 0
    num_desc_match_err = 0
    num_bin_match_err = 0
    num_files = 0
    for newResourceName in newSource:
        binary = False
        try:
            # fetch the resource triples from new place & sort it
            newTriplesStr = newSource.fetchResourceTriples(newResourceName)
            newTriples = sorted(newTriplesStr.strip().split('\n'))
            # fetch the resource from original place
            # (the subject URI is sliced out of the first sorted triple)
            origResourceName = translate_to_desc(newSource, origSource, newTriples[0][1:newTriples[0].find('> <')])
            origTriples = origSource.fetchResourceTriples(origResourceName)
            # was the resource there?
            if origTriples is None:
                logger.error('ERR: Resource Missing: Resource not found in original system:\n\t{0}'.format(newResourceName))
                num_missing += 1
                continue
            logger.info('Looking at: {}...'.format(newResourceName))
            # sort the triples
            origTriples = sorted(origTriples.strip().split('\n'))
            if settings.FCREPO_BINARY_URI in newTriplesStr:
                binary = True
            # test if they are eqivalent
            if set(origTriples) != set(newTriples):
                logger.error('ERR: Resource Mismatch: {0}'.format(newResourceName))
                num_desc_match_err += 1
            if binary is True:
                # Pull the recorded SHA1 out of the premis hasMessageDigest triple.
                origSHA1 = [x for x in origTriples if 'http://www.loc.gov/premis/rdf/v1#hasMessageDigest' in x]
                if len(origSHA1) != 1:
                    logger.error('Couldn\'t find SHA1 for binary: {0}\n'.format(newResourceName))
                origSHA1 = origSHA1[0][origSHA1[0].rfind('> <') + 3:-3]
                if not check_binaries(origSource, newSource, origResourceName, newResourceName,
                                      origSHA1.replace('urn:sha1:', '')):
                    num_bin_match_err += 1
            num_files += 1
        except IOError as err:
            logger.error('Unable to access resource: {0}\nError: {1}\n'.format(newResourceName, err))
    logger.info('\nDone checking objects. Looked at {0} objects in {1}.\n'.format(num_files, newSource))
    return {'rec_count':num_files, 'missing':num_missing, 'desc_mismatch':num_desc_match_err,
            'bin_mismatch':num_bin_match_err}
def check_binaries(origSource, newSource, origResourceName, newResourceName, origResourceSHA1):
    """Compare one binary resource between the two systems by SHA1.

    Returns True only when the SHA1 recorded in the original description,
    the digest computed from the original system's bytes, and the digest
    computed from the new system's bytes all agree; logs and returns False
    otherwise.
    """
    orig_hash = hashlib.sha1()
    orig_hash.update(origSource.fetchBinaryResource(origResourceName))
    new_hash = hashlib.sha1()
    new_hash.update(newSource.fetchBinaryResource(newResourceName))
    orig_digest = orig_hash.hexdigest()
    new_digest = new_hash.hexdigest()
    logger.debug('SHA1:\n\tresource: {0}\n\torig sha1: {1}\n\tnew sha1: {2}'.format(
        origResourceSHA1, orig_digest, new_digest))
    # logic: compare what the original description file says to what was computed.
    # then compare the two newly computed values, if all is well, they should all be equal
    if origResourceSHA1 == orig_digest and orig_digest == new_digest:
        return True
    logger.debug('SHA1:\n\tfrom resource: {0}\n\torig sha1: {1}\n\tnew sha1: {2}'.format(
        origResourceSHA1, orig_digest, new_digest))
    logger.error('ERR: Binary Mismatch: Binary resources do not match for resource: {}'.format(
        origResourceName))
    logger.error('\tSHA1:\n\t\tresource: {0}\n\t\torig sha1: {1}\n\t\tnew sha1: {2}'.format(
        origResourceSHA1, orig_digest, new_digest))
    return False
# normalize the resource string for the different systems.
# this will return the translation for getting the description data for an object.
def translate_to_desc(origin, recipient, resource):
    """Translate *resource*'s identifier from *origin*'s naming scheme to
    the one *recipient* uses for description (metadata) data.

    File-based names use settings.FILE_FCR_METADATA; HTTP (Fedora) names
    use the 'fcr:metadata' suffix. Returns the translated identifier, or
    None when the origin/recipient combination is not handled.
    """
    logger.debug("translate: resource is: {0}\n\tfrom:{1}\n\tto:{2}".format(resource, origin, recipient))
    if isinstance(origin, FileSource):
        # Swap the base URI first; refine the metadata suffix below.
        res = resource.replace(origin.getBaseUri(), recipient.getBaseUri())
        if isinstance(recipient, FileSource):
            return resource
        if isinstance(recipient, HttpSource):
            if settings.FILE_FCR_METADATA in resource:
                return res.replace(settings.FILE_FCR_METADATA, 'fcr:metadata')
            else:
                return res + '/fcr:metadata'
    elif isinstance(origin, HttpSource):
        if isinstance(recipient, HttpSource):
            return resource
        if isinstance(recipient, FileSource):
            if origin.is_binary(resource):
                # Binary resources keep their description under the
                # file-side metadata name.
                if 'fcr:metadata' in resource:
                    res = resource.replace('fcr:metadata', settings.FILE_FCR_METADATA)
                    return res.replace(origin.baseUri, recipient.desc_dir)
                else:
                    res = resource + '/' + settings.FILE_FCR_METADATA
                    return res.replace(origin.baseUri, recipient.desc_dir)
            else:
                # Non-binary: strip any trailing slash and append the
                # file extension used by the export.
                res = resource
                if res.endswith('/'):
                    res = res[:-1]
                res += recipient.getFileExt()
                return res.replace(origin.baseUri, recipient.desc_dir)
if __name__ == '__main__':
    # ---- Command line ------------------------------------------------
    parser = argparse.ArgumentParser()
    parser.add_argument('--loglevel',
                        help='''Level of output into log [DEBUG, INFO, WARN,
                        ERROR], default is WARN. To list the records
                        being looked at, set this to INFO''',
                        default='WARN')
    parser.add_argument('--config', '-c', help='Path to import/export config')
    parser.add_argument('--user', '-u', help='''Server credentials in the form
                        user:password''')
    args = parser.parse_args()
    settings.init()
    # ---- Configuration: either the local config.ini or an
    # import/export-tool config file passed via --config ----------------
    if not args.config:
        cfgParser = SafeConfigParser()
        cfgParser.read(CONFIG)
        test_mode = cfgParser.get('general', 'test_mode')
        fedoraUrl = cfgParser.get('fedora1', 'baseUri')
        auth = tuple(cfgParser.get('fedora1', 'auth').split(':'))
        fileDir = cfgParser.get('file1', 'baseUri')
        if not fileDir.endswith('/'):
            fileDir += '/'
        fileExt = cfgParser.get('file1', 'ext')
        binDir = (fileDir +
                  cfgParser.get('file1', 'bin_path') +
                  cfgParser.get('file1', 'prefix')
                  )
        descDir = (fileDir +
                   cfgParser.get('file1', 'desc_path') +
                   cfgParser.get('file1', 'prefix')
                   )
        out_file = cfgParser.get('general', 'report_dir') + REPORT_FILE
    else:
        print("loading opts from import/export config file")
        with open(args.config, 'r') as f:
            opts = [line for line in f.read().split('\n')]
        # The import/export config is flag-on-one-line, value-on-the-next.
        for line in range(len(opts)):
            if opts[line] == '-m':
                test_mode = opts[line + 1]
            elif opts[line] == '-r':
                fedoraUrl = opts[line + 1]
            elif opts[line] == '-d':
                descPath = opts[line + 1]
            elif opts[line] == '-b':
                binPath = opts[line + 1]
            elif opts[line] == '-x':
                fileExt = opts[line + 1]
            elif opts[line] == '-l':
                pass
            else:
                pass
        fileDir = os.path.commonprefix([descPath, binPath])
        descDir = fileDir + os.path.relpath(descPath, fileDir) + "/rest"
        binDir = fileDir + os.path.relpath(binPath, fileDir) + "/rest"
        out_file = './verification_report.txt'
        auth = tuple(args.user.split(':'))
    # ---- Logging: everything goes to the report file ------------------
    loglevel = args.loglevel
    numeric_level = getattr(logging, loglevel.upper(), None)
    logger = logging.getLogger('output')
    filehandler = logging.FileHandler(filename=out_file, mode='w')
    filehandler.setLevel(numeric_level)
    logger.addHandler(filehandler)
    logger.setLevel(numeric_level)
    logger.debug('bin_dir = {0}\ndesc_dir = {1}'.format(binDir, descDir))
    # The export layout must keep binaries and descriptions apart.
    if (binDir is None or descDir is None) or (binDir == descDir):
        logger.error('Unable to run: the export must be in two separate directories.')
        exit()
    # ---- Run the requested comparison(s) ------------------------------
    httpSource = HttpSource(fedoraUrl, auth)
    fileSource = FileSource(fileDir, descDir, binDir, fileExt)
    logger.warn('Checking differences between two systems:');
    logger.warn('\tSource One: {0}\n\tSource Two: {1}\n'.format(fedoraUrl, fileDir))
    import_stats = {}
    export_stats = {}
    if test_mode == 'export' or test_mode == 'both':
        logger.warn('------- Export test: walking the files comparing them to Fedora ---------\n')
        export_stats = check_sources(origSource=httpSource, newSource=fileSource)
    if test_mode == 'import' or test_mode == 'both':
        logger.warn('------- Import test: walking Fedora comparing that to the files ---------\n')
        import_stats = check_sources(origSource=fileSource, newSource=httpSource)
    # ---- Summary ------------------------------------------------------
    total_objects = 0
    if len(export_stats):
        logger.warn('Export test results:\n\tMissing Records: {}'.format(export_stats['missing']))
        logger.warn('\tRDF Resource Mismatch: {}'.format(export_stats['desc_mismatch']))
        logger.warn('\tNon RDF Resource Mismatch: {}'.format(export_stats['bin_mismatch']))
        total_objects = export_stats['rec_count']
    if len(import_stats):
        logger.warn('Import test results:\n\tMissing Records: {}'.format(import_stats['missing']))
        logger.warn('\tRDF Resource Mismatch: {}'.format(import_stats['desc_mismatch']))
        logger.warn('\tNon RDF Resource Mismatch: {}'.format(import_stats['bin_mismatch']))
        total_objects += import_stats['rec_count']
    logger.warn('*'*100)
    logger.warn('\nFinished verifying systems. Looked at {0} total objects.\n'.format(total_objects))
| {
"content_hash": "452bb8b21d30313021a0c55e89cc98dd",
"timestamp": "",
"source": "github",
"line_count": 258,
"max_line_length": 124,
"avg_line_length": 39.93798449612403,
"alnum_prop": 0.5987965838509317,
"repo_name": "bseeger/fcrepo-sample-dataset-tests",
"id": "8ed01def389cef817c14ad59c886d43efc7b3fd1",
"size": "11166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fcrepo-import-export-tests/verify/verify.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17567"
}
],
"symlink_target": ""
} |
"""
Tests For Goodness Weigher.
"""
from jacket.storage.scheduler.weights import goodness
from jacket.storage import test
from jacket.tests.storage.unit.scheduler import fakes
class GoodnessWeigherTestCase(test.TestCase):
    """Exercise GoodnessWeigher._weigh_object across goodness_function inputs."""

    def setUp(self):
        super(GoodnessWeigherTestCase, self).setUp()

    @staticmethod
    def _host(host_id, hostname, capabilities, **extra_stats):
        """Build a FakeHostState with the given capabilities and host stats."""
        spec = {'host': hostname, 'capabilities': capabilities}
        spec.update(extra_stats)
        return fakes.FakeHostState(host_id, spec)

    def test_goodness_weigher_with_no_goodness_function(self):
        # No goodness_function capability: weight is 0.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com', {'foo': '50'})
        self.assertEqual(0, weigher._weigh_object(host, {}))

    def test_goodness_weigher_passing_host(self):
        # Constant goodness functions evaluate to the host's weight directly.
        weigher = goodness.GoodnessWeigher()
        full = self._host('host1', 'host.example.com',
                          {'goodness_function': '100'})
        zero = self._host('host2', 'host2.example.com',
                          {'goodness_function': '0'})
        half = self._host('host3', 'host3.example.com',
                          {'goodness_function': '100 / 2'})
        self.assertEqual(100, weigher._weigh_object(full, {}))
        self.assertEqual(0, weigher._weigh_object(zero, {}))
        self.assertEqual(50, weigher._weigh_object(half, {}))

    def test_goodness_weigher_capabilities_substitution(self):
        # 'capabilities.<key>' resolves against the host's capabilities dict.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com', {
            'foo': 50,
            'goodness_function': '10 + capabilities.foo'
        })
        self.assertEqual(60, weigher._weigh_object(host, {}))

    def test_goodness_weigher_extra_specs_substitution(self):
        # 'extra.<key>' resolves against the volume type's extra specs.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com',
                          {'goodness_function': '10 + extra.foo'})
        props = {'volume_type': {'extra_specs': {'foo': 50}}}
        self.assertEqual(60, weigher._weigh_object(host, props))

    def test_goodness_weigher_volume_substitution(self):
        # 'volume.<key>' resolves against the request's volume properties.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com',
                          {'goodness_function': '10 + volume.foo'})
        props = {'request_spec': {'volume_properties': {'foo': 50}}}
        self.assertEqual(60, weigher._weigh_object(host, props))

    def test_goodness_weigher_qos_substitution(self):
        # 'qos.<key>' resolves against the request's QoS specs.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com',
                          {'goodness_function': '10 + qos.foo'})
        props = {'qos_specs': {'foo': 50}}
        self.assertEqual(60, weigher._weigh_object(host, props))

    def test_goodness_weigher_stats_substitution(self):
        # 'stats.<key>' resolves against the host's own stats.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com',
                          {'goodness_function': 'stats.free_capacity_gb > 20'},
                          free_capacity_gb=50)
        self.assertEqual(100, weigher._weigh_object(host, {}))

    def test_goodness_weigher_invalid_substitution(self):
        # Referencing a stat the host does not report yields weight 0.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com',
                          {'goodness_function': '10 + stats.my_val'},
                          foo=50)
        self.assertEqual(0, weigher._weigh_object(host, {}))

    def test_goodness_weigher_host_rating_out_of_bounds(self):
        # Ratings outside the valid range yield weight 0.
        weigher = goodness.GoodnessWeigher()
        low = self._host('host1', 'host.example.com',
                         {'goodness_function': '-10'})
        high = self._host('host2', 'host2.example.com',
                          {'goodness_function': '200'})
        self.assertEqual(0, weigher._weigh_object(low, {}))
        self.assertEqual(0, weigher._weigh_object(high, {}))

    def test_goodness_weigher_invalid_goodness_function(self):
        # A function that fails to evaluate (division by zero) yields 0.
        weigher = goodness.GoodnessWeigher()
        host = self._host('host1', 'host.example.com',
                          {'goodness_function': '50 / 0'})
        self.assertEqual(0, weigher._weigh_object(host, {}))
| {
"content_hash": "3fbf0a9016001906fd8fd2167b60133d",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 71,
"avg_line_length": 32.71891891891892,
"alnum_prop": 0.541714852139435,
"repo_name": "HybridF5/jacket",
"id": "928da51f4f7f06539465dcea79972e3d89c3adc7",
"size": "6663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jacket/tests/storage/unit/scheduler/test_goodness_weigher.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "26995056"
},
{
"name": "Shell",
"bytes": "28464"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
} |
"""The main URLconf for the project."""
# We'll use Django's tools for parsing URLs/Routes.
from django.conf.urls import include, url
# Define the URLs/Routes for the project.
# Root URL routing table for the project.
urlpatterns = [
    # Mount the ``health`` app's URLconf under the ``health`` prefix.
    # NOTE(review): the pattern has no trailing slash or end anchor, so it
    # also matches paths such as ``/healthfoo`` — confirm this is intended.
    url(r'^health', include('apps.health.urls')),
]
| {
"content_hash": "31502e17894cd52a04388ba4fd3f976a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 61,
"avg_line_length": 27.923076923076923,
"alnum_prop": 0.6804407713498623,
"repo_name": "0xadada/dockdj",
"id": "473c5d31eb768ed2159a0db1197afbc42d3afa77",
"size": "387",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/project/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4767"
},
{
"name": "HTML",
"bytes": "2954"
},
{
"name": "JavaScript",
"bytes": "6017"
},
{
"name": "Python",
"bytes": "25144"
},
{
"name": "Shell",
"bytes": "21884"
}
],
"symlink_target": ""
} |
"""Localize route predicate."""
def language(field):
    """Build a route predicate that validates *field* against available locales."""
    def predicate(info, request):
        """Return True when the matched value is one of the configured locales."""
        match = info["match"]
        if field not in match:
            return False
        available = request.registry["localize"]["locales"]["available"]
        return match[field] in available
    return predicate
language.__text__ = "language predicate, to determine allowed languages in route"
| {
"content_hash": "8cb82cdc69b8a1a8f63c49ae9794cfe6",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 115,
"avg_line_length": 30.0625,
"alnum_prop": 0.6611226611226612,
"repo_name": "fizyk/pyramid_localize",
"id": "9230881e6592725ea5f1d462309626b6508b73e1",
"size": "697",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/pyramid_localize/routing/predicates.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "2727"
},
{
"name": "Python",
"bytes": "38493"
}
],
"symlink_target": ""
} |
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render_to_response
from models import Region,Egroups,ZoneAllowedResourceType,TopLevelAllocationByZone,TopLevelAllocation
from forms import RegionForm
from django.db.models import Sum
from templatetags.filters import displayNone
from models import Zone
from django.db import transaction
from getPrivileges import isSuperUser
from ldapSearch import checkEGroup
from django.db.models import Q
from commonFunctions import addLog,getLog,getRegionInfo,addUpdateLog
from validator import *
from django.db import connection
import simplejson
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from commonFunctions import *
def checkNameIgnoreCase(regionName):
    """Return True when a region with this name already exists (case-insensitive)."""
    return Region.objects.filter(name__iexact=regionName).exists()
def isAdminOfAnyRegion(adminGroups):
    """Return True when any e-group in adminGroups administers some region."""
    if not adminGroups:
        return False
    # OR together one Q() filter per candidate group.
    qset = Q(admin_group__exact=adminGroups[0])
    for group in adminGroups[1:]:
        qset = qset | Q(admin_group__exact=group)
    return Region.objects.filter(qset).exists()
def isAdminForRegion(regionName, adminGroups):
    """Return True when the region's admin e-group is one of adminGroups."""
    if not adminGroups:
        return False
    try:
        regionObj = Region.objects.get(name=regionName)
    except Region.DoesNotExist:
        # Unknown region: caller is not its admin.
        return False
    return regionObj.admin_group.name in adminGroups
def isUserAllowedToUpdateOrDeleteRegion(regionName, groupsList):
    """Superusers may always modify a region; others need region admin membership."""
    if isSuperUser(groupsList):
        return True
    return isAdminForRegion(regionName, groupsList)
@transaction.commit_on_success
def addnew(request):
    """Create a new Region (superuser only).

    GET renders the creation form; POST validates it, creates the region and
    its admin e-group record if needed, logs the action, and redirects.
    NOTE(review): a POST with an invalid form falls through to the final
    render with the bound form — the ``else`` branch only runs for GET.
    """
    groups = request.META.get('ADFS_GROUP','')
    groupsList = groups.split(';') ;
    userIsSuperUser = isSuperUser(groupsList)
    ## Check user cloudman resource manager privileges
    if not userIsSuperUser:
        message = "You don't have cloudman resource manager privileges. Hence you are not authorized to add new Region";
        html = "<html><body> %s.</body></html>" % message
        return HttpResponse(html)
    if request.method == 'POST':
        form = RegionForm(request.POST)
        ### Check whether all the fields for creating a region are provided with non-empty values
        if form.is_valid():
            redirectURL = '/cloudman/message/?msg='
            name = form.cleaned_data['name']
            regionExists = checkNameIgnoreCase(name)
            if regionExists:
                msgAlreadyExists = 'Region ' + name + ' already exists. Hence Add Region Operation Stopped'
                return HttpResponseRedirect(redirectURL + msgAlreadyExists)
            description = form.cleaned_data['description']
            admin_group = form.cleaned_data['admin_group']
            comment = form.cleaned_data['comment']
            ## Check that name provided as admin_group exists in the EGroups TABLE
            ## If not, then check its existence in external egroup database through ldap
            ## If not present there also, then raise an alert or else add the group name to EGroups table also
            egroup = None
            try:
                egroup = Egroups.objects.get(name=admin_group)
            except Egroups.DoesNotExist:
                if not (checkEGroup(admin_group)):
                    errorMessage = 'Selected Admin E-Group ' + admin_group + ' does not exists'
                    return HttpResponseRedirect(redirectURL + errorMessage)
                egroup = Egroups(name=admin_group)
                egroup.save()
            ## Create the Region with all the required values
            regionObj = Region(name=name, description=description, admin_group=egroup)
            regionObj.save()
            # Re-fetch so the logged object carries its database id.
            regionObj = Region.objects.get(name=name)
            if addLog(request,name,comment,regionObj,None,'region','add',True):
                transaction.commit()
                ## Return the Success message
                msgSuccess = 'New region ' + name + ' added successfully'
            else:
                # Logging failed: roll back the region creation too.
                transaction.rollback()
                msgSuccess = 'Error in creating New region ' + name
            html = "<html><HEAD><meta HTTP-EQUIV=\"REFRESH\" content=\"4; url=/cloudman/region/list/\"></HEAD><body> %s.</body></html>" % msgSuccess
            return HttpResponse(html)
    else:
        ## If not POST operation, then return the Region Creation Form
        form = RegionForm()
    # Template context comes from locals(): it reads ``form`` (and friends).
    return render_to_response('region/addnew.html',locals(),context_instance=RequestContext(request))
def regionAllZoneInfo(request):
    """Return a JSON list describing every zone of the requested region."""
    mimetype = 'application/javascript'
    regionName = request.REQUEST.get("name", "")
    jsondata = []
    try:
        for zone in Zone.objects.filter(region__name=regionName).all():
            jsondata.append({'hepspec': displayNone(zone.hepspecs),
                             'memory': displayNone(zone.memory),
                             'storage': displayNone(zone.storage),
                             'bandwidth': displayNone(zone.bandwidth),
                             'zonename': zone.name,
                             'description': zone.description,
                             'memoryovercommit': zone.memory_overcommit,
                             'hepspecovercommit': zone.hepspec_overcommit})
    except Exception:
        # Best-effort endpoint: log the traceback and return whatever was built.
        printStackTrace()
    return HttpResponse(simplejson.dumps(jsondata), mimetype)
def listall(request):
    """Render all regions with their aggregated per-region zone capacity.

    The template context is ``locals()``, so every local name below is part
    of the template contract (``regionInfoList``, ``userIsSuperUser``, ...).
    """
    groups = request.META.get('ADFS_GROUP','')
    groupsList = groups.split(';') ;
    userIsSuperUser = isSuperUser(groupsList)
    # Aggregate zone resources per region in a single query.
    zoneInfo = Zone.objects.values('region_id').annotate(hepspec=Sum('hepspecs'),memory=Sum('memory'),storage=Sum('storage'),bandwidth=Sum('bandwidth'))
    region_capacity = {}
    for item in zoneInfo:
        region_capacity[item['region_id']] = {'hepspec':item['hepspec'],'memory':item['memory'],'storage':item['storage'],'bandwidth':item['bandwidth']}
    regionInfoList = []
    regionsList = Region.objects.all().order_by('name').select_related('admin_group__name')
    for region in regionsList:
        # Regions without any zone get an all-None capacity placeholder.
        if region.id in region_capacity:
            regionInfoList.append({'name':region.name,'egroup':region.admin_group.name,'description':region.description,'capacity':region_capacity[region.id]})
        else:
            regionInfoList.append({'name':region.name,'egroup':region.admin_group.name,'description':region.description,'capacity':{'hepspec':None,'memory':None,'storage':None,'bandwidth':None}})
    return render_to_response('region/listall.html',locals(),context_instance=RequestContext(request))
def getdetails(request):
    """Render the detail page for one region: zones, allowed resource types
    and its change log.  Template context is ``locals()``."""
    regionName = request.REQUEST.get("name", "")
    regionInfo = None
    redirectURL = '/cloudman/message/?msg='
    ## Get the region object
    try:
        regionInfo = Region.objects.select_related('admin_group').get(name=regionName)
    except Region.DoesNotExist:
        errorMessage = 'Region Name ' + regionName + ' does not exists'
        return HttpResponseRedirect(redirectURL + errorMessage)
    ## Get the zones information located in this region
    zonesInfo = Zone.objects.filter(region__name = regionName).order_by('name')
    ## Get the allowed resource types information for all the zones present in this region
    allowedResourceTypesList = ZoneAllowedResourceType.objects.select_related('resource_type','zone').filter(zone__region__name=regionName).order_by('resource_type__name')
    object_id = regionInfo.id
    changeLogList = getLog('region',regionName,object_id,None)
    return render_to_response('region/getdetails.html',locals(),context_instance=RequestContext(request))
@transaction.commit_on_success
def delete(request):
    """Delete a region (region admin or superuser), refusing when zones exist.

    The deletion is logged first; if logging fails the whole transaction is
    rolled back so the region is kept.
    """
    regionName = request.REQUEST.get("name", "")
    comment = request.REQUEST.get("comment", "deleting")
    redirectURL = '/cloudman/message/?msg='
    groups = request.META.get('ADFS_GROUP','')
    groupsList = groups.split(';') ;
    ## Update is allowed if user has either cloudman resource manager privileges or
    ## belongs to the egroup selected as administrative e-group for this region
    if not isUserAllowedToUpdateOrDeleteRegion(regionName,groupsList):
        message = "You neither have membership of administrative group of region " + regionName + " nor possess Cloudman Resource Manager Privileges. Hence you are not authorized to Delete Region"
        html = "<html><body> %s.</body></html>" % message
        return HttpResponse(html)
    ## Get the Region Object
    regionObject = None
    try:
        regionObject = Region.objects.get(name=regionName)
    except Region.DoesNotExist:
        failureMessage = "Region with Name " + regionName + " could not be found"
        return HttpResponseRedirect(redirectURL+failureMessage)
    ## Check whether any zones are defined for this region
    zoneNames = Zone.objects.filter(region__name__iexact = regionName).values_list('name', flat=True).order_by('name')
    finalMessage = ''
    zoneNamesList = list(zoneNames)
    ## If zones are defined, then alert the user and do not delete the region
    if len(zoneNamesList) > 0:
        finalMessage = finalMessage + "Zone Names: " + (', '.join(zoneNamesList)) + "<br/>"
    if not finalMessage == '':
        finalMessage = "Region with Name " + regionName + " Could not be deleted because it is being used in " + "<br/>" + finalMessage
        html = "<html><body> %s</body></html>" % finalMessage
        return HttpResponse(html)
    ## If no zones, then delete the region and return a success message to the user
    # Log before deleting so the log entry can reference the live object.
    status = addLog(request,regionName,comment,regionObject,None,'region','delete',False)
    regionObject.delete()
    if status:
        transaction.commit()
        message = "Region with Name " + regionName + " deleted successfully "
    else:
        # Logging failed: undo the delete as well.
        transaction.rollback()
        message = "Error in deleting Region with Name " + regionName
    html = "<html><HEAD><meta HTTP-EQUIV=\"REFRESH\" content=\"4; url=/cloudman/region/list/\"></HEAD><body> %s.</body></html>" % message
    return HttpResponse(html)
def getstats(request):
    """Return region/zone hepspec usage statistics as JSON for AJAX charts."""
    ## Provide the region resource statistics in json format
    ## If the request is not an ajax call, then return status 400 - BAD REQUEST
    ## First, the entire region hepspecs information is calculated (how much total and how much used)
    ## Then, for each zone in this region, individual hepspecs stats will be published (how much total and how much used)
    #if request.is_ajax():
    format = 'json'
    mimetype = 'application/javascript'
    regionName = request.GET['name']
    ## Step 1: Entire Region Stats
    ## Get the sum of Hepspec from all the zones in this region
    ## If all the zones have a NULL value, then assign the sum to 0
    totalRegionHepSpecs = sum([zn.hepspectotal() for zn in Zone.objects.filter(region__name=regionName)])
    if (totalRegionHepSpecs == None):
        totalRegionHepSpecs = 0
    ## Get the hepspec of each top level allocation done using resources from this region
    topLevelAllocationByZoneObjects = TopLevelAllocationByZone.objects.filter(zone__region__name=regionName).values('hepspec')
    ## calculate the total hepspec by adding all the top level allocation hepspec from this region
    totalAllocHepSpecs = 0.0
    for oneObject in topLevelAllocationByZoneObjects:
        if (oneObject['hepspec'] != None):
            totalAllocHepSpecs = totalAllocHepSpecs + oneObject['hepspec']
    ## Frame a json object with the total and used hepspec values for this region
    regionStatsInfo = [{"pk": regionName, "model": "cloudman.region", "fields": {"tothepspecs": totalRegionHepSpecs, "usedhepspecs": totalAllocHepSpecs}}]
    ## Step 2: Stats for each Zone in this region
    ## Get the names of all the zones in this region
    zonesList = Zone.objects.filter(region__name=regionName).values('name').order_by('name')
    for zoneInfo in zonesList:
        zoneName = zoneInfo['name']
        ## Calculate the hepspec of the zone (remember hepspec_overcommit and so use function hepspectotal())
        totalZoneHepSpecs = Zone.objects.get(name=zoneName, region__name=regionName)
        if totalZoneHepSpecs.hepspecs == None:
            totalZoneHepSpecs.hepspecs = 0
        else:
            totalZoneHepSpecs.hepspecs = totalZoneHepSpecs.hepspectotal()
        ## Now get the total hepspec already allocated from this zone in the top level allocations
        topLevelAllocationByZoneObjects = TopLevelAllocationByZone.objects.filter(zone__name=zoneName, zone__region__name=regionName).values('hepspec')
        totalAllocHepSpecs = 0.0
        for oneObject in topLevelAllocationByZoneObjects:
            if (oneObject['hepspec'] != None):
                totalAllocHepSpecs = totalAllocHepSpecs + oneObject['hepspec']
        ## Frame a json object for each zone with their hepspec stats
        regionStatsInfo.append({"pk": zoneName, "model": "cloudman.zone", "fields": {"tothepspecs": totalZoneHepSpecs.hepspecs, "usedhepspecs": totalAllocHepSpecs}})
    ## finally dump the json objects and send that as a response to the ajax query
    data = simplejson.dumps(regionStatsInfo)
    return HttpResponse(data,mimetype)
    #else:
    #    return HttpResponse(status=400)
@transaction.commit_on_success
def update(request):
    """Edit a region's name, description or admin e-group.

    GET renders the edit form (template reads ``locals()``); POST validates,
    applies the changed fields, logs old/new values, and commits or rolls
    back depending on the log result.
    """
    regionName = request.REQUEST.get("name", "")
    redirectURL = '/cloudman/message/?msg='
    groups = request.META.get('ADFS_GROUP','')
    groupsList = groups.split(';') ;
    ## Update is allowed if user has either cloudman resource manager privileges or
    ## belongs to the egroup selected as administrative e-group for this region
    if not isUserAllowedToUpdateOrDeleteRegion(regionName,groupsList):
        message = "You neither have membership of administrative group of region " + regionName + " nor possess Cloudman Resource Manager Privileges. Hence you are not authorized to Edit Region";
        html = "<html><body> %s.</body></html>" % message
        return HttpResponse(html)
    ## Get the region Object
    regionObject = None
    try:
        regionObject = Region.objects.get(name=regionName)
    except Region.DoesNotExist:
        failureMessage = "Region with Name " + regionName + " could not be found"
        return HttpResponseRedirect(redirectURL+failureMessage)
    # Snapshot the current values for the change log.
    oldRegionInfo = getRegionInfo(regionObject)
    ## If the current request is due to form submission then do update
    ## or else return to template to display the update form
    if request.method == 'POST':
        ## Existing values
        currName = regionObject.name
        currDescription = regionObject.description
        currAdmin_group = regionObject.admin_group
        ## New Values
        newName = request.POST['name']
        newDescription = request.POST['description']
        newAdmin_group = request.POST['admin_group']
        comment = request.REQUEST.get("comment", "")
        try:
            validate_name(newName)
            validate_descr(newDescription)
            validate_name(newAdmin_group)
            validate_comment(comment)
        except ValidationError as e:
            message ='Edit Region Form '+', '.join(e.messages)
            html = "<html><HEAD><meta HTTP-EQUIV=\"REFRESH\" content=\"4; url=/cloudman/region/list/\"></HEAD><body> %s.</body></html>" % message
            return HttpResponse(html)
        ## Check for atleast one field value change
        # NOTE(review): currAdmin_group is an Egroups instance while
        # newAdmin_group is a POST string; unless Egroups defines __eq__,
        # this equality (and the != below) always treats them as different —
        # verify against the Egroups model.
        if ( (currName == newName) and (currDescription == newDescription) and (currAdmin_group == newAdmin_group) ):
            message = 'No New Value provided for any field to perform Edit Operation. Hence Edit Region ' + regionName + ' aborted'
            return HttpResponseRedirect(redirectURL + message)
        ## Assign the new name to the region if it is changed
        if (currName != newName):
            if (newName == ''):
                errorMsg = 'Region name field cannot be left blank. So Edit Region operation stopped'
                return HttpResponseRedirect(redirectURL + errorMsg)
            regionExists = checkNameIgnoreCase(newName)
            if regionExists:
                msgAlreadyExists = 'Region ' + newName + ' already exists. Hence Edit Region Operation Stopped'
                return HttpResponseRedirect(redirectURL + msgAlreadyExists);
            regionObject.name = newName
        ## Assign the new description if it is changed
        if (currDescription != newDescription):
            regionObject.description = newDescription
        ## If admin egroup is changed, then first check its existence in the local egroups table
        ## If not present, then check its existence in the external egroup database through ldap
        ## If checked using external database and found, then add the egroup to the local egroups table
        ## If not found both local and external, then return an error to the user
        egroup = None
        if (currAdmin_group != newAdmin_group):
            if (newAdmin_group == ''):
                errorMsg = 'Admin E-Group field cannot be left blank. So Edit Region operation stopped'
                return HttpResponseRedirect(redirectURL + errorMsg)
            try:
                egroup = Egroups.objects.get(name=newAdmin_group)
            except Egroups.DoesNotExist:
                if not (checkEGroup(newAdmin_group)):
                    errorMessage = 'Selected Admin E-Group ' + newAdmin_group + ' does not exists'
                    return HttpResponseRedirect(redirectURL + errorMessage)
                egroup = Egroups(name=newAdmin_group)
                egroup.save()
            regionObject.admin_group = egroup
        ## Save the new values and return success message to the user
        regionObject.save()
        newRegionInfo = getRegionInfo(regionObject)
        objectId = regionObject.id
        if addUpdateLog(request,newName,objectId,comment,oldRegionInfo,newRegionInfo,'region',True):
            transaction.commit()
            message = 'Region ' + regionName + ' Successfully Updated'
        else:
            # Logging failed: undo the field updates too.
            message = 'Error in Updating Region ' + regionName
            transaction.rollback()
        html = "<html><HEAD><meta HTTP-EQUIV=\"REFRESH\" content=\"4; url=/cloudman/region/list/\"></HEAD><body> %s.</body></html>" % message
        return HttpResponse(html)
    else:
        form=RegionForm();
        return render_to_response('region/update.html',locals(),context_instance=RequestContext(request))
def listonlynames(request):
    """Render just the region names (template reads ``regionsNameList`` via locals())."""
    regionsNameList = Region.objects.all().values('name').order_by('name')
    return render_to_response('region/listonlynames.html',locals(),context_instance=RequestContext(request))
## The following functions as of now not used..they are used to draw pie charts with matplotlib python library
def getRegionHepSpecsPieChart(request):
    """Render a PNG pie chart of free vs allocated hepspec for one region."""
    regionName = request.REQUEST.get("regionname", "")
    #totalRegionsHepSpecs = Zone.objects.filter(region__name=regionName).aggregate(total_hepSpecs=Sum('hepspecs'))
    # Total capacity: sum of each zone's overcommit-adjusted hepspec.
    totalRegionHepSpecs = sum([zn.hepspectotal() for zn in Zone.objects.filter(region__name=regionName)])
    if (totalRegionHepSpecs == None):
        totalRegionHepSpecs = 0
    topLevelAllocationByZoneObjects = TopLevelAllocationByZone.objects.filter(zone__region__name=regionName).values('hepspec_fraction', 'zone__hepspecs')
    totalAllocHepSpecs = 0.0
    for oneObject in topLevelAllocationByZoneObjects:
        # Allocations store a percentage fraction of the zone's raw hepspecs.
        if ( (oneObject['hepspec_fraction'] != None) and (oneObject['zone__hepspecs'] != None) ):
            totalAllocHepSpecs = totalAllocHepSpecs + ((oneObject['hepspec_fraction'] * oneObject['zone__hepspecs'])/100)
    fig = Figure(figsize=(4,4))
    canvas = FigureCanvas(fig)
    ax = fig.add_subplot(111)
    labels = []
    fracs = []
    allotedPer = 0
    if totalRegionHepSpecs > 0:
        allotedPer = (totalAllocHepSpecs/totalRegionHepSpecs) * 100
    freePer = 100 - allotedPer
    labels.append('Free')
    fracs.append(freePer)
    # Only draw the "Allocated" slice when something is actually allocated.
    if (allotedPer > 0):
        labels.append('Allocated')
        fracs.append(allotedPer)
    patches, texts, autotexts = ax.pie(fracs, explode=None, labels=labels, colors=('g', 'r', 'c', 'm', 'y', 'k', 'w', 'b'), autopct='%.2f%%', pctdistance=0.4, labeldistance=1.1, shadow=False)
    ax.set_title('\n Hepspec Allocation - Region - ' + regionName + '\n Total: ' + str(round(totalRegionHepSpecs, 3)), fontdict=None, verticalalignment='bottom')
    ax.grid(True)
    #fig.canvas.mpl_connect('button_press_event', onclick)
    response=HttpResponse(content_type='image/png')
    canvas.print_png(response)
    canvas.draw()
    return response
def getAllRegionHepSpecsPieChart(request):
    """Render a PNG pie chart of free vs allocated hepspec across all regions."""
    #totalRegionsHepSpecs = Zone.objects.all().aggregate(total_hepSpecs=Sum('hepspecs'))
    total_capacity = sum([zn.hepspectotal() for zn in Zone.objects.all()])
    if total_capacity is None:
        total_capacity = 0
    allocated = 0.0
    for alloc in TopLevelAllocation.objects.all().values('hepspec'):
        if alloc['hepspec'] is not None:
            allocated = allocated + alloc['hepspec']
    fig = Figure(figsize=(4, 4))
    canvas = FigureCanvas(fig)
    axes = fig.add_subplot(111)
    alloted_pct = 0
    if total_capacity > 0:
        alloted_pct = (allocated / total_capacity) * 100
    slice_labels = ['Free']
    slice_fracs = [100 - alloted_pct]
    # Only draw the "Allocated" slice when something is actually allocated.
    if alloted_pct > 0:
        slice_labels.append('Allocated')
        slice_fracs.append(alloted_pct)
    axes.pie(slice_fracs, explode=None, labels=slice_labels,
             colors=('g', 'r', 'c', 'm', 'y', 'k', 'w', 'b'),
             autopct='%.2f%%', pctdistance=0.4, labeldistance=1.1, shadow=False)
    axes.set_title('\n Total Hepspec Allocation - All Regions \n Total: ' + str(round(total_capacity, 3)), fontdict=None, verticalalignment='bottom')
    axes.grid(True)
    #fig.canvas.mpl_connect('button_press_event', onclick)
    response = HttpResponse(content_type='image/png')
    canvas.print_png(response)
    canvas.draw()
    return response
def getZoneHepSpecsPieChart(request):
    """Render a PNG pie chart of free vs allocated hepspec for one zone."""
    regionName = request.REQUEST.get("regionname", "")
    zoneName = request.REQUEST.get("zonename", "")
    # NOTE: totalZoneHepSpecs is the Zone instance; its .hepspecs attribute is
    # overwritten below with the overcommit-adjusted total.
    totalZoneHepSpecs = Zone.objects.get(name=zoneName, region__name=regionName)
    if totalZoneHepSpecs.hepspecs == None:
        totalZoneHepSpecs.hepspecs = 0
    else:
        totalZoneHepSpecs.hepspecs = totalZoneHepSpecs.hepspectotal()
    topLevelAllocationByZoneObjects = TopLevelAllocationByZone.objects.filter(zone__name=zoneName, zone__region__name=regionName).values('hepspec_fraction', 'zone__hepspecs')
    totalAllocHepSpecs = 0.0
    for oneObject in topLevelAllocationByZoneObjects:
        # Allocations store a percentage fraction of the zone's raw hepspecs.
        if ( (oneObject['hepspec_fraction'] != None) and (oneObject['zone__hepspecs'] != None) ):
            totalAllocHepSpecs = totalAllocHepSpecs + ((oneObject['hepspec_fraction'] * oneObject['zone__hepspecs'])/100)
    fig = Figure(figsize=(4,4))
    canvas = FigureCanvas(fig)
    ax = fig.add_subplot(111)
    labels = []
    fracs = []
    allotedPer = 0
    if (totalZoneHepSpecs.hepspecs) > 0:
        allotedPer = (totalAllocHepSpecs/totalZoneHepSpecs.hepspecs) * 100
    freePer = 100 - allotedPer
    labels.append('Free')
    fracs.append(freePer)
    # Only draw the "Allocated" slice when something is actually allocated.
    if (allotedPer > 0):
        labels.append('Allocated')
        fracs.append(allotedPer)
    patches, texts, autotexts = ax.pie(fracs, explode=None, labels=labels, colors=('g', 'r', 'c', 'm', 'y', 'k', 'w', 'b'), autopct='%.2f%%', pctdistance=0.4, labeldistance=1.1, shadow=False)
    ax.set_title('\n Hepspec Allocation - Zone - ' + zoneName + '\n Region: ' + regionName + '(Total: ' + str(round(totalZoneHepSpecs.hepspecs, 3)) + ')', fontdict=None, verticalalignment='bottom')
    ax.grid(True)
    response=HttpResponse(content_type='image/png')
    canvas.print_png(response)
    canvas.draw()
    return response
| {
"content_hash": "c1771e1ba52657435ae7b4a1da3798ff",
"timestamp": "",
"source": "github",
"line_count": 478,
"max_line_length": 338,
"avg_line_length": 47.50209205020921,
"alnum_prop": 0.7395842508588039,
"repo_name": "cernops/CloudMan",
"id": "2847ad67479f1347e4e6b44fb667b1fd944e2efd",
"size": "22706",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudman/cloudman/regionQueries.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "31999"
},
{
"name": "JavaScript",
"bytes": "101757"
},
{
"name": "Python",
"bytes": "591308"
},
{
"name": "Shell",
"bytes": "423"
},
{
"name": "TeX",
"bytes": "95737"
}
],
"symlink_target": ""
} |
from dal import autocomplete
from django.urls import re_path as url
from .models import TModel
class LinkedDataView(autocomplete.Select2QuerySetView):
    """Autocomplete view whose queryset depends on values forwarded from the form."""

    def get_queryset(self):
        """Filter by the forwarded owner, but only when the secret checks out."""
        queryset = super(LinkedDataView, self).get_queryset()
        if self.forwarded.get('secret', None) != 42:
            # Wrong (or missing) secret: expose nothing.
            return queryset.none()
        owner = self.forwarded.get('possessor', None)
        if not owner:
            return queryset
        return queryset.filter(owner_id=owner)
# Route for the linked-data autocomplete endpoint; the view restricts its
# queryset based on values forwarded from the form.
urlpatterns = [
    url(
        '^linked_data/$',
        LinkedDataView.as_view(model=TModel),
        name='linked_data_rf'
    ),
]
| {
"content_hash": "e8c0905166bc998eeaafd8d72e78a4ca",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 57,
"avg_line_length": 21.133333333333333,
"alnum_prop": 0.6214511041009464,
"repo_name": "yourlabs/django-autocomplete-light",
"id": "8b839af10f4b074aadd6a8052a2fc5ab79548679",
"size": "634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_project/rename_forward/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11205"
},
{
"name": "HTML",
"bytes": "5709"
},
{
"name": "JavaScript",
"bytes": "27379"
},
{
"name": "Python",
"bytes": "210537"
},
{
"name": "Shell",
"bytes": "1950"
}
],
"symlink_target": ""
} |
'''
@author: xiaowing
@license: Apache Lincese 2.0
'''
import psycopg2
import sys
sys.path.append('...')
import settings
class DbUtil():
    """Thin wrapper around a psycopg2 connection configured from settings."""

    def __init__(self):
        self.conn = None  # lazily opened by OpenDbConnection()
        self.config = settings.TinyssoSettings()

    def OpenDbConnection(self):
        """Open (and remember) an autocommit connection.

        Returns the connection, or None when connecting fails.
        """
        try:
            self.conn = psycopg2.connect(host=self.config.getPgHost(),
                                         port=self.config.getPgPort(),
                                         database=self.config.getPgDatabase(),
                                         user=self.config.getPgUser(),
                                         password=self.config.getPgUserPassword())
            self.conn.autocommit = True
            return self.conn
        except psycopg2.DatabaseError:
            # Idiom fix: the bound exception variable was unused.
            # TODO: record the error into log
            return None

    def CloseDbConnection(self):
        """Close the connection if one was successfully opened."""
        # Idiom fix: use "is not None" rather than "!= None".
        if self.conn is not None and isinstance(self.conn, psycopg2.extensions.connection):
            self.conn.close()
class SSOUserAccess(DbUtil):
    """Data-access helpers for the m_sch.m_user_auth table."""

    def __init__(self):
        super().__init__()

    def GetUserMail(self, username, password):
        """Return the mail address of an active user with matching credentials.

        Returns None when there is no open connection or no matching row.
        """
        if self.conn is not None and isinstance(self.conn, psycopg2.extensions.connection):
            cur = self.conn.cursor()
            try:
                cur.execute("SELECT user_mail FROM m_sch.m_user_auth WHERE user_id=%(user)s AND user_password=%(pass)s AND user_actived=true;",
                            {'user': username, 'pass': password})
                res = cur.fetchone()
            finally:
                # Close the cursor even when execute() raises (old code leaked it).
                cur.close()
            # Bug fix: fetchone() returns None when no row matches; the old
            # code crashed with "TypeError: 'NoneType' object is not
            # subscriptable" on res[0] in that case.
            return res[0] if res is not None else None
        return None

    def AddUser(self, username, password, email):
        """Insert a new active user row; database errors are re-raised."""
        if self.conn is not None and isinstance(self.conn, psycopg2.extensions.connection):
            try:
                cur = self.conn.cursor()
                cur.execute("INSERT INTO m_sch.m_user_auth (user_id, user_password, user_mail, user_actived) VALUES (%(uname)s, %(pwd)s, %(email)s, true);",
                            {'uname': username, 'pwd': password, 'email': email})
                cur.close()
            except psycopg2.DatabaseError:
                # Re-raise with the original traceback intact.
                raise
"content_hash": "73bb58b4f5dbbb463824356d23107dc4",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 160,
"avg_line_length": 38.53448275862069,
"alnum_prop": 0.516331096196868,
"repo_name": "xiaowing/tinysso",
"id": "ccc7724cb43549f5c1a1c2aa8ffe8b68784672e0",
"size": "2235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/SsoEntity/db/DBAccess.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "529"
},
{
"name": "C#",
"bytes": "12233"
},
{
"name": "HTML",
"bytes": "4237"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "28601"
}
],
"symlink_target": ""
} |
import os
import signal
import subprocess
import logging
import socket
import time
import redis
# Timestamped INFO logging makes redis start-up waits visible in test output.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(levelname)s %(message)s')
# Redis version to download/build, and per-user scratch locations for the
# build directory, the source tarball, and the compiled redis-server binary.
redis_ver = '2.6.13'
redis_bdir = '/tmp/cache-' + os.environ['USER'] + '-systemless_test'
redis_url = redis_bdir + '/redis-'+redis_ver+'.tar.gz'
redis_exe = redis_bdir + '/bin/redis-server'
def install_redis():
    """Download, unpack and build redis into redis_bdir if not already there.

    Each stage is skipped when its output already exists, so re-runs are
    cheap.  Raises SystemError when any stage's subprocess fails.
    """
    if not os.path.exists(redis_bdir):
        output,_ = call_command_("mkdir " + redis_bdir)
    if not os.path.exists(redis_url):
        # Fetch the source tarball into the build directory.
        process = subprocess.Popen(['wget', '-P', redis_bdir,
                                    'https://redis.googlecode.com/files/redis-'\
                                    + redis_ver + '.tar.gz'],
                                   cwd=redis_bdir)
        process.wait()
        # Bug fix: return codes must be compared with !=, not the identity
        # test "is not" (which only happens to work for CPython's small-int
        # cache and is undefined behaviour in general).
        if process.returncode != 0:
            raise SystemError('wget '+redis_url)
    if not os.path.exists(redis_bdir + '/redis-'+redis_ver):
        process = subprocess.Popen(['tar', 'xzvf', redis_url],
                                   cwd=redis_bdir)
        process.wait()
        if process.returncode != 0:
            raise SystemError('untar '+redis_url)
    if not os.path.exists(redis_exe):
        process = subprocess.Popen(['make', 'PREFIX=' + redis_bdir, 'install'],
                                   cwd=redis_bdir + '/redis-'+redis_ver)
        process.wait()
        if process.returncode != 0:
            raise SystemError('install '+redis_url)
def get_redis_path():
    """Return the path to the redis-server binary, installing redis on demand."""
    if os.path.exists(redis_exe):
        return redis_exe
    install_redis()
    return redis_exe
def redis_version():
    """Return the redis-server version used by the test setup.

    The original subprocess-based probing of ``redis-server --version`` was
    disabled; this module always builds redis 2.6, so the version is
    returned directly.
    """
    return 2.6
def start_redis(port, password=None):
    '''
    Start a throw-away redis-server instance for tests.

    A per-user/per-port scratch directory is created, the bundled config
    template is copied there and patched (pid/log/cache paths, port and
    optional requirepass), and the server is launched in the background.
    Returns True once the server answers PING, False if it never comes up
    within the wait budget.

    Arguments:
        port : An unused TCP port for redis to use as the client port
        password : optional value for the "requirepass" directive
    '''
    exe = get_redis_path()
    version = redis_version()
    if version == 2.6:
        redis_conf = "redis.26.conf"
    else:
        redis_conf = "redis.24.conf"
    # The config template ships next to this module.
    conftemplate = os.path.dirname(os.path.abspath(__file__)) + "/" +\
        redis_conf
    redisbase = "/tmp/redis.%s.%d/" % (os.getenv('USER', 'None'), port)
    # Start from a clean per-instance directory every time.
    output, _ = call_command_("rm -rf " + redisbase)
    output, _ = call_command_("mkdir " + redisbase)
    output, _ = call_command_("mkdir " + redisbase + "cache")
    logging.info('Redis Port %d' % port)
    output, _ = call_command_("cp " + conftemplate + " " + redisbase +
                              redis_conf)
    # Point the template's hard-coded 6379 paths/port at this instance.
    replace_string_(redisbase + redis_conf,
                    [("/var/run/redis_6379.pid", redisbase + "pid"),
                     ("port 6379", "port " + str(port)),
                     ("/var/log/redis_6379.log", redisbase + "log"),
                     ("/var/lib/redis/6379", redisbase + "cache")])
    if password:
        replace_string_(redisbase + redis_conf,[("# requirepass foobared","requirepass " + password)])
    command = exe + " " + redisbase + redis_conf
    subprocess.Popen(command.split(' '),
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE)
    r = redis.StrictRedis(host='localhost', port=port, db=0, password=password)
    done = False
    # NOTE(review): "CONTRIAL" looks like a typo for "CONTRAIL" — confirm the
    # intended environment variable name.  Also, os.getenv returns a *string*
    # when the variable is set, so "cnt > start_wait" would compare int to
    # str — verify, and note this raises TypeError on Python 3.
    start_wait = os.getenv('CONTRIAL_ANALYTICS_TEST_MAX_START_WAIT_TIME', 15)
    cnt = 0
    # Poll with PING until the server is ready or the wait budget is spent.
    while not done:
        try:
            r.ping()
        except:
            cnt += 1
            if cnt > start_wait:
                logging.info('Redis Failed. Logs below: ')
                with open(redisbase + "log", 'r') as fin:
                    logging.info(fin.read())
                return False
            logging.info('Redis not ready')
            time.sleep(1)
        else:
            done = True
    logging.info('Redis ready')
    return True
def stop_redis(port, password=None):
    '''
    Stop an instance of redis and remove its scratch directory.

    This will only work for redis instances that were started by this
    module, because the per-instance directory layout under /tmp is
    assumed.
    Arguments:
    port     : The client port of the redis instance to be stopped
    password : Password used when the instance was started, if any
    '''
    r = redis.StrictRedis(host='localhost', port=port, db=0, password=password)
    # SHUTDOWN terminates the server process itself.
    r.shutdown()
    del r
    redisbase = "/tmp/redis.%s.%d/" % (os.getenv('USER', 'None'), port)
    output, _ = call_command_("rm -rf " + redisbase)
def replace_string_(filePath, findreplace):
    '''
    Replace every (find, replace) pair from *findreplace* in the file.

    The substituted content is written to a temporary sibling file
    (filePath + '~~~') which is then renamed over the original, so the
    file is never left half-written.
    Arguments:
    filePath    : Path of the file to rewrite in place
    findreplace : Iterable of (findStr, repStr) tuples, applied in order
    '''
    tempName = filePath + '~~~'
    # Read the whole file once, apply all substitutions in memory.
    with open(filePath) as src:
        text = src.read()
    for findstr, repstr in findreplace:
        text = text.replace(findstr, repstr)
    with open(tempName, 'w') as dst:
        dst.write(text)
    # Atomic swap: replace the original with the rewritten copy.
    os.rename(tempName, filePath)
def call_command_(command):
    '''
    Run *command* (a whitespace-separated string) as a subprocess.

    Returns the (stdout, stderr) pair produced by the process.
    '''
    args = command.split(' ')
    proc = subprocess.Popen(args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    return proc.communicate()
if __name__ == "__main__":
    # Ad-hoc entry point: bind to port 0 to have the kernel pick a free
    # TCP port, release it, then start a redis instance on that port.
    # NOTE(review): small race -- the port could be reclaimed by another
    # process between close() and redis binding it.
    cs = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    cs.bind(("", 0))
    cport = cs.getsockname()[1]
    cs.close()
    start_redis(cport)
| {
"content_hash": "5084cf0efe5d5c137528da1749702732",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 101,
"avg_line_length": 34.18012422360248,
"alnum_prop": 0.5636925313465383,
"repo_name": "sajuptpm/contrail-controller",
"id": "d05604c0d8217774936fbddca02197567df2bc32",
"size": "5730",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/analytics/test/utils/mockredis/mockredis/mockredis.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "51767"
},
{
"name": "C++",
"bytes": "19050770"
},
{
"name": "CSS",
"bytes": "531"
},
{
"name": "Groff",
"bytes": "36777"
},
{
"name": "HTML",
"bytes": "519766"
},
{
"name": "Java",
"bytes": "171966"
},
{
"name": "LLVM",
"bytes": "2937"
},
{
"name": "Lua",
"bytes": "5819"
},
{
"name": "Makefile",
"bytes": "12449"
},
{
"name": "Protocol Buffer",
"bytes": "6129"
},
{
"name": "Python",
"bytes": "4813021"
},
{
"name": "Shell",
"bytes": "81402"
},
{
"name": "Thrift",
"bytes": "40763"
},
{
"name": "Yacc",
"bytes": "7737"
}
],
"symlink_target": ""
} |
from google.cloud import dialogflowcx_v3beta1
async def sample_validate_flow():
    """Request validation of the flow named "name_value" and print the result."""
    # One async client, one ValidateFlow RPC.
    client = dialogflowcx_v3beta1.FlowsAsyncClient()
    request = dialogflowcx_v3beta1.ValidateFlowRequest(name="name_value")
    result = await client.validate_flow(request=request)
    # Show the service's validation response.
    print(result)
# [END dialogflow_v3beta1_generated_Flows_ValidateFlow_async]
| {
"content_hash": "ed25723c88c0c2a89b30e7cb289d3573",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 61,
"avg_line_length": 25.105263157894736,
"alnum_prop": 0.7211740041928721,
"repo_name": "googleapis/python-dialogflow-cx",
"id": "7948018f4e09a8d3d1b28231299f2283d91c615b",
"size": "1866",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/dialogflow_v3beta1_generated_flows_validate_flow_async.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "10904903"
},
{
"name": "Shell",
"bytes": "30681"
}
],
"symlink_target": ""
} |
"""The multinic extension."""
import webob
from webob import exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova import log as logging
# Module-level logger for this extension.
LOG = logging.getLogger(__name__)
# Policy authorizer for the 'multinic' compute extension; called with the
# request context before each action.
authorize = extensions.extension_authorizer('compute', 'multinic')
class MultinicController(wsgi.Controller):
    """Server actions for attaching and detaching fixed IPs (multinic)."""

    def __init__(self, *args, **kwargs):
        super(MultinicController, self).__init__(*args, **kwargs)
        self.compute_api = compute.API()

    def _get_instance(self, context, instance_id):
        # Translate a missing instance into an HTTP 404 for the API layer.
        try:
            return self.compute_api.get(context, instance_id)
        except exception.InstanceNotFound:
            msg = _("Server not found")
            raise exc.HTTPNotFound(msg)

    @wsgi.action('addFixedIp')
    def _add_fixed_ip(self, req, id, body):
        """Adds an IP on a given network to an instance."""
        context = req.environ['nova.context']
        authorize(context)
        entity = body['addFixedIp']
        # The target network must be named explicitly.
        if 'networkId' not in entity:
            msg = _("Missing 'networkId' argument for addFixedIp")
            raise exc.HTTPUnprocessableEntity(explanation=msg)
        instance = self._get_instance(context, id)
        self.compute_api.add_fixed_ip(context, instance, entity['networkId'])
        return webob.Response(status_int=202)

    @wsgi.action('removeFixedIp')
    def _remove_fixed_ip(self, req, id, body):
        """Removes an IP from an instance."""
        context = req.environ['nova.context']
        authorize(context)
        entity = body['removeFixedIp']
        # The address to remove must be named explicitly.
        if 'address' not in entity:
            msg = _("Missing 'address' argument for removeFixedIp")
            raise exc.HTTPUnprocessableEntity(explanation=msg)
        instance = self._get_instance(context, id)
        address = entity['address']
        try:
            self.compute_api.remove_fixed_ip(context, instance, address)
        except exception.FixedIpNotFoundForSpecificInstance:
            LOG.exception(_("Unable to find address %r") % address)
            raise exc.HTTPBadRequest()
        return webob.Response(status_int=202)
# Note: The class name is as it has to be for this to be loaded as an
# extension--only first character capitalized.
class Multinic(extensions.ExtensionDescriptor):
    """Multiple network support"""

    # Descriptor metadata consumed by the extension framework.  (The
    # docstring above is kept verbatim: it is the extension description.)
    name = "Multinic"
    alias = "NMN"
    namespace = "http://docs.openstack.org/compute/ext/multinic/api/v1.1"
    updated = "2011-06-09T00:00:00+00:00"

    def get_controller_extensions(self):
        # Attach the multinic actions to the core 'servers' resource.
        return [extensions.ControllerExtension(self, 'servers',
                                               MultinicController())]
| {
"content_hash": "7b33a9a8f1d5ef716e42ed8a630c821f",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 79,
"avg_line_length": 34.51851851851852,
"alnum_prop": 0.6570100143061517,
"repo_name": "eneabio/nova",
"id": "74f3d3bf2b3eb36bc0ad370a4a38aa0aca77f8b0",
"size": "3426",
"binary": false,
"copies": "5",
"ref": "refs/heads/stable/essex",
"path": "nova/api/openstack/compute/contrib/multinic.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "5803523"
},
{
"name": "Shell",
"bytes": "27008"
}
],
"symlink_target": ""
} |
"""Support for Rheem EcoNet thermostats."""
from pyeconet.equipment import EquipmentType
from pyeconet.equipment.thermostat import ThermostatFanMode, ThermostatOperationMode
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE
from . import EcoNetEntity
from .const import DOMAIN, EQUIPMENT
# Map pyeconet thermostat operation modes to Home Assistant HVAC modes.
ECONET_STATE_TO_HA = {
    ThermostatOperationMode.HEATING: HVAC_MODE_HEAT,
    ThermostatOperationMode.COOLING: HVAC_MODE_COOL,
    ThermostatOperationMode.OFF: HVAC_MODE_OFF,
    ThermostatOperationMode.AUTO: HVAC_MODE_HEAT_COOL,
    ThermostatOperationMode.FAN_ONLY: HVAC_MODE_FAN_ONLY,
}
# Inverse mapping: HA HVAC mode -> pyeconet operation mode.
HA_STATE_TO_ECONET = {value: key for key, value in ECONET_STATE_TO_HA.items()}
# Map pyeconet fan modes to HA fan modes (MEDLO/MEDHI are collapsed to
# MEDIUM elsewhere in this module).
ECONET_FAN_STATE_TO_HA = {
    ThermostatFanMode.AUTO: FAN_AUTO,
    ThermostatFanMode.LOW: FAN_LOW,
    ThermostatFanMode.MEDIUM: FAN_MEDIUM,
    ThermostatFanMode.HIGH: FAN_HIGH,
}
# Inverse mapping: HA fan mode -> pyeconet fan mode.
HA_FAN_STATE_TO_ECONET = {value: key for key, value in ECONET_FAN_STATE_TO_HA.items()}
# Base feature set; SUPPORT_TARGET_HUMIDITY is added per-device when the
# thermostat reports humidifier support.
SUPPORT_FLAGS_THERMOSTAT = (
    SUPPORT_TARGET_TEMPERATURE
    | SUPPORT_TARGET_TEMPERATURE_RANGE
    | SUPPORT_FAN_MODE
    | SUPPORT_AUX_HEAT
)
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up EcoNet thermostat based on a config entry."""
    equipment = hass.data[DOMAIN][EQUIPMENT][entry.entry_id]
    # One climate entity per discovered thermostat.
    thermostats = equipment[EquipmentType.THERMOSTAT]
    entities = [EcoNetThermostat(thermostat) for thermostat in thermostats]
    async_add_entities(entities)
class EcoNetThermostat(EcoNetEntity, ClimateEntity):
    """Define a Econet thermostat."""

    def __init__(self, thermostat):
        """Initialize the entity from a pyeconet thermostat."""
        super().__init__(thermostat)
        self._running = thermostat.running
        self._poll = True
        # NOTE(review): these two maps are populated nowhere in this class;
        # confirm they are still needed before removing them.
        self.econet_state_to_ha = {}
        self.ha_state_to_econet = {}
        self.op_list = []
        # Expose only modes HA can represent: UNKNOWN has no HA equivalent
        # and EMERGENCY_HEAT is surfaced via the aux-heat switch instead.
        for mode in self._econet.modes:
            if mode not in [
                ThermostatOperationMode.UNKNOWN,
                ThermostatOperationMode.EMERGENCY_HEAT,
            ]:
                ha_mode = ECONET_STATE_TO_HA[mode]
                self.op_list.append(ha_mode)

    @property
    def supported_features(self):
        """Return the list of supported features."""
        if self._econet.supports_humidifier:
            return SUPPORT_FLAGS_THERMOSTAT | SUPPORT_TARGET_HUMIDITY
        return SUPPORT_FLAGS_THERMOSTAT

    @property
    def current_temperature(self):
        """Return the current temperature."""
        # NOTE(review): this reads the device *set point*, not a measured
        # ambient temperature -- confirm pyeconet exposes no separate
        # current-temperature reading.
        return self._econet.set_point

    @property
    def current_humidity(self):
        """Return the current humidity."""
        return self._econet.humidity

    @property
    def target_humidity(self):
        """Return the humidity we try to reach."""
        if self._econet.supports_humidifier:
            return self._econet.dehumidifier_set_point
        return None

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        if self.hvac_mode == HVAC_MODE_COOL:
            return self._econet.cool_set_point
        if self.hvac_mode == HVAC_MODE_HEAT:
            return self._econet.heat_set_point
        return None

    @property
    def target_temperature_low(self):
        """Return the lower bound temperature we try to reach."""
        if self.hvac_mode == HVAC_MODE_HEAT_COOL:
            return self._econet.heat_set_point
        return None

    @property
    def target_temperature_high(self):
        """Return the higher bound temperature we try to reach."""
        if self.hvac_mode == HVAC_MODE_HEAT_COOL:
            return self._econet.cool_set_point
        return None

    def set_temperature(self, **kwargs):
        """Set new target temperature(s).

        Accepts ATTR_TEMPERATURE for single-setpoint modes, or
        ATTR_TARGET_TEMP_LOW / ATTR_TARGET_TEMP_HIGH for heat/cool range
        mode.
        """
        target_temp = kwargs.get(ATTR_TEMPERATURE)
        target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
        target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
        # Compare against None explicitly: a target of 0 degrees is falsy
        # but is still a valid temperature the user may request.
        if target_temp is not None:
            self._econet.set_set_point(target_temp, None, None)
        if target_temp_low is not None or target_temp_high is not None:
            self._econet.set_set_point(None, target_temp_high, target_temp_low)

    @property
    def is_aux_heat(self):
        """Return true if aux heater."""
        return self._econet.mode == ThermostatOperationMode.EMERGENCY_HEAT

    @property
    def hvac_modes(self):
        """Return hvac operation ie. heat, cool mode.

        Needs to be one of HVAC_MODE_*.
        """
        return self.op_list

    @property
    def hvac_mode(self) -> str:
        """Return hvac operation ie. heat, cool, mode.

        Needs to be one of HVAC_MODE_*.
        """
        econet_mode = self._econet.mode
        # Fall back to OFF while the device has not reported a mode yet.
        _current_op = HVAC_MODE_OFF
        if econet_mode is not None:
            _current_op = ECONET_STATE_TO_HA[econet_mode]
        return _current_op

    def set_hvac_mode(self, hvac_mode):
        """Set new target hvac mode."""
        hvac_mode_to_set = HA_STATE_TO_ECONET.get(hvac_mode)
        if hvac_mode_to_set is None:
            raise ValueError(f"{hvac_mode} is not a valid mode.")
        self._econet.set_mode(hvac_mode_to_set)

    def set_humidity(self, humidity: int):
        """Set new target humidity."""
        self._econet.set_dehumidifier_set_point(humidity)

    @property
    def fan_mode(self):
        """Return the current fan mode."""
        econet_fan_mode = self._econet.fan_mode
        # Remove this after we figure out how to handle med lo and med hi
        if econet_fan_mode in [ThermostatFanMode.MEDHI, ThermostatFanMode.MEDLO]:
            econet_fan_mode = ThermostatFanMode.MEDIUM
        _current_fan_mode = FAN_AUTO
        if econet_fan_mode is not None:
            _current_fan_mode = ECONET_FAN_STATE_TO_HA[econet_fan_mode]
        return _current_fan_mode

    @property
    def fan_modes(self):
        """Return the fan modes."""
        econet_fan_modes = self._econet.fan_modes
        fan_list = []
        for mode in econet_fan_modes:
            # Remove the MEDLO MEDHI once we figure out how to handle it
            if mode not in [
                ThermostatFanMode.UNKNOWN,
                ThermostatFanMode.MEDLO,
                ThermostatFanMode.MEDHI,
            ]:
                fan_list.append(ECONET_FAN_STATE_TO_HA[mode])
        return fan_list

    def set_fan_mode(self, fan_mode):
        """Set the fan mode."""
        self._econet.set_fan_mode(HA_FAN_STATE_TO_ECONET[fan_mode])

    def turn_aux_heat_on(self):
        """Turn auxiliary heater on."""
        self._econet.set_mode(ThermostatOperationMode.EMERGENCY_HEAT)

    def turn_aux_heat_off(self):
        """Turn auxiliary heater off."""
        self._econet.set_mode(ThermostatOperationMode.HEATING)

    @property
    def min_temp(self):
        """Return the minimum temperature."""
        return self._econet.set_point_limits[0]

    @property
    def max_temp(self):
        """Return the maximum temperature."""
        return self._econet.set_point_limits[1]

    @property
    def min_humidity(self) -> int:
        """Return the minimum humidity."""
        return self._econet.dehumidifier_set_point_limits[0]

    @property
    def max_humidity(self) -> int:
        """Return the maximum humidity."""
        return self._econet.dehumidifier_set_point_limits[1]
| {
"content_hash": "eedbba2fb26593a68673cd2b3bd0bd5c",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 86,
"avg_line_length": 32.43037974683544,
"alnum_prop": 0.6307572209211554,
"repo_name": "lukas-hetzenecker/home-assistant",
"id": "24bac5164667f99a69bbc6accd6e4565f21c7b94",
"size": "7686",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/econet/climate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38023745"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
import mock
import time
import logging
from oslo_config import cfg
from networking_vsphere.agent import ovsvapp_agent
from networking_vsphere.common import constants as ovsvapp_const
from networking_vsphere.common import error
from networking_vsphere.tests import base
from networking_vsphere.tests.unit.drivers import fake_manager
from networking_vsphere.utils import resource_util
from neutron.agent.common import ovs_lib
from neutron.common import utils as n_utils
from neutron.plugins.common import constants as p_const
from neutron.plugins.common import utils as p_utils
from neutron.plugins.ml2.drivers.openvswitch.agent import ovs_neutron_agent as ovs_agent # noqa
# --- Fixture identifiers shared across the test cases below ---
NETWORK_ID = 'fake_net_id'
VNIC_ADDED = 'VNIC_ADDED'
FAKE_DEVICE_ID = 'fake_device_id'
FAKE_VM = 'fake_vm'
FAKE_HOST_1 = 'fake_host_1'
FAKE_HOST_2 = 'fake_host_2'
FAKE_CLUSTER_MOID = 'fake_cluster_moid'
FAKE_CLUSTER_1 = 'fake_cluster_1'
FAKE_CLUSTER_2 = 'fake_cluster_2'
FAKE_VCENTER = 'fake_vcenter'
FAKE_PORT_1 = 'fake_port_1'
FAKE_PORT_2 = 'fake_port_2'
FAKE_PORT_3 = 'fake_port_3'
FAKE_PORT_4 = 'fake_port_4'
MAC_ADDRESS = '01:02:03:04:05:06'
FAKE_CONTEXT = 'fake_context'
FAKE_SG = {'fake_sg': 'fake_sg_rule'}
# A complete security-group rule set: source groups, one egress rule and
# one DHCP provider rule.
FAKE_SG_RULE = {'security_group_source_groups': ['fake_rule_1',
                                                 'fake_rule_2',
                                                 'fake_rule_3'],
                'security_group_rules': [
                    {'ethertype': 'IPv4',
                     'direction': 'egress',
                     'security_group_id': 'fake_id'
                     }],
                'sg_provider_rules': [
                    {'ethertype': 'IPv4',
                     'direction': 'egress',
                     'source_port_range_min': 67,
                     'source_port_range_max': 67,
                     'port_range_min': 68,
                     'port_range_max': 68
                     }]
                }
FAKE_SG_RULES = {FAKE_PORT_1: FAKE_SG_RULE}
FAKE_SG_RULES_MULTI_PORTS = {FAKE_PORT_1: FAKE_SG_RULE,
                             FAKE_PORT_2: FAKE_SG_RULE
                             }
# Variant without provider rules and without a security_group_id.
FAKE_SG_RULES_MISSING = {FAKE_PORT_1: {'security_group_source_groups': [
    'fake_rule_1',
    'fake_rule_2',
    'fake_rule_3'],
    'sg_provider_rules': [],
    'security_group_rules': [
        {'ethertype': 'IPv4',
         'direction': 'egress'
         }]
}
}
# Variant with a port range but still no provider rules.
FAKE_SG_RULES_PARTIAL = {FAKE_PORT_1: {'security_group_source_groups': [
    'fake_rule_1',
    'fake_rule_2',
    'fake_rule_3'],
    'sg_provider_rules': [],
    'security_group_rules': [
        {'ethertype': 'IPv4',
         'direction': 'egress',
         'port_range_min': 22,
         'port_range_max': 22
         }]
}
}
# Device descriptor embedded into fake ports built by _build_port().
DEVICE = {'id': FAKE_DEVICE_ID,
          'cluster_id': FAKE_CLUSTER_1,
          'host': FAKE_HOST_1,
          'vcenter': FAKE_VCENTER}
class SampleEvent(object):
    """Lightweight stand-in for an agent event object."""

    def __init__(self, type, host, cluster, srcobj, host_changed=False):
        """Record the event fields as plain attributes."""
        self.event_type, self.host_name = type, host
        self.cluster_id, self.src_obj = cluster, srcobj
        self.host_changed = host_changed
class VM(object):
    """Minimal virtual-machine fixture: a uuid plus its vnics."""

    def __init__(self, uuid, vnics):
        self.uuid, self.vnics = uuid, vnics
class SamplePort(object):
    """Port fixture carrying a uuid, optional MAC and portgroup id."""

    def __init__(self, port_uuid, mac_address=None, pg_id=None):
        self.port_uuid, self.mac_address = port_uuid, mac_address
        self.pg_id = pg_id
class SamplePortUIDMac(object):
    """Pair of a port uuid and its MAC address."""

    def __init__(self, port_uuid, mac_address):
        self.port_uuid, self.mac_address = port_uuid, mac_address
class TestOVSvAppAgent(base.TestCase):
    # The decorator stack below mocks out every external dependency the
    # agent constructor touches (RPC, OVS bridges, firewall, config).
    @mock.patch('neutron.common.config.init')
    @mock.patch('neutron.common.config.setup_logging')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
    @mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
    @mock.patch('neutron.agent.rpc.PluginReportStateAPI')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
    @mock.patch('neutron.context.get_admin_context_without_session')
    @mock.patch('neutron.agent.rpc.create_consumers')
    @mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
                'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.check_ovsvapp_agent_restart')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.setup_ovs_bridges')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent.setup_security_br')
    @mock.patch('networking_vsphere.agent.ovsvapp_agent.'
                'OVSvAppAgent._init_ovs_flows')
    @mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
                'check_ovs_firewall_restart')
    @mock.patch('networking_vsphere.drivers.ovs_firewall.'
                'OVSFirewallDriver.setup_base_flows')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
    @mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
    def setUp(self, mock_get_port_ofport,
              mock_set_secure_mode, mock_create_ovs_bridge,
              mock_setup_base_flows, mock_check_ovs_firewall_restart,
              mock_init_ovs_flows, mock_setup_security_br,
              mock_setup_ovs_bridges, mock_check_ovsvapp_agent_restart,
              mock_setup_integration_br, mock_create_consumers,
              mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
              mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
              mock_rpc_pluginapi, mock_setup_logging, mock_init):
        """Create an OVSvAppAgent with all external dependencies mocked.

        Mock arguments are supplied bottom-up: the innermost (last)
        decorator corresponds to the first parameter after *self*.
        """
        super(TestOVSvAppAgent, self).setUp()
        cfg.CONF.set_override('security_bridge_mapping',
                              "fake_sec_br:fake_if", 'SECURITYGROUP')
        # Pretend this is a fresh start, not an agent restart.
        mock_check_ovsvapp_agent_restart.return_value = False
        mock_get_port_ofport.return_value = 5
        self.agent = ovsvapp_agent.OVSvAppAgent()
        self.agent.run_refresh_firewall_loop = False
        self.LOG = ovsvapp_agent.LOG
        self.agent.monitor_log = logging.getLogger('monitor')
def _build_port(self, port):
port = {'admin_state_up': False,
'id': port,
'device': DEVICE,
'network_id': NETWORK_ID,
'physical_network': 'physnet1',
'segmentation_id': '1001',
'lvid': 1,
'network_type': 'vlan',
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
def _build_update_port(self, port):
port = {'admin_state_up': False,
'id': port,
'network_id': NETWORK_ID,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
    def test_setup_security_br_none(self):
        """setup_security_br must exit when no security bridge is mapped."""
        cfg.CONF.set_override('security_bridge_mapping',
                              None, 'SECURITYGROUP')
        self.agent.sec_br = mock.Mock()
        with mock.patch.object(self.LOG, 'warning') as mock_logger_warn,\
                mock.patch.object(self.agent.sec_br, 'bridge_exists'
                                  ) as mock_ovs_bridge:
            self.assertRaises(SystemExit,
                              self.agent.setup_security_br)
            self.assertTrue(mock_logger_warn.called)
            self.assertFalse(mock_ovs_bridge.called)
    def test_setup_security_br(self):
        """setup_security_br wires patch ports between int and sec bridges."""
        cfg.CONF.set_override('security_bridge_mapping',
                              "br-fake:fake_if", 'SECURITYGROUP')
        self.agent.sec_br = mock.Mock()
        self.agent.int_br = mock.Mock()
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(ovs_lib, "OVSBridge") as mock_ovs_br, \
                mock.patch.object(self.agent.sec_br,
                                  "add_patch_port",
                                  return_value=5), \
                mock.patch.object(self.agent.int_br,
                                  "add_patch_port",
                                  return_value=6):
            self.agent.setup_security_br()
            self.assertTrue(mock_ovs_br.called)
            self.assertTrue(self.agent.sec_br.add_patch_port.called)
            self.assertTrue(mock_logger_info.called)
    def test_recover_security_br_none(self):
        """recover_security_br must exit when no security bridge is mapped."""
        cfg.CONF.set_override('security_bridge_mapping',
                              None, 'SECURITYGROUP')
        self.agent.sec_br = mock.Mock()
        with mock.patch.object(self.LOG, 'warning') as mock_logger_warn, \
                mock.patch.object(self.agent.sec_br, 'bridge_exists'
                                  ) as mock_ovs_bridge:
            self.assertRaises(SystemExit,
                              self.agent.recover_security_br)
            self.assertTrue(mock_logger_warn.called)
            self.assertFalse(mock_ovs_bridge.called)
    def test_recover_physical_bridges(self):
        """recover_physical_bridges records the int-bridge ofport per physnet."""
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        self.agent.bridge_mappings = n_utils.parse_mappings(
            cfg.CONF.OVSVAPP.bridge_mappings)
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.LOG, 'error') as mock_logger_error, \
                mock.patch.object(self.agent, "br_phys_cls") as mock_ovs_br, \
                mock.patch.object(ovs_lib.BaseOVS,
                                  "get_bridges",
                                  return_value=['br-eth1']
                                  ), \
                mock.patch.object(p_utils, 'get_interface_name'
                                  ) as mock_int_name, \
                mock.patch.object(self.agent.int_br,
                                  "get_port_ofport",
                                  return_value=6) as mock_get_ofport:
            self.agent.recover_physical_bridges(self.agent.bridge_mappings)
            self.assertTrue(mock_logger_info.called)
            self.assertFalse(mock_logger_error.called)
            self.assertTrue(mock_ovs_br.called)
            self.assertTrue(mock_get_ofport.called)
            self.assertTrue(mock_int_name.called)
            # The mocked ofport (6) must be cached for physnet1.
            self.assertEqual(self.agent.int_ofports['physnet1'], 6)
    def test_init_ovs_flows(self):
        """_init_ovs_flows reprograms flows on the int and physical bridges."""
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        self.agent.bridge_mappings = n_utils.parse_mappings(
            cfg.CONF.OVSVAPP.bridge_mappings)
        self.agent.patch_sec_ofport = 5
        self.agent.int_ofports = {'physnet1': 'br-eth1'}
        self.agent.phys_ofports = {"physnet1": "br-eth1"}
        port = self._build_port(FAKE_PORT_1)
        # Installs a mocked physical bridge for the port's physnet.
        br = self._build_phys_brs(port)
        self.agent.br = mock.Mock()
        with mock.patch.object(self.agent.int_br,
                               "delete_flows"
                               ) as mock_int_br_delete_flows, \
                mock.patch.object(self.agent,
                                  "br_phys_cls") as mock_ovs_br, \
                mock.patch.object(self.agent.int_br,
                                  "add_flow") as mock_int_br_add_flow:
            self.agent._init_ovs_flows(self.agent.bridge_mappings)
            self.assertTrue(mock_int_br_delete_flows.called)
            self.assertTrue(mock_ovs_br.called)
            self.assertTrue(br.delete_flows.called)
            self.assertTrue(br.add_flows.called)
            self.assertTrue(mock_int_br_add_flow.called)
    def test_update_port_bindings(self):
        """A fully successful binding RPC drains ports_to_bind completely."""
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_ports_binding",
                               return_value=set(["fake_port"])
                               ) as mock_update_ports_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._update_port_bindings()
            self.assertTrue(mock_update_ports_binding.called)
            self.assertFalse(self.agent.ports_to_bind)
            self.assertFalse(mock_log_exception.called)
    def test_update_port_bindings_rpc_exception(self):
        """An RPC failure is wrapped and the port stays queued for retry."""
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_ports_binding",
                               side_effect=Exception()
                               ) as mock_update_port_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._update_port_bindings)
            self.assertTrue(mock_update_port_binding.called)
            self.assertTrue(mock_log_exception.called)
            # The failed port must remain queued.
            self.assertEqual(set(['fake_port']),
                             self.agent.ports_to_bind)
    def test_update_port_bindings_partial(self):
        """Only ports confirmed by the server are removed from the queue."""
        self.agent.ports_to_bind.add("fake_port1")
        self.agent.ports_to_bind.add("fake_port2")
        self.agent.ports_to_bind.add("fake_port3")
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_ports_binding",
                               return_value=set(["fake_port1",
                                                 "fake_port2"])
                               ) as mock_update_port_binding, \
                mock.patch.object(self.LOG, 'exception'):
            self.agent._update_port_bindings()
            self.assertTrue(mock_update_port_binding.called)
            # The unconfirmed port stays queued for the next attempt.
            self.assertEqual(set(["fake_port3"]),
                             self.agent.ports_to_bind)
    def test_setup_ovs_bridges_vlan(self):
        """VLAN tenant networks set up physical bridges and base flows."""
        cfg.CONF.set_override('tenant_network_types',
                              "vlan", 'OVSVAPP')
        cfg.CONF.set_override('bridge_mappings',
                              ["physnet1:br-eth1"], 'OVSVAPP')
        with mock.patch.object(self.agent, 'setup_physical_bridges'
                               ) as mock_phys_brs, \
                mock.patch.object(self.agent, '_init_ovs_flows'
                                  ) as mock_init_ovs_flows:
            self.agent.setup_ovs_bridges()
            mock_phys_brs.assert_called_with(self.agent.bridge_mappings)
            mock_init_ovs_flows.assert_called_with(self.agent.bridge_mappings)
    def test_setup_ovs_bridges_vxlan(self):
        """VXLAN tenant networks set up the tunnel bridge and its flows."""
        self.agent.local_ip = "10.10.10.10"
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent, 'setup_tunnel_br'
                               ) as mock_setup_tunnel_br, \
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows:
            self.agent.setup_ovs_bridges()
            mock_setup_tunnel_br.assert_called_with("br-tun")
            self.assertTrue(mock_setup_tunnel_br_flows.called)
    def test_setup_ovs_bridges_vxlan_ofport(self):
        """Patch-port ofports are recorded when the tunnel bridge is built."""
        cfg.CONF.set_override('tenant_network_types',
                              "vxlan", 'OVSVAPP')
        cfg.CONF.set_override('local_ip',
                              "10.10.10.10", 'OVSVAPP')
        cfg.CONF.set_override('tunnel_bridge',
                              "br-tun", 'OVSVAPP')
        self.agent.tun_br = mock.Mock()
        self.agent.int_br = mock.Mock()
        self.agent.local_ip = "10.10.10.10"
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.tun_br,
                               "add_patch_port",
                               return_value=5), \
                mock.patch.object(self.agent.int_br,
                                  "add_patch_port",
                                  return_value=6), \
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows:
            self.agent.setup_ovs_bridges()
            self.assertTrue(self.agent.tun_br.add_patch_port.called)
            # tun-side patch port came from int_br (6), int-side from tun_br (5).
            self.assertEqual(self.agent.patch_tun_ofport, 6)
            self.assertEqual(self.agent.patch_int_ofport, 5)
            self.assertTrue(mock_setup_tunnel_br_flows.called)
    def test_mitigate_ovs_restart_vlan(self):
        """An OVS restart on a VLAN setup rebuilds bridges, firewall, flows.

        Tunnel-bridge setup must be skipped and all known ports must be
        queued for firewall refresh.
        """
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set(['1111'])
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br"
                                  ) as mock_int_br, \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_security_br"
                                  ) as mock_sec_br, \
                mock.patch.object(self.agent.sg_agent, "init_firewall"
                                  ) as mock_init_fw, \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.agent, "_init_ovs_flows"
                                  ) as mock_init_flows, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_int_br.called)
            self.assertTrue(mock_phys_brs.called)
            self.assertTrue(mock_sec_br.called)
            self.assertFalse(mock_setup_tunnel_br.called)
            self.assertFalse(mock_setup_tunnel_br_flows.called)
            self.assertTrue(mock_init_fw.called)
            self.assertTrue(mock_init_flows.called)
            self.assertTrue(self.agent.refresh_firewall_required)
            # Host and other-cluster ports are both queued for refiltering.
            self.assertEqual(2, len(self.agent.devices_to_filter))
            monitor_warning.assert_called_with("ovs: broken")
            monitor_info.assert_called_with("ovs: ok")
            self.assertTrue(mock_logger_info.called)
    def test_mitigate_ovs_restart_vxlan(self):
        """With tunneling enabled, a restart rebuilds the tunnel bridge
        and resyncs tunnels instead of the physical bridges."""
        self.agent.enable_tunneling = True
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set(['1111'])
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br"), \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_security_br"), \
                mock.patch.object(self.agent.sg_agent, "init_firewall"
                                  ), \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.agent, "tunnel_sync"
                                  ) as mock_tun_sync, \
                mock.patch.object(self.agent, "_init_ovs_flows"), \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_setup_tunnel_br.called)
            self.assertTrue(mock_setup_tunnel_br_flows.called)
            self.assertFalse(mock_phys_brs.called)
            self.assertTrue(mock_tun_sync.called)
            self.assertTrue(self.agent.refresh_firewall_required)
            self.assertEqual(len(self.agent.devices_to_filter), 2)
            monitor_warning.assert_called_with("ovs: broken")
            monitor_info.assert_called_with("ovs: ok")
            self.assertTrue(mock_logger_info.called)
    def test_mitigate_ovs_restart_exception(self):
        """A failure during recovery is logged and leaves state untouched;
        the monitor keeps reporting ovs as broken."""
        self.agent.enable_tunneling = False
        self.agent.refresh_firewall_required = False
        self.agent.devices_to_filter = set()
        self.agent.cluster_host_ports = set(['1111'])
        self.agent.cluster_other_ports = set(['2222'])
        with mock.patch.object(self.LOG, "info") as mock_logger_info, \
                mock.patch.object(self.agent, "setup_integration_br",
                                  side_effect=Exception()) as mock_int_br, \
                mock.patch.object(self.agent, "setup_physical_bridges"
                                  ) as mock_phys_brs, \
                mock.patch.object(self.agent, "setup_tunnel_br"
                                  ) as mock_setup_tunnel_br,\
                mock.patch.object(self.agent, 'setup_tunnel_br_flows'
                                  ) as mock_setup_tunnel_br_flows, \
                mock.patch.object(self.LOG, "exception"
                                  ) as mock_exception_log, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent.mitigate_ovs_restart()
            self.assertTrue(mock_int_br.called)
            self.assertFalse(mock_phys_brs.called)
            self.assertFalse(mock_setup_tunnel_br.called)
            self.assertFalse(mock_setup_tunnel_br_flows.called)
            self.assertFalse(mock_logger_info.called)
            self.assertTrue(mock_exception_log.called)
            self.assertFalse(self.agent.refresh_firewall_required)
            self.assertEqual(0, len(self.agent.devices_to_filter))
            monitor_warning.assert_called_with("ovs: broken")
            self.assertFalse(monitor_info.called)
def _get_fake_port(self, port_id):
return {'id': port_id,
'port_id': port_id,
'mac_address': MAC_ADDRESS,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'security_groups': FAKE_SG,
'segmentation_id': 1232,
'lvid': 1,
'network_id': 'fake_network',
'device_id': FAKE_DEVICE_ID,
'admin_state_up': True,
'physical_network': 'physnet1',
'network_type': 'vlan'}
    def _build_phys_brs(self, port):
        """Install a mocked physical bridge for the port's physnet and
        return the bridge mock.

        NOTE(review): the add_flows/delete_flows invocations below record
        calls on the mock at build time, which makes later
        assertTrue(br.add_flows.called) checks in the tests pass regardless
        of what the agent does -- confirm this is intentional.
        """
        phys_net = port['physical_network']
        self.agent.phys_brs[phys_net] = {}
        self.agent.phys_brs[phys_net]['eth_ofport'] = 5
        br = self.agent.phys_brs[phys_net]['br'] = mock.Mock()
        br.add_flows(port['segmentation_id'],
                     port['mac_address'],
                     5)
        br.delete_flows(port['mac_address'],
                        port['segmentation_id'])
        return br
    def test_process_port(self):
        """_process_port caches the port, registers firewall filters,
        provisions the local VLAN and drops the pending vnic entry."""
        fakeport = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        br = self._build_phys_brs(fakeport)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan:
            status = self.agent._process_port(fakeport)
            self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
            self.assertTrue(status)
            mock_add_devices.assert_called_with([fakeport])
            mock_prov_local_vlan.assert_called_with(fakeport)
            self.assertTrue(br.add_flows.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
    def test_process_port_existing_network(self):
        """_process_port skips local-VLAN provisioning when the port's
        network already has a LocalVLANMapping."""
        fakeport = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        br = self._build_phys_brs(fakeport)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = {}
        net_id = fakeport['network_id']
        # Pre-populate the mapping so the network counts as already known.
        self.agent.local_vlan_map[net_id] = self._build_lvm(fakeport)
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan:
            status = self.agent._process_port(fakeport)
            self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
            self.assertTrue(status)
            mock_add_devices.assert_called_with([fakeport])
            self.assertFalse(mock_prov_local_vlan.called)
            # NOTE(review): add_flows was pre-called in _build_phys_brs,
            # so this assertion is vacuous — confirm intent.
            self.assertTrue(br.add_flows.called)
def test_process_uncached_devices_with_few_devices(self):
devices = set(['123', '234', '345', '456', '567', '678',
'1123', '1234', '1345', '1456', '1567', '1678'])
with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices(devices)
self.assertTrue(mock_spawn_thread.called)
self.assertEqual(1, mock_spawn_thread.call_count)
self.assertFalse(mock_log_exception.called)
def test_process_uncached_devices_with_more_devices(self):
devices = set(['123', '234', '345', '456', '567', '678',
'1123', '1234', '1345', '1456', '1567', '1678',
'2123', '2234', '2345', '2456', '2567', '2678',
'3123', '3234', '3345', '3456', '3567', '3678',
'4123', '4234', '4345', '4456', '4567', '4678',
'5123', '5234', '5345', '5456', '5567', '5678',
'6123', '6234', '6345', '6456', '6567', '6678'])
with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices(devices)
self.assertTrue(mock_spawn_thread.called)
self.assertEqual(2, mock_spawn_thread.call_count)
self.assertFalse(mock_log_exception.called)
    def test_process_uncached_devices_sublist_single_port_vlan(self):
        """A single uncached VLAN port is fetched from neutron, filtered,
        provisioned and removed from vnic_info."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        self.agent.ports_dict = {}
        br = self._build_phys_brs(fakeport_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        devices = [FAKE_PORT_1]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_provision_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(1, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_provision_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            # NOTE(review): add_flows was pre-called in _build_phys_brs,
            # so this assertion is vacuous — confirm intent.
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_sublist_multiple_port_vlan(self):
        """Two uncached VLAN ports are each added to the firewall filter and
        both are dropped from vnic_info."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        self.agent.ports_dict = {}
        br = self._build_phys_brs(fakeport_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        devices = [FAKE_PORT_1, FAKE_PORT_2]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
            # NOTE(review): add_flows was pre-called in _build_phys_brs,
            # so this assertion is vacuous — confirm intent.
            self.assertTrue(br.add_flows.called)
    def test_process_uncached_devices_sublist_single_port_vxlan(self):
        """A single uncached VXLAN port triggers local-VLAN provisioning and
        firewall refresh; no physical bridge is involved."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_1["network_type"] = p_const.TYPE_VXLAN
        self.agent.ports_dict = {}
        self.agent.local_vlan_map = {}
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        devices = [FAKE_PORT_1]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_populate_lvm'), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertEqual(1, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
    def test_process_uncached_devices_sublist_multiple_port_vxlan(self):
        """Two uncached VXLAN ports are both filtered and removed from
        vnic_info after processing."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        fakeport_1["network_type"] = p_const.TYPE_VXLAN
        fakeport_2["network_type"] = p_const.TYPE_VXLAN
        self.agent.ports_dict = {}
        self.agent.local_vlan_map = {}
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        devices = [FAKE_PORT_1, FAKE_PORT_2]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_populate_lvm'), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
    def test_process_uncached_devices_sublist_stale_vm_port(self):
        """A port neutron no longer reports (FAKE_PORT_3) is treated as
        stale: dropped from ports_to_bind and its filter removed, while the
        unrelated FAKE_PORT_4 stays pending."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        fakeport_3 = self._get_fake_port(FAKE_PORT_3)
        self.agent.ports_dict = {}
        self._build_phys_brs(fakeport_1)
        self._build_phys_brs(fakeport_2)
        self._build_phys_brs(fakeport_3)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.cluster_host_ports.add(FAKE_PORT_2)
        self.agent.ports_to_bind = set([FAKE_PORT_3, FAKE_PORT_4])
        self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
        self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
        self.agent.vnic_info[FAKE_PORT_3] = fakeport_3
        devices = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
        self.agent.sg_agent.remove_devices_filter = mock.Mock()
        # Neutron only returns details for ports 1 and 2 — port 3 is stale.
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                                  ) as mock_add_devices_to_filter, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  )as mock_refresh_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'remove_devices_filter'
                                  )as mock_remove_device_filter, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
                mock.patch.object(self.agent, '_block_stale_ports'), \
                mock.patch.object(self.LOG, 'exception') as mock_log_exception:
            self.agent._process_uncached_devices_sublist(devices)
            self.assertTrue(mock_get_ports_details_list.called)
            self.assertEqual(2, mock_add_devices_to_filter.call_count)
            self.assertTrue(mock_refresh_firewall.called)
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertFalse(mock_log_exception.called)
            self.assertNotIn(FAKE_PORT_3, self.agent.ports_to_bind)
            self.assertIn(FAKE_PORT_4, self.agent.ports_to_bind)
            self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
            self.assertNotIn(FAKE_PORT_3, self.agent.vnic_info)
            mock_remove_device_filter.assert_called_with(FAKE_PORT_3)
    def test_update_firewall(self):
        """_update_firewall fetches details only for ports missing from
        ports_dict, then refreshes the firewall for all pending devices."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        fakeport_2 = self._get_fake_port(FAKE_PORT_2)
        self._build_phys_brs(fakeport_1)
        self._build_phys_brs(fakeport_2)
        self.agent.devices_to_filter = set([FAKE_PORT_1,
                                            FAKE_PORT_2])
        # Port 1 is already cached; only port 2 needs an RPC lookup.
        self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
        self.agent.vnic_info[FAKE_PORT_1] = {}
        self.agent.vnic_info[FAKE_PORT_2] = {}
        self.agent.refresh_firewall_required = True
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               return_value=[fakeport_1, fakeport_2]
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ), \
                mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
                mock.patch.object(self.agent, '_block_stale_ports'), \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent._update_firewall()
            self.assertFalse(self.agent.refresh_firewall_required)
            self.assertFalse(self.agent.devices_to_filter)
            self.assertIn(FAKE_PORT_2, self.agent.ports_dict)
            mock_get_ports_details_list.assert_called_with(
                self.agent.context,
                [FAKE_PORT_2],
                self.agent.agent_id,
                self.agent.vcenter_id,
                self.agent.cluster_id)
            mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1,
                                                          FAKE_PORT_2]))
            self.assertEqual(2, monitor_warning.call_count)
            self.assertEqual(2, monitor_info.call_count)
    def test_update_firewall_get_ports_exception(self):
        """If the details RPC raises, _update_firewall keeps the uncached
        port queued and leaves refresh_firewall_required set."""
        fakeport_1 = self._get_fake_port(FAKE_PORT_1)
        self.agent.devices_to_filter = set([FAKE_PORT_1,
                                            FAKE_PORT_2])
        self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
        self.agent.refresh_firewall_required = True
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               'get_ports_details_list',
                               side_effect=Exception()
                               ) as mock_get_ports_details_list, \
                mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
                mock.patch.object(self.agent.monitor_log, "warning"
                                  ) as monitor_warning, \
                mock.patch.object(self.agent.monitor_log, "info"
                                  ) as monitor_info:
            self.agent._update_firewall()
            self.assertTrue(self.agent.refresh_firewall_required)
            self.assertEqual(set([FAKE_PORT_2]), self.agent.devices_to_filter)
            self.assertNotIn(FAKE_PORT_2, self.agent.ports_dict)
            mock_get_ports_details_list.assert_called_with(
                self.agent.context,
                [FAKE_PORT_2],
                self.agent.agent_id,
                self.agent.vcenter_id,
                self.agent.cluster_id)
            # Only the already-cached port gets its firewall refreshed.
            mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1]))
            self.assertEqual(2, monitor_warning.call_count)
            self.assertEqual(1, monitor_info.call_count)
    def test_check_for_updates_no_updates(self):
        """With OVS healthy (status 4) and nothing pending, _check_for_updates
        performs no firewall or binding work."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
                                  ) as mock_refresh_port_filters, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_refresh_port_filters.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_ovs_restarted(self):
        """An OVS status of 0 (restarted) triggers mitigate_ovs_restart."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=0) as mock_check_ovs, \
                mock.patch.object(self.agent, 'mitigate_ovs_restart'
                                  ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertTrue(mock_mitigate.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_devices_to_filter(self):
        """refresh_firewall_required set means _update_firewall is invoked
        even when OVS is healthy."""
        self.agent.refresh_firewall_required = True
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, 'mitigate_ovs_restart'
                                  ) as mock_mitigate, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall,\
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_mitigate.called)
            self.assertTrue(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_firewall_refresh(self):
        """When the sg_agent reports a refresh is needed, port filters are
        refreshed."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind = None
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=True
                                  ) as mock_firewall_refresh,\
                mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
                                  ) as mock_refresh_port_filters, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertTrue(mock_refresh_port_filters.called)
            self.assertFalse(mock_update_port_bindings.called)
    def test_check_for_updates_port_bindings(self):
        """A non-empty ports_to_bind set triggers _update_port_bindings."""
        self.agent.refresh_firewall_required = False
        self.agent.ports_to_bind.add("fake_port")
        with mock.patch.object(self.agent, 'check_ovs_status',
                               return_value=4) as mock_check_ovs, \
                mock.patch.object(self.agent, '_update_firewall'
                                  ) as mock_update_firewall, \
                mock.patch.object(self.agent.sg_agent,
                                  'firewall_refresh_needed',
                                  return_value=False
                                  ) as mock_firewall_refresh, \
                mock.patch.object(self.agent, '_update_port_bindings'
                                  ) as mock_update_port_bindings:
            self.agent._check_for_updates()
            self.assertTrue(mock_check_ovs.called)
            self.assertFalse(mock_update_firewall.called)
            self.assertTrue(mock_firewall_refresh.called)
            self.assertTrue(mock_update_port_bindings.called)
def test_update_devices_up(self):
self.agent.devices_up_list.append(FAKE_PORT_1)
ret_value = {'devices_up': [FAKE_PORT_1],
'failed_devices_up': []}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
return_value=ret_value
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertFalse(self.agent.devices_up_list)
self.assertFalse(log_exception.called)
def test_update_devices_up_rpc_exception(self):
self.agent.devices_up_list.append(FAKE_PORT_1)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
side_effect=Exception()
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertTrue(log_exception.called)
def test_update_devices_up_partial(self):
self.agent.devices_up_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
ret_value = {'devices_up': [FAKE_PORT_1, FAKE_PORT_2],
'failed_devices_up': [FAKE_PORT_3]}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
return_value=ret_value
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertEqual([FAKE_PORT_3], self.agent.devices_up_list)
self.assertFalse(log_exception.called)
def test_update_devices_down(self):
self.agent.devices_down_list.append(FAKE_PORT_1)
ret_value = {'devices_down': [FAKE_PORT_1],
'failed_devices_down': []}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
return_value=ret_value
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertFalse(self.agent.devices_down_list)
self.assertFalse(log_exception.called)
def test_update_devices_down_rpc_exception(self):
self.agent.devices_down_list.append(FAKE_PORT_1)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
side_effect=Exception()
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertEqual([FAKE_PORT_1], self.agent.devices_down_list)
self.assertTrue(log_exception.called)
def test_update_devices_down_partial(self):
self.agent.devices_down_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
ret_value = {'devices_down': [FAKE_PORT_1, FAKE_PORT_2],
'failed_devices_down': [FAKE_PORT_3]}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
return_value=ret_value
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertEqual([FAKE_PORT_3], self.agent.devices_down_list)
self.assertFalse(log_exception.called)
def test_report_state(self):
with mock.patch.object(self.agent.state_rpc,
"report_state") as report_st:
self.agent._report_state()
report_st.assert_called_with(self.agent.context,
self.agent.agent_state,
True)
self.assertNotIn("start_flag", self.agent.agent_state)
self.assertFalse(self.agent.use_call)
self.assertEqual(cfg.CONF.host,
self.agent.agent_state["host"])
def test_report_state_fail(self):
with mock.patch.object(self.agent.state_rpc,
"report_state",
side_effect=Exception()) as mock_report_st, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._report_state()
mock_report_st.assert_called_with(self.agent.context,
self.agent.agent_state,
True)
self.assertTrue(mock_log_exception.called)
    def test_process_event_ignore_event(self):
        """Events of an unhandled type (VNIC_ADDED) are logged at debug and
        trigger none of the VM notification paths."""
        vm = VM(FAKE_VM, [])
        event = SampleEvent(VNIC_ADDED, FAKE_HOST_1,
                            FAKE_CLUSTER_MOID, vm)
        with mock.patch.object(self.agent,
                               "_notify_device_added") as mock_add_vm, \
                mock.patch.object(self.agent,
                                  "_notify_device_updated") as mock_update_vm, \
                mock.patch.object(self.agent,
                                  "_notify_device_deleted") as mock_del_vm, \
                mock.patch.object(self.LOG, 'debug') as mock_log_debug:
            self.agent.process_event(event)
            self.assertFalse(mock_add_vm.called)
            self.assertFalse(mock_update_vm.called)
            self.assertFalse(mock_del_vm.called)
            self.assertTrue(mock_log_debug.called)
    def test_process_event_exception(self):
        """An exception inside event handling is caught by process_event and
        logged (error + exception), not propagated."""
        vm = VM(FAKE_VM, [])
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        with mock.patch.object(self.agent,
                               "_notify_device_added",
                               side_effect=Exception()) as mock_add_vm, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception, \
                mock.patch.object(self.LOG, 'error') as mock_log_error:
            self.agent.process_event(event)
            self.assertTrue(mock_add_vm.called)
            self.assertTrue(mock_log_error.called)
            self.assertTrue(mock_log_exception.called)
    def test_process_event_vm_create_nonics_non_host_non_cluster(self):
        """VM_CREATED on another host with no vnics still notifies device
        added.

        NOTE(review): this test body is identical to
        test_process_event_vm_create_nonics_non_host (minus its cluster_moid
        assertion) — consider merging or differentiating the scenarios.
        """
        self.agent.esx_hostname = FAKE_HOST_2
        vm = VM(FAKE_VM, [])
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent,
                               "_notify_device_added") as device_added:
            self.agent.process_event(event)
            self.assertTrue(device_added.called)
    def test_process_event_vm_create_nonics_non_host(self):
        """VM_CREATED on another host with no vnics notifies device added and
        records the event's cluster moid on the agent."""
        self.agent.esx_hostname = FAKE_HOST_2
        vm = VM(FAKE_VM, [])
        event = SampleEvent(ovsvapp_const.VM_CREATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent,
                               "_notify_device_added") as device_added:
            self.agent.process_event(event)
            self.assertTrue(device_added.called)
            self.assertEqual(FAKE_CLUSTER_MOID, self.agent.cluster_moid)
def test_process_event_vm_create_nics_non_host(self):
self.agent.esx_hostname = FAKE_HOST_2
vm_port1 = SamplePort(FAKE_PORT_1)
vm_port2 = SamplePort(FAKE_PORT_2)
vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.process_event(event)
for vnic in vm.vnics:
self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
self.assertIn(vnic.port_uuid, self.agent.cluster_other_ports)
self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
def test_process_event_vm_create_nics_host(self):
self.agent.esx_hostname = FAKE_HOST_1
vm_port1 = SamplePort(FAKE_PORT_1)
vm_port2 = SamplePort(FAKE_PORT_2)
vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.process_event(event)
for vnic in vm.vnics:
self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
self.assertIn(vnic.port_uuid, self.agent.cluster_host_ports)
self.assertNotIn(vnic.port_uuid, self.agent.cluster_other_ports)
    def test_process_event_vm_updated_nonhost(self):
        """VM_UPDATED for a VM on another host moves its port into
        cluster_other_ports."""
        self.agent.esx_hostname = FAKE_HOST_2
        vm_port1 = SamplePort(FAKE_PORT_1)
        port = self._build_port(FAKE_PORT_1)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm = VM(FAKE_VM, [vm_port1])
        event = SampleEvent(ovsvapp_const.VM_UPDATED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm, True)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.process_event(event)
        self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
    def test_process_event_vm_delete_hosted_vm_vlan(self):
        """Deleting a hosted VLAN VM removes its port from the host set,
        deletes the bridge flows and does not delete the network."""
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        port = self._build_port(FAKE_PORT_1)
        br = self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
        vm = VM(FAKE_VM, ([vm_port]))
        event = SampleEvent(ovsvapp_const.VM_DELETED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self._build_lvm(port)
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.net_mgr.get_driver(),
                               "post_delete_vm",
                               ) as mock_post_del_vm, \
                mock.patch.object(self.LOG, 'debug'), \
                mock.patch.object(self.agent.net_mgr.get_driver(),
                                  "delete_network") as mock_del_net:
            self.agent.process_event(event)
            for vnic in vm.vnics:
                self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
            self.assertTrue(mock_post_del_vm.called)
            self.assertFalse(mock_del_net.called)
            # NOTE(review): delete_flows was pre-called in _build_phys_brs,
            # so this assertion is vacuous — confirm intent.
            self.assertTrue(br.delete_flows.called)
    def test_process_event_vm_delete_hosted_vm_vxlan(self):
        """Deleting a hosted VXLAN VM removes its port from the host set and
        invokes the driver's post_delete_vm."""
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.cluster_host_ports.add(FAKE_PORT_1)
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        port = self._build_port(FAKE_PORT_1)
        port['network_type'] = p_const.TYPE_VXLAN
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
        vm = VM(FAKE_VM, ([vm_port]))
        event = SampleEvent(ovsvapp_const.VM_DELETED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.net_mgr.get_driver(),
                               "post_delete_vm",
                               return_value=True) as (post_del_vm):
            self.agent.process_event(event)
            for vnic in vm.vnics:
                self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
            self.assertTrue(post_del_vm.called)
    def test_process_event_vm_delete_non_hosted_vm(self):
        """Deleting a VM on another host clears its port from
        cluster_other_ports; the network itself is not deleted."""
        self.agent.esx_hostname = FAKE_HOST_2
        self.agent.cluster_other_ports.add(FAKE_PORT_1)
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        port = self._build_port(FAKE_PORT_1)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(
            port)
        vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
        vm = VM(FAKE_VM, ([vm_port]))
        event = SampleEvent(ovsvapp_const.VM_DELETED,
                            FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent.net_mgr.get_driver(),
                               "post_delete_vm",
                               return_value=True) as mock_post_del_vm, \
                mock.patch.object(self.agent.net_mgr.get_driver(),
                                  "delete_network") as mock_del_net:
            self.agent.process_event(event)
            for vnic in vm.vnics:
                self.assertNotIn(vnic.port_uuid,
                                 self.agent.cluster_other_ports)
            self.assertTrue(mock_post_del_vm.called)
            self.assertFalse(mock_del_net.called)
def test_notify_device_added_with_hosted_vm(self):
vm = VM(FAKE_VM, [])
host = FAKE_HOST_1
self.agent.esx_hostname = host
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent.ovsvapp_rpc,
"get_ports_for_device",
return_value=True) as mock_get_ports, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(time, "sleep") as mock_time_sleep:
self.agent._notify_device_added(vm, host)
self.assertTrue(mock_get_ports.called)
self.assertFalse(mock_time_sleep.called)
self.assertFalse(mock_log_exception.called)
    def test_notify_device_added_rpc_exception(self):
        """An RPC failure in get_ports_for_device is logged and re-raised as
        OVSvAppNeutronAgentError without retrying."""
        vm = VM(FAKE_VM, [])
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "get_ports_for_device",
                               side_effect=Exception()) as mock_get_ports, \
                mock.patch.object(self.LOG, 'exception'
                                  )as mock_log_exception, \
                mock.patch.object(time, "sleep") as mock_time_sleep:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._notify_device_added, vm, host)
            self.assertTrue(mock_log_exception.called)
            self.assertTrue(mock_get_ports.called)
            self.assertFalse(mock_time_sleep.called)
def test_notify_device_added_with_retry(self):
vm = VM(FAKE_VM, [])
host = FAKE_HOST_1
self.agent.esx_hostname = host
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent.ovsvapp_rpc,
"get_ports_for_device",
return_value=False) as mock_get_ports, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(time, "sleep") as mock_time_sleep:
self.agent._notify_device_added(vm, host)
self.assertTrue(mock_get_ports.called)
self.assertTrue(mock_time_sleep.called)
self.assertFalse(mock_log_exception.called)
    def test_notify_device_updated_migration_vlan(self):
        """A VM migrating away from this host (target host differs from
        esx_hostname) drops the port from cluster_host_ports without a
        device-binding update."""
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        self._build_lvm(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent._add_ports_to_host_ports([FAKE_PORT_1])
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._notify_device_updated(vm, FAKE_HOST_2, True)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertFalse(mock_update_device_binding.called)
            self.assertFalse(mock_log_exception.called)
    def test_notify_device_updated_host_vlan(self):
        """A VM updated onto this host adds the port to cluster_host_ports
        and pushes a device-binding update."""
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        self._build_lvm(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        br = self.agent.phys_brs[port['physical_network']]['br']
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding:
            self.agent._notify_device_updated(vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            # NOTE(review): add_flows was pre-called in _build_phys_brs,
            # so this assertion is vacuous — confirm intent.
            self.assertTrue(br.add_flows.called)
    def test_notify_device_updated_vlan_rpc_exception(self):
        """A failing update_device_binding RPC is logged and surfaces as
        OVSvAppNeutronAgentError; the port is still tracked on the host."""
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        port = self._build_port(FAKE_PORT_1)
        br = self._build_phys_brs(port)
        self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding",
                               side_effect=Exception()
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._notify_device_updated, vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            # NOTE(review): add_flows was pre-called in _build_phys_brs,
            # so this assertion is vacuous — confirm intent.
            self.assertTrue(br.add_flows.called)
            self.assertTrue(mock_update_device_binding.called)
            self.assertTrue(mock_log_exception.called)
    def test_notify_device_updated_host_vlan_multiple_nic(self):
        """A multi-nic VM update results in a single device-binding RPC even
        though both ports get bridge flows."""
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm_port2 = SamplePort(FAKE_PORT_2)
        vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
        port1 = self._build_port(FAKE_PORT_1)
        port2 = self._build_port(FAKE_PORT_2)
        br1 = self._build_phys_brs(port1)
        br2 = self._build_phys_brs(port2)
        self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
        self.agent.ports_dict[port2['id']] = self.agent._build_port_info(port2)
        self._build_lvm(port1)
        self._build_lvm(port2)
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._notify_device_updated(vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertFalse(mock_log_exception.called)
            self.assertEqual(1, mock_update_device_binding.call_count)
            # NOTE(review): add_flows was pre-called in _build_phys_brs for
            # both bridges, so these assertions are vacuous — confirm intent.
            self.assertTrue(br1.add_flows.called)
            self.assertTrue(br2.add_flows.called)
def _build_lvm(self, port):
net_id = port['network_id']
self.agent.local_vlan_map[net_id] = ovs_agent.LocalVLANMapping(
port['lvid'], port['network_type'],
port['physical_network'],
'1234')
    def test_notify_device_updated_host_vxlan(self):
        """_notify_device_updated for a hosted VXLAN VM updates the device
        binding without logging an exception.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        port1 = self._build_port(FAKE_PORT_1)
        port1['network_type'] = p_const.TYPE_VXLAN
        self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
        vm = VM(FAKE_VM, [vm_port1])
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding"
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.agent._notify_device_updated(vm, host, True)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertFalse(mock_log_exception.called)
    def test_notify_device_updated_vxlan_rpc_exception(self):
        """A failing update_device_binding RPC for a VXLAN VM is logged
        and surfaced as OVSvAppNeutronAgentError.
        """
        host = FAKE_HOST_1
        self.agent.esx_hostname = host
        vm_port1 = SamplePort(FAKE_PORT_1)
        vm = VM(FAKE_VM, [vm_port1])
        self.agent.state = ovsvapp_const.AGENT_RUNNING
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        with mock.patch.object(self.agent.ovsvapp_rpc,
                               "update_device_binding",
                               side_effect=Exception()
                               ) as mock_update_device_binding, \
                mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent._notify_device_updated, vm, host, True)
            # The port is still tracked even though the RPC failed.
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertTrue(mock_update_device_binding.called)
            self.assertTrue(mock_log_exception.called)
def test_map_port_to_common_model_vlan(self):
expected_port = self._build_port(FAKE_PORT_1)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
network, port = self.agent._map_port_to_common_model(expected_port)
expected_name = expected_port['network_id'] + "-" + FAKE_CLUSTER_MOID
self.assertEqual(expected_name, network.name)
self.assertEqual(expected_port['id'], port.uuid)
def test_map_port_to_common_model_vxlan(self):
expected_port = self._build_port(FAKE_PORT_1)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
network, port = self.agent._map_port_to_common_model(expected_port, 1)
expected_name = expected_port['network_id'] + "-" + FAKE_CLUSTER_MOID
self.assertEqual(expected_name, network.name)
self.assertEqual(expected_port['id'], port.uuid)
def test_device_create_cluster_mismatch(self):
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_2
with mock.patch.object(self.agent,
'_process_create_ports',
return_value=True) as mock_create_ports, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE)
self.assertTrue(mock_logger_debug.called)
self.assertFalse(mock_create_ports.called)
    def test_device_create_non_hosted_vm(self):
        """device_create for a VM hosted elsewhere in the cluster adds its
        port to the SG filter and to cluster_other_ports, but does not
        mark the device up locally.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        # The agent's ESX host differs from the port's host -> "other" port.
        self.agent.esx_hostname = FAKE_HOST_2
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertFalse(self.agent.devices_up_list)
            self.assertTrue(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vlan(self):
        """device_create for a VLAN VM on this agent's host tracks the
        port as a host port, queues it for device-up reporting, and
        provisions the local VLAN.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertTrue(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vlan_sg_rule_missing(self):
        """When expand_sg_rules returns rules missing for the device, the
        port is queued in devices_to_filter and no SG update is sent.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.devices_to_filter = set()
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_MISSING
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            # Missing SG rules -> port deferred for later filtering.
            self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vlan_sg_rule_partial_missing(self):
        """Partially missing SG rules are treated like fully missing ones:
        the port is deferred to devices_to_filter and no SG update is sent.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self._build_phys_brs(ports[0])
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.devices_to_filter = set()
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_PARTIAL
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
            self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_prov_local_vlan.called)
    def test_device_create_hosted_vm_vxlan(self):
        """device_create for a hosted VXLAN VM provisions the local VLAN,
        filters the device, and reports the device up via plugin RPC.
        """
        port = self._build_port(FAKE_PORT_1)
        port['network_type'] = p_const.TYPE_VXLAN
        ports = [port]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.local_vlan_map = {}
        self.agent.devices_to_filter = set()
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent, '_provision_local_vlan'
                               ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent.sg_agent,
                                  'add_devices_to_filter'
                                  ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
                                  ) as mock_update_device_up, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertNotIn(FAKE_PORT_1, self.agent.devices_to_filter)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertTrue(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_update_device_up.called)
    def test_device_create_hosted_vm_vxlan_sg_rule_missing(self):
        """A hosted VXLAN VM with missing SG rules is still reported up,
        but its port is deferred to devices_to_filter and no SG update
        is issued.
        """
        port = self._build_port(FAKE_PORT_1)
        port['network_type'] = p_const.TYPE_VXLAN
        ports = [port]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
        self.agent.local_vlan_map = {}
        self.agent.devices_to_filter = set()
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        with mock.patch.object(self.agent, '_provision_local_vlan'
                               ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent.sg_agent,
                                  'add_devices_to_filter'
                                  ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_MISSING
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
                                  ) as mock_update_device_up, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_prov_local_vlan.called)
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_update_device_up.called)
    def test_device_create_hosted_vm_create_port_exception(self):
        """A driver create_port failure during device_create is logged and
        re-raised as OVSvAppNeutronAgentError; no SG update takes place.
        """
        ports = [self._build_port(FAKE_PORT_1)]
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        # Force the network driver's create_port to blow up.
        self.agent.net_mgr.get_driver().create_port = mock.Mock(
            side_effect=Exception())
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ), \
                mock.patch.object(self.agent, '_provision_local_vlan'
                                  ), \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ) as mock_sg_update_fn, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES
                                  ) as mock_expand_sg_rules, \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug, \
                mock.patch.object(self.LOG, 'exception') as mock_log_excep:
            self.assertRaises(
                error.OVSvAppNeutronAgentError,
                self.agent.device_create,
                FAKE_CONTEXT, device=DEVICE,
                ports=ports, sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
            self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
            self.assertFalse(mock_sg_update_fn.called)
            self.assertTrue(mock_expand_sg_rules.called)
            self.assertTrue(mock_log_excep.called)
def test_port_update_admin_state_up(self):
port = self._build_port(FAKE_PORT_1)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.cluster_host_ports = set([port['id']])
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
updated_port = self._build_update_port(FAKE_PORT_1)
updated_port['admin_state_up'] = True
self.devices_up_list = []
neutron_port = {'port': updated_port,
'segmentation_id': port['segmentation_id']}
with mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.port_update(FAKE_CONTEXT, **neutron_port)
self.assertEqual(neutron_port['port']['admin_state_up'],
self.agent.ports_dict[port['id']].
admin_state_up)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertFalse(mock_log_exception.called)
self.assertTrue(mock_logger_debug.called)
    def test_device_update_maintenance_mode(self):
        """With esx_maintenance_mode=True, device_update powers off the
        displaced OVSvApp VM, puts the host into maintenance mode, and
        releases the cluster lock with success=True.
        """
        kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                                  'esx_host_name': FAKE_HOST_1,
                                  'assigned_agent_host': FAKE_HOST_2}}
        self.agent.hostname = FAKE_HOST_2
        self.agent.esx_maintenance_mode = True
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.net_mgr.get_driver().session = "fake_session"
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.vcenter_id = FAKE_VCENTER
        with mock.patch.object(resource_util,
                               "get_vm_mor_by_name",
                               return_value="vm_mor") as vm_mor_by_name, \
                mock.patch.object(resource_util,
                                  "get_host_mor_by_name",
                                  return_value="host_mor"
                                  ) as host_mor_by_name, \
                mock.patch.object(resource_util,
                                  "set_vm_poweroff") as power_off, \
                mock.patch.object(resource_util,
                                  "set_host_into_maintenance_mode"
                                  ) as maintenance_mode, \
                mock.patch.object(resource_util,
                                  "set_host_into_shutdown_mode"
                                  ) as shutdown_mode, \
                mock.patch.object(self.agent.ovsvapp_rpc,
                                  "update_cluster_lock") as cluster_lock, \
                mock.patch.object(self.LOG, 'exception') as log_exception, \
                mock.patch.object(time, 'sleep'):
            self.agent.device_update(FAKE_CONTEXT, **kwargs)
            self.assertTrue(vm_mor_by_name.called)
            self.assertTrue(host_mor_by_name.called)
            self.assertTrue(power_off.called)
            self.assertTrue(maintenance_mode.called)
            # Maintenance mode path must not also shut the host down.
            self.assertFalse(shutdown_mode.called)
            self.assertTrue(cluster_lock.called)
            cluster_lock.assert_called_with(self.agent.context,
                                            cluster_id=self.agent.cluster_id,
                                            vcenter_id=self.agent.vcenter_id,
                                            success=True)
            self.assertFalse(log_exception.called)
    def test_device_update_shutdown_mode(self):
        """With esx_maintenance_mode=False, device_update shuts the host
        down instead of powering off the VM or entering maintenance mode,
        then releases the cluster lock with success=True.
        """
        kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                                  'esx_host_name': FAKE_HOST_1,
                                  'assigned_agent_host': FAKE_HOST_2}}
        self.agent.hostname = FAKE_HOST_2
        self.agent.esx_maintenance_mode = False
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.net_mgr.get_driver().session = "fake_session"
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.vcenter_id = FAKE_VCENTER
        with mock.patch.object(resource_util,
                               "get_vm_mor_by_name",
                               return_value="vm_mor") as vm_mor_by_name, \
                mock.patch.object(resource_util,
                                  "get_host_mor_by_name",
                                  return_value="host_mor"
                                  ) as host_mor_by_name, \
                mock.patch.object(resource_util,
                                  "set_vm_poweroff") as power_off, \
                mock.patch.object(resource_util,
                                  "set_host_into_maintenance_mode"
                                  ) as maintenance_mode, \
                mock.patch.object(resource_util,
                                  "set_host_into_shutdown_mode"
                                  ) as shutdown_mode, \
                mock.patch.object(self.agent.ovsvapp_rpc,
                                  "update_cluster_lock") as cluster_lock, \
                mock.patch.object(self.LOG, 'exception') as log_exception, \
                mock.patch.object(time, 'sleep'):
            self.agent.device_update(FAKE_CONTEXT, **kwargs)
            self.assertTrue(vm_mor_by_name.called)
            self.assertTrue(host_mor_by_name.called)
            self.assertFalse(power_off.called)
            self.assertFalse(maintenance_mode.called)
            self.assertTrue(shutdown_mode.called)
            self.assertTrue(cluster_lock.called)
            cluster_lock.assert_called_with(self.agent.context,
                                            cluster_id=self.agent.cluster_id,
                                            vcenter_id=self.agent.vcenter_id,
                                            success=True)
            self.assertFalse(log_exception.called)
    def test_device_update_ovsvapp_alreadly_powered_off(self):
        """A failing set_vm_poweroff (e.g. VM already off) is logged but
        device_update still enters maintenance mode and releases the
        cluster lock with success=True.
        (Note: 'alreadly' in the method name is a pre-existing typo for
        'already'; kept to avoid renaming the test.)
        """
        kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                                  'esx_host_name': FAKE_HOST_1,
                                  'assigned_agent_host': FAKE_HOST_2}}
        self.agent.hostname = FAKE_HOST_2
        self.agent.esx_maintenance_mode = True
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.net_mgr.get_driver().session = "fake_session"
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.vcenter_id = FAKE_VCENTER
        with mock.patch.object(resource_util,
                               "get_vm_mor_by_name",
                               return_value="vm_mor") as vm_mor_by_name, \
                mock.patch.object(resource_util,
                                  "get_host_mor_by_name",
                                  return_value="host_mor"
                                  ) as host_mor_by_name, \
                mock.patch.object(resource_util,
                                  "set_vm_poweroff",
                                  side_effect=Exception()) as power_off, \
                mock.patch.object(resource_util,
                                  "set_host_into_maintenance_mode"
                                  ) as maintenance_mode, \
                mock.patch.object(resource_util,
                                  "set_host_into_shutdown_mode"
                                  ) as shutdown_mode, \
                mock.patch.object(self.agent.ovsvapp_rpc,
                                  "update_cluster_lock") as cluster_lock, \
                mock.patch.object(self.LOG, 'exception') as log_exception, \
                mock.patch.object(time, 'sleep'):
            self.agent.device_update(FAKE_CONTEXT, **kwargs)
            self.assertTrue(vm_mor_by_name.called)
            self.assertTrue(host_mor_by_name.called)
            self.assertTrue(power_off.called)
            self.assertTrue(maintenance_mode.called)
            self.assertFalse(shutdown_mode.called)
            self.assertTrue(cluster_lock.called)
            cluster_lock.assert_called_with(self.agent.context,
                                            cluster_id=self.agent.cluster_id,
                                            vcenter_id=self.agent.vcenter_id,
                                            success=True)
            self.assertTrue(log_exception.called)
    def test_device_update_maintenance_mode_exception(self):
        """If entering maintenance mode keeps failing, device_update logs
        the exception, sleeps between retries, and finally releases the
        cluster lock with success=False.
        """
        kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
                                  'esx_host_name': FAKE_HOST_1,
                                  'assigned_agent_host': FAKE_HOST_2}}
        self.agent.hostname = FAKE_HOST_2
        self.agent.esx_maintenance_mode = True
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.net_mgr.get_driver().session = "fake_session"
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.vcenter_id = FAKE_VCENTER
        with mock.patch.object(resource_util,
                               "get_vm_mor_by_name",
                               return_value="vm_mor") as vm_mor_by_name, \
                mock.patch.object(resource_util,
                                  "get_host_mor_by_name",
                                  return_value="host_mor"
                                  ) as host_mor_by_name, \
                mock.patch.object(resource_util,
                                  "set_vm_poweroff",
                                  side_effect=Exception()) as power_off, \
                mock.patch.object(resource_util,
                                  "set_host_into_maintenance_mode",
                                  side_effect=Exception()
                                  ) as maintenance_mode, \
                mock.patch.object(resource_util,
                                  "set_host_into_shutdown_mode"
                                  ) as shutdown_mode, \
                mock.patch.object(self.agent.ovsvapp_rpc,
                                  "update_cluster_lock") as cluster_lock, \
                mock.patch.object(self.LOG, 'exception') as log_exception, \
                mock.patch.object(time, 'sleep') as time_sleep:
            self.agent.device_update(FAKE_CONTEXT, **kwargs)
            self.assertTrue(vm_mor_by_name.called)
            self.assertTrue(host_mor_by_name.called)
            self.assertTrue(power_off.called)
            self.assertTrue(maintenance_mode.called)
            self.assertFalse(shutdown_mode.called)
            self.assertTrue(cluster_lock.called)
            cluster_lock.assert_called_with(self.agent.context,
                                            cluster_id=self.agent.cluster_id,
                                            vcenter_id=self.agent.vcenter_id,
                                            success=False)
            self.assertTrue(log_exception.called)
            self.assertTrue(time_sleep.called)
def test_enhanced_sg_provider_updated(self):
kwargs = {'network_id': NETWORK_ID}
with mock.patch.object(self.LOG, 'info') as log_info, \
mock.patch.object(self.agent.sg_agent, "sg_provider_updated"
) as mock_sg_provider_updated:
self.agent.enhanced_sg_provider_updated(FAKE_CONTEXT, **kwargs)
self.assertTrue(log_info.called)
mock_sg_provider_updated.assert_called_with(NETWORK_ID)
    def test_device_create_hosted_vm_vlan_multiple_physnet(self):
        """device_create for a VM with ports on two physical networks
        marks both devices up and provisions a local VLAN for each
        physnet with its own integration-bridge ofport.
        """
        port1 = self._build_port(FAKE_PORT_1)
        port2 = self._build_port(FAKE_PORT_2)
        # Second port lives on a different physnet/segment/network.
        port2['physical_network'] = "physnet2"
        port2['segmentation_id'] = "2005"
        port2['network_id'] = "fake_net2"
        ports = [port1, port2]
        self._build_phys_brs(port1)
        self._build_phys_brs(port2)
        self.agent.phys_ofports = {}
        self.agent.phys_ofports[port1['physical_network']] = 4
        self.agent.phys_ofports[port2['physical_network']] = 5
        self.agent.vcenter_id = FAKE_VCENTER
        self.agent.cluster_id = FAKE_CLUSTER_1
        self.agent.cluster_moid = FAKE_CLUSTER_MOID
        self.agent.esx_hostname = FAKE_HOST_1
        self.agent.tenant_network_types = [p_const.TYPE_VLAN]
        self.agent.devices_up_list = []
        self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
        self.agent.net_mgr.initialize_driver()
        self.agent.int_br = mock.Mock()
        self.agent.patch_sec_ofport = 1
        self.agent.int_ofports = {'physnet1': 2, 'physnet2': 3}
        with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
                               ) as mock_add_devices_fn, \
                mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
                                  ), \
                mock.patch.object(self.agent.int_br, 'provision_local_vlan'
                                  ) as mock_prov_local_vlan, \
                mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
                                  return_value=FAKE_SG_RULES_MULTI_PORTS
                                  ), \
                mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
            self.agent.device_create(FAKE_CONTEXT,
                                     device=DEVICE,
                                     ports=ports,
                                     sg_rules=mock.MagicMock())
            self.assertTrue(mock_logger_debug.called)
            self.assertEqual([FAKE_PORT_1, FAKE_PORT_2],
                             self.agent.devices_up_list)
            mock_add_devices_fn.assert_called_with(ports)
            self.assertTrue(mock_prov_local_vlan.called)
            # One provisioning call per physnet, each with its ofport.
            mock_prov_local_vlan.assert_any_call(
                port1['network_type'],
                port1['lvid'],
                port1['segmentation_id'],
                self.agent.patch_sec_ofport,
                self.agent.int_ofports['physnet1'], None)
            mock_prov_local_vlan.assert_any_call(
                port2['network_type'],
                port2['lvid'],
                port2['segmentation_id'],
                self.agent.patch_sec_ofport,
                self.agent.int_ofports['physnet2'], None)
| {
"content_hash": "e536f9f8f992532ed45e179a277d3da8",
"timestamp": "",
"source": "github",
"line_count": 1957,
"max_line_length": 96,
"avg_line_length": 52.59018906489525,
"alnum_prop": 0.5454192131676366,
"repo_name": "VTabolin/networking-vsphere",
"id": "4ffe6117a0061c7fbfd21e7071edd86f3be78ea7",
"size": "103582",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1115749"
},
{
"name": "Shell",
"bytes": "7264"
}
],
"symlink_target": ""
} |
"""SSD Keras-based MobilenetV1 FPN Feature Extractor."""
import tensorflow as tf
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import feature_map_generators
from object_detection.models.keras_models import mobilenet_v1
from object_detection.models.keras_models import model_utils
from object_detection.utils import ops
from object_detection.utils import shape_utils
# A modified config of mobilenet v1 that makes it more detection friendly.
def _create_modified_mobilenet_config():
  """Return ConvDefs overriding conv_pw_12/13 filter counts (512/256)."""
  return [
      model_utils.ConvDefs(conv_name='conv_pw_12', filters=512),
      model_utils.ConvDefs(conv_name='conv_pw_13', filters=256),
  ]
class SSDMobileNetV1FpnKerasFeatureExtractor(
ssd_meta_arch.SSDKerasFeatureExtractor):
"""SSD Feature Extractor using Keras-based MobilenetV1 FPN features."""
def __init__(self,
is_training,
depth_multiplier,
min_depth,
pad_to_multiple,
conv_hyperparams,
freeze_batchnorm,
inplace_batchnorm_update,
fpn_min_level=3,
fpn_max_level=7,
additional_layer_depth=256,
use_explicit_padding=False,
use_depthwise=False,
use_native_resize_op=False,
override_base_feature_extractor_hyperparams=False,
name=None):
"""SSD Keras based FPN feature extractor Mobilenet v1 architecture.
Args:
is_training: whether the network is in training mode.
depth_multiplier: float depth multiplier for feature extractor.
min_depth: minimum feature extractor depth.
pad_to_multiple: the nearest multiple to zero pad the input height and
width dimensions to.
conv_hyperparams: a `hyperparams_builder.KerasLayerHyperparams` object
containing convolution hyperparameters for the layers added on top of
the base feature extractor.
freeze_batchnorm: whether to freeze batch norm parameters during
training or not. When training with a small batch size (e.g. 1), it is
desirable to freeze batch norm update and use pretrained batch norm
params.
inplace_batchnorm_update: whether to update batch norm moving average
values inplace. When this is false train op must add a control
dependency on tf.graphkeys.UPDATE_OPS collection in order to update
batch norm statistics.
fpn_min_level: the highest resolution feature map to use in FPN. The valid
values are {2, 3, 4, 5} which map to MobileNet v1 layers
{Conv2d_3_pointwise, Conv2d_5_pointwise, Conv2d_11_pointwise,
Conv2d_13_pointwise}, respectively.
fpn_max_level: the smallest resolution feature map to construct or use in
FPN. FPN constructions uses features maps starting from fpn_min_level
upto the fpn_max_level. In the case that there are not enough feature
maps in the backbone network, additional feature maps are created by
applying stride 2 convolutions until we get the desired number of fpn
levels.
additional_layer_depth: additional feature map layer channel depth.
use_explicit_padding: Whether to use explicit padding when extracting
features. Default is False.
use_depthwise: whether to use depthwise convolutions. Default is False.
use_native_resize_op: Whether to use tf.image.nearest_neighbor_resize
to do upsampling in FPN. Default is false.
override_base_feature_extractor_hyperparams: Whether to override
hyperparameters of the base feature extractor with the one from
`conv_hyperparams`.
name: a string name scope to assign to the model. If 'None', Keras
will auto-generate one from the class name.
"""
super(SSDMobileNetV1FpnKerasFeatureExtractor, self).__init__(
is_training=is_training,
depth_multiplier=depth_multiplier,
min_depth=min_depth,
pad_to_multiple=pad_to_multiple,
conv_hyperparams=conv_hyperparams,
freeze_batchnorm=freeze_batchnorm,
inplace_batchnorm_update=inplace_batchnorm_update,
use_explicit_padding=use_explicit_padding,
use_depthwise=use_depthwise,
override_base_feature_extractor_hyperparams=
override_base_feature_extractor_hyperparams,
name=name)
self._fpn_min_level = fpn_min_level
self._fpn_max_level = fpn_max_level
self._additional_layer_depth = additional_layer_depth
self._conv_defs = None
if self._use_depthwise:
self._conv_defs = _create_modified_mobilenet_config()
self._use_native_resize_op = use_native_resize_op
self._feature_blocks = [
'Conv2d_3_pointwise', 'Conv2d_5_pointwise', 'Conv2d_11_pointwise',
'Conv2d_13_pointwise'
]
self._mobilenet_v1 = None
self._fpn_features_generator = None
self._coarse_feature_layers = []
def build(self, input_shape):
full_mobilenet_v1 = mobilenet_v1.mobilenet_v1(
batchnorm_training=(self._is_training and not self._freeze_batchnorm),
conv_hyperparams=(self._conv_hyperparams
if self._override_base_feature_extractor_hyperparams
else None),
weights=None,
use_explicit_padding=self._use_explicit_padding,
alpha=self._depth_multiplier,
min_depth=self._min_depth,
conv_defs=self._conv_defs,
include_top=False)
conv2d_3_pointwise = full_mobilenet_v1.get_layer(
name='conv_pw_3_relu').output
conv2d_5_pointwise = full_mobilenet_v1.get_layer(
name='conv_pw_5_relu').output
conv2d_11_pointwise = full_mobilenet_v1.get_layer(
name='conv_pw_11_relu').output
conv2d_13_pointwise = full_mobilenet_v1.get_layer(
name='conv_pw_13_relu').output
self._mobilenet_v1 = tf.keras.Model(
inputs=full_mobilenet_v1.inputs,
outputs=[conv2d_3_pointwise, conv2d_5_pointwise,
conv2d_11_pointwise, conv2d_13_pointwise]
)
# pylint:disable=g-long-lambda
self._depth_fn = lambda d: max(
int(d * self._depth_multiplier), self._min_depth)
self._base_fpn_max_level = min(self._fpn_max_level, 5)
self._num_levels = self._base_fpn_max_level + 1 - self._fpn_min_level
self._fpn_features_generator = (
feature_map_generators.KerasFpnTopDownFeatureMaps(
num_levels=self._num_levels,
depth=self._depth_fn(self._additional_layer_depth),
use_depthwise=self._use_depthwise,
use_explicit_padding=self._use_explicit_padding,
use_native_resize_op=self._use_native_resize_op,
is_training=self._is_training,
conv_hyperparams=self._conv_hyperparams,
freeze_batchnorm=self._freeze_batchnorm,
name='FeatureMaps'))
# Construct coarse feature layers
padding = 'VALID' if self._use_explicit_padding else 'SAME'
kernel_size = 3
stride = 2
for i in range(self._base_fpn_max_level + 1, self._fpn_max_level + 1):
coarse_feature_layers = []
if self._use_explicit_padding:
def fixed_padding(features, kernel_size=kernel_size):
return ops.fixed_padding(features, kernel_size)
coarse_feature_layers.append(tf.keras.layers.Lambda(
fixed_padding, name='fixed_padding'))
layer_name = 'bottom_up_Conv2d_{}'.format(
i - self._base_fpn_max_level + 13)
conv_block = feature_map_generators.create_conv_block(
self._use_depthwise, kernel_size, padding, stride, layer_name,
self._conv_hyperparams, self._is_training, self._freeze_batchnorm,
self._depth_fn(self._additional_layer_depth))
coarse_feature_layers.extend(conv_block)
self._coarse_feature_layers.append(coarse_feature_layers)
self.built = True
def preprocess(self, resized_inputs):
"""SSD preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
  def _extract_features(self, preprocessed_inputs):
    """Extract features from preprocessed inputs.
    Args:
      preprocessed_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.
    Returns:
      feature_maps: a list of tensors where the ith tensor has shape
        [batch, height_i, width_i, depth_i]
    """
    # Reject images smaller than the backbone can reduce without error.
    preprocessed_inputs = shape_utils.check_min_image_dim(
        33, preprocessed_inputs)
    # Run the MobileNet V1 backbone; padding keeps spatial dims divisible
    # by self._pad_to_multiple so downsampling stays aligned.
    image_features = self._mobilenet_v1(
        ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple))
    # Select the backbone outputs that feed the FPN, one per pyramid level.
    # NOTE(review): the `level - 2` offset implies self._feature_blocks is
    # indexed starting at pyramid level 2 — confirm against the class init.
    feature_block_list = []
    for level in range(self._fpn_min_level, self._base_fpn_max_level + 1):
      feature_block_list.append(self._feature_blocks[level - 2])
    # The backbone returns outputs for all blocks; only the last
    # self._num_levels of them participate in the FPN.
    feature_start_index = len(self._feature_blocks) - self._num_levels
    fpn_input_image_features = [
        (key, image_features[feature_start_index + index])
        for index, key in enumerate(feature_block_list)]
    # Build the top-down feature pyramid.
    fpn_features = self._fpn_features_generator(fpn_input_image_features)
    feature_maps = []
    for level in range(self._fpn_min_level, self._base_fpn_max_level + 1):
      feature_maps.append(fpn_features['top_down_{}'.format(
          self._feature_blocks[level - 2])])
    # Extend the pyramid beyond the backbone's deepest level by applying
    # the extra stride-2 coarse layers built in build().
    last_feature_map = fpn_features['top_down_{}'.format(
        self._feature_blocks[self._base_fpn_max_level - 2])]
    for coarse_feature_layers in self._coarse_feature_layers:
      for layer in coarse_feature_layers:
        last_feature_map = layer(last_feature_map)
      feature_maps.append(last_feature_map)
    return feature_maps
| {
"content_hash": "fd8697aa4f12f9b2e4787a92b2129413",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 80,
"avg_line_length": 44.455357142857146,
"alnum_prop": 0.6700140590480016,
"repo_name": "alexgorban/models",
"id": "d2d276060018e5f64f1616c18cf7fd971965bf5e",
"size": "10648",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1619012"
},
{
"name": "Dockerfile",
"bytes": "9821"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33316"
},
{
"name": "Jupyter Notebook",
"bytes": "454746"
},
{
"name": "Makefile",
"bytes": "4933"
},
{
"name": "Python",
"bytes": "16363107"
},
{
"name": "Shell",
"bytes": "144095"
},
{
"name": "Starlark",
"bytes": "148029"
}
],
"symlink_target": ""
} |
from swift3.controllers.base import Controller, bucket_operation
from swift3.etree import Element, tostring
from swift3.response import HTTPOk, S3NotImplemented
class VersioningController(Controller):
    """
    Implements the bucket versioning subresource:

     - GET Bucket versioning
     - PUT Bucket versioning

    These APIs are logged as VERSIONING operations in the S3 server log.
    """
    @bucket_operation
    def GET(self, req):
        """
        Handles GET Bucket versioning.
        """
        # Verify the bucket exists (errors propagate as S3 responses).
        req.get_response(self.app, method='HEAD')
        # Versioning is never enabled, so always report an empty
        # VersioningConfiguration document.
        return HTTPOk(body=tostring(Element('VersioningConfiguration')),
                      content_type="text/plain")
    @bucket_operation
    def PUT(self, req):
        """
        Handles PUT Bucket versioning.
        """
        raise S3NotImplemented()
| {
"content_hash": "0a7934b673767c651b29892f492f2609",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 72,
"avg_line_length": 26.96969696969697,
"alnum_prop": 0.6550561797752809,
"repo_name": "tumf/swift3",
"id": "c5da917afae6e145ea93c9f2709b4f254b0a081a",
"size": "1486",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "swift3/controllers/versioning.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "506719"
},
{
"name": "Shell",
"bytes": "5143"
}
],
"symlink_target": ""
} |
import WebIDL
def WebIDLTest(parser, harness):
    """Check that an enum containing duplicate string values is rejected.

    Args:
        parser: the WebIDL parser under test.
        harness: harness used to record pass/fail results via ok().
    """
    try:
        parser.parse("""
            enum TestEnumDuplicateValue {
              "",
              ""
            };
        """)
        harness.ok(False, "Should have thrown!")
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are not swallowed and misreported as a passing test.
        harness.ok(True, "Enum TestEnumDuplicateValue should throw")
| {
"content_hash": "26681c121ee92aba0653ba3947f97956",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 68,
"avg_line_length": 24.384615384615383,
"alnum_prop": 0.5173501577287066,
"repo_name": "sergecodd/FireFox-OS",
"id": "51205d209e731e1f30a40877bccfd027724716cd",
"size": "317",
"binary": false,
"copies": "276",
"ref": "refs/heads/master",
"path": "B2G/gecko/dom/bindings/parser/tests/test_enum_duplicate_values.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "443"
},
{
"name": "ApacheConf",
"bytes": "85"
},
{
"name": "Assembly",
"bytes": "5123438"
},
{
"name": "Awk",
"bytes": "46481"
},
{
"name": "Batchfile",
"bytes": "56250"
},
{
"name": "C",
"bytes": "101720951"
},
{
"name": "C#",
"bytes": "38531"
},
{
"name": "C++",
"bytes": "148896543"
},
{
"name": "CMake",
"bytes": "23541"
},
{
"name": "CSS",
"bytes": "2758664"
},
{
"name": "DIGITAL Command Language",
"bytes": "56757"
},
{
"name": "Emacs Lisp",
"bytes": "12694"
},
{
"name": "Erlang",
"bytes": "889"
},
{
"name": "FLUX",
"bytes": "34449"
},
{
"name": "GLSL",
"bytes": "26344"
},
{
"name": "Gnuplot",
"bytes": "710"
},
{
"name": "Groff",
"bytes": "447012"
},
{
"name": "HTML",
"bytes": "43343468"
},
{
"name": "IDL",
"bytes": "1455122"
},
{
"name": "Java",
"bytes": "43261012"
},
{
"name": "JavaScript",
"bytes": "46646658"
},
{
"name": "Lex",
"bytes": "38358"
},
{
"name": "Logos",
"bytes": "21054"
},
{
"name": "Makefile",
"bytes": "2733844"
},
{
"name": "Matlab",
"bytes": "67316"
},
{
"name": "Max",
"bytes": "3698"
},
{
"name": "NSIS",
"bytes": "421625"
},
{
"name": "Objective-C",
"bytes": "877657"
},
{
"name": "Objective-C++",
"bytes": "737713"
},
{
"name": "PHP",
"bytes": "17415"
},
{
"name": "Pascal",
"bytes": "6780"
},
{
"name": "Perl",
"bytes": "1153180"
},
{
"name": "Perl6",
"bytes": "1255"
},
{
"name": "PostScript",
"bytes": "1139"
},
{
"name": "PowerShell",
"bytes": "8252"
},
{
"name": "Protocol Buffer",
"bytes": "26553"
},
{
"name": "Python",
"bytes": "8453201"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3481"
},
{
"name": "Ruby",
"bytes": "5116"
},
{
"name": "Scilab",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "3383832"
},
{
"name": "SourcePawn",
"bytes": "23661"
},
{
"name": "TeX",
"bytes": "879606"
},
{
"name": "WebIDL",
"bytes": "1902"
},
{
"name": "XSLT",
"bytes": "13134"
},
{
"name": "Yacc",
"bytes": "112744"
}
],
"symlink_target": ""
} |
##
# Import Modules
#
from __future__ import print_function
import xml.dom.minidom
import codecs
from Common.LongFilePathSupport import OpenLongFilePath as open
## Create an element of XML
#
# @param Name           Tag name of the new element
# @param String         Text content; skipped when empty or None
# @param NodeList       List of [Key, Value] pairs (become child elements
#                       with text content) or ready-made DOM nodes
# @param AttributeList  List of [Key, Value] attribute pairs
#
# @revel Element
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
    Doc = xml.dom.minidom.Document()
    Element = Doc.createElement(Name)
    # Optional text content is attached first, before any child nodes.
    if String != '' and String is not None:
        Element.appendChild(Doc.createTextNode(String))
    for Item in NodeList:
        if not isinstance(Item, type([])):
            # Already a DOM node; attach it directly.
            Element.appendChild(Item)
            continue
        Key = Item[0]
        Value = Item[1]
        # Skip pairs with an empty/None key or value.
        if Key != '' and Key is not None and Value != '' and Value is not None:
            Node = Doc.createElement(Key)
            Node.appendChild(Doc.createTextNode(Value))
            Element.appendChild(Node)
    for Item in AttributeList:
        Key = Item[0]
        Value = Item[1]
        if Key != '' and Key is not None and Value != '' and Value is not None:
            Element.setAttribute(Key, Value)
    return Element
## Get a list of XML nodes using XPath style syntax.
#
# Return a list of XML DOM nodes from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom     The root XML DOM node.
# @param String  A XPath style path, e.g. "/Root/Child".
#
# @revel Nodes   A list of XML nodes matching the XPath style String.
#
def XmlList(Dom, String):
    if String is None or String == "" or Dom is None or Dom == "":
        return []
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    if String[0] == "/":
        String = String[1:]
    TagList = String.split('/')
    # Breadth-first walk: at each path segment keep the matching element
    # nodes; descend into children for intermediate segments, collect the
    # nodes themselves at the final segment.
    Nodes = [Dom]
    Last = len(TagList) - 1
    for Depth, Tag in enumerate(TagList):
        Matched = []
        for Node in Nodes:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == Tag:
                if Depth < Last:
                    Matched.extend(Node.childNodes)
                else:
                    Matched.append(Node)
        Nodes = Matched
    return Nodes
## Get a single XML node using XPath style syntax.
#
# Return a single XML DOM node from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom     The root XML DOM node.
# @param String  A XPath style path.
#
# @revel Node    A single XML node matching the XPath style String.
#
def XmlNode(Dom, String):
    if String is None or String == "" or Dom is None or Dom == "":
        return ""
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    if String[0] == "/":
        String = String[1:]
    TagList = String.split('/')
    Last = len(TagList) - 1
    # Descend one path segment at a time, always following the first
    # matching element at each level; return it at the final segment.
    Candidates = [Dom]
    for Depth, Tag in enumerate(TagList):
        for Node in Candidates:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == Tag:
                if Depth == Last:
                    return Node
                Candidates = Node.childNodes
                break
    return ""
## Get a single XML element using XPath style syntax.
#
# Return the stripped text of a single XML element from the root Dom
# specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom     The root XML DOM object.
# @param String  A XPath style path.
#
# @revel Element An XML element text matching the XPath style String.
#
def XmlElement(Dom, String):
    try:
        return XmlNode(Dom, String).firstChild.data.strip()
    except AttributeError:
        # XmlNode returned "" (no match) or the node has no text child.
        # Narrowed from a bare except so unrelated errors are not hidden.
        return ""
## Get a single XML element of the current node.
#
# Return the stripped text of the first child of the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom     The root XML DOM object.
#
# @revel Element The text content of the node, or "".
#
def XmlElementData(Dom):
    try:
        return Dom.firstChild.data.strip()
    except AttributeError:
        # Dom is "" / None, or has no text child. Narrowed from a bare
        # except so unrelated errors are not hidden.
        return ""
## Get a list of XML elements using XPath style syntax.
#
# Return a list of XML element texts from the root Dom specified by XPath
# String. If the input Dom or String is not valid, an empty list is returned.
#
# @param Dom      The root XML DOM object.
# @param String   A XPath style path.
#
# @revel Elements A list of XML element texts matching the XPath style String.
#
def XmlElementList(Dom, String):
    # list(...) preserves the Python 2 semantics of map() on Python 3,
    # where map() returns a lazy iterator instead of a list.
    return list(map(XmlElementData, XmlList(Dom, String)))
## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom,
# stripped of surrounding whitespace.
# If the input Dom or Attribute is not valid, then an empty string is returned.
#
# @param Dom        The root XML DOM object.
# @param Attribute  The name of Attribute.
#
# @revel Element    The attribute value, or '' when absent/invalid.
#
def XmlAttribute(Dom, Attribute):
    try:
        return Dom.getAttribute(Attribute).strip()
    except AttributeError:
        # Dom is not an element node. Narrowed from a bare except so
        # unrelated errors are not hidden.
        return ''
## Get the XML node name of the current node.
#
# Return the node name of the current root Dom, stripped of whitespace.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom     The root XML DOM object.
#
# @revel Element The node name, or '' when Dom is invalid.
#
def XmlNodeName(Dom):
    try:
        return Dom.nodeName.strip()
    except AttributeError:
        # Dom is "" / None or otherwise not a DOM node. Narrowed from a
        # bare except so unrelated errors are not hidden.
        return ''
## Parse an XML file.
#
# Parse the input XML file named FileName and return the XML DOM it stands
# for. If the input File is not a valid XML file, then an empty string is
# returned.
#
# @param FileName  The XML file name.
#
# @revel Dom       The Dom object achieved from the XML file, or "".
#
def XmlParseFile(FileName):
    try:
        # 'with' guarantees the file handle is closed even when parsing
        # raises (the original leaked the handle in that case).
        with codecs.open(FileName, encoding='utf_8_sig') as XmlFile:
            return xml.dom.minidom.parse(XmlFile)
    except Exception as X:
        print(X)
        return ""
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Smoke-test CreateXmlElement and pretty-print the result.
    A = CreateXmlElement('AAA', 'CCC', [['AAA', '111'], ['BBB', '222']], [['A', '1'], ['B', '2']])
    B = CreateXmlElement('ZZZ', 'CCC', [['XXX', '111'], ['YYY', '222']], [['A', '1'], ['B', '2']])
    # Fix: the original called the undefined name CreateXmlList, which raised
    # NameError. Nest A and B under a new element via CreateXmlElement, and
    # pass attributes as the [Key, Value] pairs it expects.
    C = CreateXmlElement('DDD', 'EEE', [A, B], [['FFF', '1'], ['GGG', '2']])
    print(C.toprettyxml(indent=" "))
| {
"content_hash": "800707194b57e70a83286141c4bea283",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 99,
"avg_line_length": 31.862385321100916,
"alnum_prop": 0.5858047797293406,
"repo_name": "google/google-ctf",
"id": "00cbc4e55e529fef51e4141a5266a494b948b5bf",
"size": "7617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/edk2/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AIDL",
"bytes": "508"
},
{
"name": "Assembly",
"bytes": "107617"
},
{
"name": "BASIC",
"bytes": "6068"
},
{
"name": "Batchfile",
"bytes": "1032"
},
{
"name": "Blade",
"bytes": "14530"
},
{
"name": "C",
"bytes": "1481904"
},
{
"name": "C++",
"bytes": "2139472"
},
{
"name": "CMake",
"bytes": "11595"
},
{
"name": "CSS",
"bytes": "172375"
},
{
"name": "Dart",
"bytes": "6282"
},
{
"name": "Dockerfile",
"bytes": "232352"
},
{
"name": "EJS",
"bytes": "92308"
},
{
"name": "Emacs Lisp",
"bytes": "2668"
},
{
"name": "GDB",
"bytes": "273"
},
{
"name": "GLSL",
"bytes": "33392"
},
{
"name": "Go",
"bytes": "3031142"
},
{
"name": "HTML",
"bytes": "467647"
},
{
"name": "Java",
"bytes": "174199"
},
{
"name": "JavaScript",
"bytes": "2643200"
},
{
"name": "Lua",
"bytes": "5944"
},
{
"name": "Makefile",
"bytes": "149152"
},
{
"name": "NSIS",
"bytes": "2800"
},
{
"name": "Nix",
"bytes": "139"
},
{
"name": "PHP",
"bytes": "311900"
},
{
"name": "Perl",
"bytes": "32742"
},
{
"name": "Pug",
"bytes": "8752"
},
{
"name": "Python",
"bytes": "1756592"
},
{
"name": "Red",
"bytes": "188"
},
{
"name": "Rust",
"bytes": "541267"
},
{
"name": "Sage",
"bytes": "39814"
},
{
"name": "Shell",
"bytes": "382149"
},
{
"name": "Smali",
"bytes": "2316656"
},
{
"name": "Starlark",
"bytes": "8216"
},
{
"name": "SystemVerilog",
"bytes": "16466"
},
{
"name": "VCL",
"bytes": "895"
},
{
"name": "Verilog",
"bytes": "7230"
},
{
"name": "Vim Script",
"bytes": "890"
},
{
"name": "Vue",
"bytes": "10248"
}
],
"symlink_target": ""
} |
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import exception
from nova.openstack.common.gettextutils import _
class ServerMetadataController(wsgi.Controller):
    """The server metadata API controller for the OpenStack API."""
    def __init__(self):
        self.compute_api = compute.API()
        super(ServerMetadataController, self).__init__()
    def _get_metadata(self, context, server_id):
        """Fetch the instance's metadata as a plain dict.

        Raises HTTPNotFound when the instance does not exist.
        """
        try:
            server = self.compute_api.get(context, server_id)
            meta = self.compute_api.get_instance_metadata(context, server)
        except exception.InstanceNotFound:
            msg = _('Server does not exist')
            raise exc.HTTPNotFound(explanation=msg)
        # Copy into a plain dict so callers don't hold the compute object.
        meta_dict = {}
        for key, value in meta.iteritems():
            meta_dict[key] = value
        return meta_dict
    @extensions.expected_errors(404)
    @wsgi.serializers(xml=common.MetadataTemplate)
    def index(self, req, server_id):
        """Returns the list of metadata for a given instance."""
        context = req.environ['nova.context']
        return {'metadata': self._get_metadata(context, server_id)}
    @extensions.expected_errors((400, 404, 409, 413))
    @wsgi.serializers(xml=common.MetadataTemplate)
    @wsgi.deserializers(xml=common.MetadataDeserializer)
    def create(self, req, server_id, body):
        """Merge the supplied metadata into the instance's metadata."""
        if not self.is_valid_body(body, 'metadata'):
            msg = _("Malformed request body")
            raise exc.HTTPBadRequest(explanation=msg)
        metadata = body['metadata']
        context = req.environ['nova.context']
        # delete=False: existing keys not in the request are preserved.
        new_metadata = self._update_instance_metadata(context,
                                                      server_id,
                                                      metadata,
                                                      delete=False)
        return {'metadata': new_metadata}
    @extensions.expected_errors((400, 404, 409, 413))
    @wsgi.serializers(xml=common.MetaItemTemplate)
    @wsgi.deserializers(xml=common.MetaItemDeserializer)
    def update(self, req, server_id, id, body):
        """Update a single metadata item identified by the URI's id."""
        if not self.is_valid_body(body, 'metadata'):
            msg = _("Malformed request body")
            raise exc.HTTPBadRequest(explanation=msg)
        meta_item = body['metadata']
        # The body must contain exactly the key named in the URI.
        if id not in meta_item:
            expl = _('Request body and URI mismatch')
            raise exc.HTTPBadRequest(explanation=expl)
        if len(meta_item) > 1:
            expl = _('Request body contains too many items')
            raise exc.HTTPBadRequest(explanation=expl)
        context = req.environ['nova.context']
        self._update_instance_metadata(context,
                                       server_id,
                                       meta_item,
                                       delete=False)
        return {'metadata': meta_item}
    @extensions.expected_errors((400, 404, 409, 413))
    @wsgi.serializers(xml=common.MetadataTemplate)
    @wsgi.deserializers(xml=common.MetadataDeserializer)
    def update_all(self, req, server_id, body):
        """Replace the instance's metadata with the supplied set."""
        if not self.is_valid_body(body, 'metadata'):
            msg = _("Malformed request body")
            raise exc.HTTPBadRequest(explanation=msg)
        metadata = body['metadata']
        context = req.environ['nova.context']
        # delete=True: existing keys not in the request are removed.
        new_metadata = self._update_instance_metadata(context,
                                                      server_id,
                                                      metadata,
                                                      delete=True)
        return {'metadata': new_metadata}
    def _update_instance_metadata(self, context, server_id, metadata,
                                  delete=False):
        """Apply a metadata change, mapping compute errors to HTTP errors.

        When delete is True, keys absent from ``metadata`` are removed.
        """
        try:
            server = self.compute_api.get(context, server_id)
            return self.compute_api.update_instance_metadata(context,
                                                             server,
                                                             metadata,
                                                             delete)
        except exception.InstanceNotFound:
            msg = _('Server does not exist')
            raise exc.HTTPNotFound(explanation=msg)
        except exception.InvalidMetadata as error:
            raise exc.HTTPBadRequest(explanation=error.format_message())
        except exception.InvalidMetadataSize as error:
            raise exc.HTTPRequestEntityTooLarge(
                explanation=error.format_message())
        except exception.QuotaError as error:
            # Over-quota is reported as 413 with an immediate retry hint.
            raise exc.HTTPRequestEntityTooLarge(
                explanation=error.format_message(),
                headers={'Retry-After': 0})
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                    'update metadata')
    @extensions.expected_errors(404)
    @wsgi.serializers(xml=common.MetaItemTemplate)
    def show(self, req, server_id, id):
        """Return a single metadata item."""
        context = req.environ['nova.context']
        data = self._get_metadata(context, server_id)
        try:
            return {'metadata': {id: data[id]}}
        except KeyError:
            msg = _("Metadata item was not found")
            raise exc.HTTPNotFound(explanation=msg)
    @extensions.expected_errors((404, 409))
    @wsgi.response(204)
    def delete(self, req, server_id, id):
        """Deletes an existing metadata."""
        context = req.environ['nova.context']
        # Check existence first so a missing key yields 404, not a no-op.
        metadata = self._get_metadata(context, server_id)
        if id not in metadata:
            msg = _("Metadata item was not found")
            raise exc.HTTPNotFound(explanation=msg)
        try:
            server = self.compute_api.get(context, server_id)
            self.compute_api.delete_instance_metadata(context, server, id)
        except exception.InstanceNotFound:
            msg = _('Server does not exist')
            raise exc.HTTPNotFound(explanation=msg)
        except exception.InstanceInvalidState as state_error:
            common.raise_http_conflict_for_instance_invalid_state(state_error,
                    'delete metadata')
class ServerMetadata(extensions.V3APIExtensionBase):
    """Server Metadata API."""
    # Extension descriptors consumed by the v3 extension loader.
    name = "Server Metadata"
    alias = "server-metadata"
    namespace = "http://docs.openstack.org/compute/core/server_metadata/v3"
    version = 1
    def get_resources(self):
        """Register /servers/{server_id}/metadata under the servers API."""
        parent = {'member_name': 'server',
                  'collection_name': 'servers'}
        resources = [extensions.ResourceExtension('metadata',
                                                  ServerMetadataController(),
                                                  member_name='server_meta',
                                                  parent=parent,
                                                  custom_routes_fn=
                                                  self.server_metadata_map
                                                  )]
        return resources
    def get_controller_extensions(self):
        """This plugin adds no controller extensions."""
        return []
    def server_metadata_map(self, mapper, wsgi_resource):
        # Route PUT on the metadata collection to update_all (full replace)
        # instead of the default per-member update action.
        mapper.connect("metadata", "/servers/{server_id}/metadata",
                       controller=wsgi_resource,
                       action='update_all', conditions={"method": ['PUT']})
| {
"content_hash": "dec211294d09555232fc51729748004c",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 78,
"avg_line_length": 40.11764705882353,
"alnum_prop": 0.5598507064782725,
"repo_name": "plumgrid/plumgrid-nova",
"id": "85556acd60b2a221c504fd32f5a2cdf31667f7d5",
"size": "8183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/api/openstack/compute/plugins/v3/server_metadata.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11944269"
},
{
"name": "Shell",
"bytes": "17148"
}
],
"symlink_target": ""
} |
import copy
import json
import six
from jinja2 import Template, Environment, StrictUndefined, meta, exceptions
from st2common import log as logging
from st2common.constants.action import ACTION_KV_PREFIX
from st2common.constants.system import SYSTEM_KV_PREFIX
from st2common.exceptions import actionrunner
from st2common.services.keyvalues import KeyValueLookup
from st2common.util.casts import get_cast
from st2common.util.compat import to_unicode
LOG = logging.getLogger(__name__)
__all__ = [
'get_resolved_params',
'get_rendered_params',
'get_finalized_params',
]
def _split_params(runner_parameters, action_parameters, mixed_params):
    """Partition ``mixed_params`` into (runner params, action params).

    A key present in both schemas is treated as a runner parameter.
    """
    def pf(params, skips):
        # Note: this closure filters ``mixed_params`` from the enclosing
        # scope; ``params``/``skips`` only select which keys to keep.
        result = {k: v for k, v in six.iteritems(mixed_params)
                  if k in params and k not in skips}
        return result
    return (pf(runner_parameters, {}), pf(action_parameters, runner_parameters))
def _get_resolved_runner_params(runner_parameters, action_parameters,
                                actionexec_runner_parameters):
    """Resolve final runner parameter values.

    Precedence (lowest to highest): runner schema default, action schema
    default, value supplied on the execution. An ``immutable`` flag at a
    level stops overrides from the levels above it.
    """
    # Runner parameters should use the defaults from the RunnerType object.
    # The runner parameter defaults may be overridden by values provided in
    # the Action and liveaction.
    # Create runner parameter by merging default values with dynamic values
    resolved_params = {k: v['default'] if 'default' in v else None
                       for k, v in six.iteritems(runner_parameters)}
    # pick overrides from action_parameters & actionexec_runner_parameters
    for param_name, param_value in six.iteritems(runner_parameters):
        # No override if param is immutable
        if param_value.get('immutable', False):
            continue
        # Check if param exists in action_parameters and if it has a default value then
        # pickup the override.
        if param_name in action_parameters:
            action_param = action_parameters[param_name]
            # NOTE(review): a falsy default (0, '', False) is skipped here
            # because of the truthiness test — confirm this is intentional.
            if action_param.get('default', False):
                resolved_params[param_name] = action_param['default']
            # No further override (from liveaction) if param is immutable
            if action_param.get('immutable', False):
                continue
        # Finally pick up override from actionexec_runner_parameters
        if param_name in actionexec_runner_parameters:
            resolved_params[param_name] = actionexec_runner_parameters[param_name]
    return resolved_params
def _get_resolved_action_params(runner_parameters, action_parameters,
                                actionexec_action_parameters):
    """Resolve final action parameter values.

    Keys that also appear in the runner schema are excluded — those are
    resolved by _get_resolved_runner_params. ``immutable`` blocks the
    execution-supplied override.
    """
    # Create action parameters by merging default values with dynamic values
    resolved_params = {k: v['default'] if 'default' in v else None
                       for k, v in six.iteritems(action_parameters)
                       if k not in runner_parameters}
    # pick overrides from actionexec_action_parameters
    for param_name, param_value in six.iteritems(action_parameters):
        # No override if param is immutable
        if param_value.get('immutable', False):
            continue
        if param_name in actionexec_action_parameters and param_name not in runner_parameters:
            resolved_params[param_name] = actionexec_action_parameters[param_name]
    return resolved_params
def get_resolved_params(runnertype_parameter_info, action_parameter_info, actionexec_parameters):
    '''
    Fully resolve parameter values from the runner type, the action and the
    action execution. Resolution combines defaults, immutability rules and
    user supplied values into the final (runner, action) parameter dicts.
    '''
    # Split the user-supplied execution values into runner vs. action
    # buckets first, then resolve each bucket against its schema.
    exec_runner_params, exec_action_params = _split_params(
        runnertype_parameter_info, action_parameter_info, actionexec_parameters)
    return (_get_resolved_runner_params(runnertype_parameter_info,
                                        action_parameter_info,
                                        exec_runner_params),
            _get_resolved_action_params(runnertype_parameter_info,
                                        action_parameter_info,
                                        exec_action_params))
def _is_template(template_str):
    """Return True when *template_str* contains Jinja template markup."""
    template_str = to_unicode(template_str)
    try:
        # A pure literal renders to itself against an empty context; any
        # difference means the string contains template constructs.
        return Template(template_str).render({}) != template_str
    except exceptions.UndefinedError:
        # Rendering referenced an undefined variable — definitely a template.
        return True
def _renderable_context_param_split(action_parameters, runner_parameters, base_context=None):
    """Split params into (templates to render, literal render context).

    NOTE(review): this module is Python 2 code — ``unicode`` below does not
    exist on Python 3.
    """
    # To render the params it is necessary to combine the params together so that cross
    # parameter category references are resolved.
    renderable_params = {}
    # shallow copy since this will be updated
    context_params = copy.copy(base_context) if base_context else {}
    def do_render_context_split(source_params):
        '''
        Will split the supplied source_params into renderable_params and context_params. As part of
        the split also makes sure that the all params are essentially strings.
        '''
        for k, v in six.iteritems(source_params):
            renderable_v = v
            # dict and list to be converted to str
            if isinstance(renderable_v, dict) or isinstance(renderable_v, list):
                renderable_v = json.dumps(renderable_v)
            # only str can contain templates
            if (isinstance(renderable_v, str) or isinstance(renderable_v, unicode)) and \
                    _is_template(renderable_v):
                renderable_params[k] = renderable_v
            elif isinstance(v, dict) or isinstance(v, list):
                # For context use the renderable value for dict and list params. The template
                # rendering by jinja yields a non json.loads compatible value leading to issues
                # while performing casts.
                context_params[k] = renderable_v
            else:
                # For context use the original value.
                context_params[k] = v
    # Action params first, then runner params — runner values win on key
    # collisions since they are written last.
    do_render_context_split(action_parameters)
    do_render_context_split(runner_parameters)
    return (renderable_params, context_params)
def _check_availability(param, param_dependencies, renderable_params, context):
for dependency in param_dependencies:
if dependency not in renderable_params and dependency not in context:
return False
return True
def _check_cyclic(dep_chain, dependencies):
    """Depth-first check that *dep_chain* cannot be extended into a cycle.

    Returns False when a cycle is found; in that case ``dep_chain`` is left
    holding the offending chain (callers include it in the error message).
    Returns True when no cycle is reachable from the chain's last element.
    """
    last_idx = len(dep_chain) - 1
    last_value = dep_chain[last_idx]
    for dependency in dependencies.get(last_value, []):
        # Revisiting any element already on the chain means a cycle.
        if dependency in dep_chain:
            dep_chain.append(dependency)
            return False
        dep_chain.append(dependency)
        if not _check_cyclic(dep_chain, dependencies):
            return False
        # Backtrack before exploring the next sibling dependency.
        dep_chain.pop()
    return True
def _validate_dependencies(renderable_params, context):
    '''
    Validates dependencies between the parameters.
    e.g.
    {
        'a': '{{b}}',
        'b': '{{a}}'
    }
    In this example 'a' requires 'b' for template rendering and vice-versa. There is no way for
    these templates to be rendered and will be flagged with an ActionRunnerException.
    '''
    env = Environment(undefined=StrictUndefined)
    # Build a map of param name -> set of template variables it references.
    dependencies = {}
    for k, v in six.iteritems(renderable_params):
        template_ast = env.parse(v)
        dependencies[k] = meta.find_undeclared_variables(template_ast)
    for k, v in six.iteritems(dependencies):
        # Every referenced variable must resolve to another renderable
        # param or a context value.
        if not _check_availability(k, v, renderable_params, context):
            msg = 'Dependecy unsatisfied - %s: %s.' % (k, v)
            raise actionrunner.ActionRunnerException(msg)
        # Then verify no reference chain starting at k loops back on itself.
        dep_chain = []
        dep_chain.append(k)
        if not _check_cyclic(dep_chain, dependencies):
            msg = 'Cyclic dependecy found - %s.' % dep_chain
            raise actionrunner.ActionRunnerException(msg)
def _do_render_params(renderable_params, context):
    '''
    Will render the params per the context and will return best attempt to render. Render attempts
    with missing params will leave blanks.
    '''
    if not renderable_params:
        return renderable_params
    _validate_dependencies(renderable_params, context)
    env = Environment(undefined=StrictUndefined)
    # Successfully rendered values accumulate here (seeded with the
    # context) and feed later render passes.
    rendered_params = {}
    rendered_params.update(context)
    # Maps parameter key to render exception
    # We save the exception so we can throw a more meaningful exception at the end if rendering of
    # some parameter fails
    parameter_render_exceptions = {}
    num_parameters = len(renderable_params) + len(context)
    # After how many attempts at failing to render parameter we should bail out
    max_rendered_parameters_unchanged_count = num_parameters
    rendered_params_unchanged_count = 0
    # Fixed-point loop: keep re-rendering until every param succeeds or no
    # pass makes progress for too long.
    while len(renderable_params) != 0:
        renderable_params_pre_loop = renderable_params.copy()
        for k, v in six.iteritems(renderable_params):
            template = env.from_string(v)
            try:
                rendered = template.render(rendered_params)
                rendered_params[k] = rendered
                if k in parameter_render_exceptions:
                    del parameter_render_exceptions[k]
            except Exception as e:
                # Note: This sucks, but because we support multi level and out of order
                # rendering, we can't throw an exception here yet since the parameter could get
                # rendered in future iteration
                LOG.debug('Failed to render %s: %s', k, v, exc_info=True)
                parameter_render_exceptions[k] = e
        # Drop params that rendered successfully from the pending set.
        for k in rendered_params:
            if k in renderable_params:
                del renderable_params[k]
        if renderable_params_pre_loop == renderable_params:
            rendered_params_unchanged_count += 1
            # Make sure we terminate and don't end up in an infinite loop if we
            # tried to render all the parameters but rendering of some parameters
            # still fails
            if rendered_params_unchanged_count >= max_rendered_parameters_unchanged_count:
                # NOTE(review): ``.keys()[0]`` is Python-2-only; on Python 3
                # dict.keys() is a view and is not subscriptable.
                k = parameter_render_exceptions.keys()[0]
                e = parameter_render_exceptions[k]
                msg = 'Failed to render parameter "%s": %s' % (k, str(e))
                raise actionrunner.ActionRunnerException(msg)
    return rendered_params
def _cast_params(rendered, parameter_schemas):
    """Cast rendered string values to the types declared in their schemas.

    Params without a schema, without a declared type, or without a known
    cast for that type are passed through unchanged.
    """
    casted_params = {}
    for k, v in six.iteritems(rendered):
        # Add uncasted first and then override with casted param. Not all params will end up
        # being cast.
        casted_params[k] = v
        # No casting if the value is None. It leads to weird cases like str(None) = 'None'
        # leading to downstream failures as well as int(None) leading to TypeError.
        if v is None:
            continue
        parameter_schema = parameter_schemas.get(k, None)
        if not parameter_schema:
            continue
        parameter_type = parameter_schema.get('type', None)
        if not parameter_type:
            continue
        cast = get_cast(cast_type=parameter_type)
        if not cast:
            continue
        casted_params[k] = cast(v)
    return casted_params
def get_rendered_params(runner_parameters, action_parameters, action_context,
                        runnertype_parameter_info, action_parameter_info):
    '''
    Renders the templates in runner_parameters and action_parameters. Using the type information
    from *_parameter_info will appropriately cast the parameters.
    '''
    # To render the params it is necessary to combine the params together so that cross
    # parameter category references are also rendered correctly. Particularly in the cases where
    # a runner parameter is overridden in an action it is likely that a runner parameter could
    # depend on an action parameter.
    # Seed the render context with the system key-value store and the
    # action context so templates can reference them.
    render_context = {SYSTEM_KV_PREFIX: KeyValueLookup()}
    render_context[ACTION_KV_PREFIX] = action_context
    renderable_params, context = _renderable_context_param_split(action_parameters,
                                                                 runner_parameters,
                                                                 render_context)
    rendered_params = _do_render_params(renderable_params, context)
    # Merge rendered values and literals back into one flat dict.
    template_free_params = {}
    template_free_params.update(rendered_params)
    template_free_params.update(context)
    # Re-split into runner vs. action buckets and cast per the schemas.
    r_runner_parameters, r_action_parameters = _split_params(runnertype_parameter_info,
                                                             action_parameter_info,
                                                             template_free_params)
    return (_cast_params(r_runner_parameters, runnertype_parameter_info),
            _cast_params(r_action_parameters, action_parameter_info))
def get_finalized_params(runnertype_parameter_info, action_parameter_info, liveaction_parameters,
                         action_context):
    '''
    Produce the final (runner, action) parameter dicts for an execution:

    1. Resolve defaults, immutability rules and user supplied overrides.
    2. Render any Jinja templates and cast values per the schemas.
    '''
    resolved_runner, resolved_action = get_resolved_params(
        runnertype_parameter_info, action_parameter_info, liveaction_parameters)
    # get_rendered_params already returns the (runner, action) tuple.
    return get_rendered_params(resolved_runner, resolved_action, action_context,
                               runnertype_parameter_info, action_parameter_info)
| {
"content_hash": "93c2b9eb0be99d1db70a87546a626c1b",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 99,
"avg_line_length": 43.351851851851855,
"alnum_prop": 0.6403958422326641,
"repo_name": "grengojbo/st2",
"id": "fe3871d220c09bbfb453f8ea7cba3a0e07f96a19",
"size": "14826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "st2actions/st2actions/utils/param_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "21186"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "2091976"
},
{
"name": "Shell",
"bytes": "7518"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
} |
"""
Single Channel Noise Reduction
==============================
Collection of single channel noise reduction (SCNR) algorithms for speech:
- :doc:`Spectral Subtraction <pyroomacoustics.denoise.spectral_subtraction>` [1]_
- :doc:`Subspace Approach <pyroomacoustics.denoise.subspace>` [2]_
- :doc:`Iterative Wiener Filtering <pyroomacoustics.denoise.iterative_wiener>` [3]_
At `this repository <https://github.com/santi-pdp/segan>`_, a deep learning approach in Python can be found.
References
----------
.. [1] M. Berouti, R. Schwartz, and J. Makhoul, *Enhancement of speech corrupted by acoustic noise,*
ICASSP '79. IEEE International Conference on Acoustics, Speech, and Signal Processing, 1979, pp. 208-211.
.. [2] Y. Ephraim and H. L. Van Trees, *A signal subspace approach for speech enhancement,*
IEEE Transactions on Speech and Audio Processing, vol. 3, no. 4, pp. 251-266, Jul 1995.
.. [3] J. Lim and A. Oppenheim, *All-Pole Modeling of Degraded Speech,*
IEEE Transactions on Acoustics, Speech, and Signal Processing 26.3 (1978): 197-210.
"""
from .spectral_subtraction import *
from .subspace import *
from .iterative_wiener import *
| {
"content_hash": "89d75b72db4e9cdcee57597f8b33c168",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 109,
"avg_line_length": 40.172413793103445,
"alnum_prop": 0.7124463519313304,
"repo_name": "LCAV/pyroomacoustics",
"id": "842bdbd2bed8f7cd92001b2b2ab8096e7f0b57c0",
"size": "1165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyroomacoustics/denoise/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "96552"
},
{
"name": "Cython",
"bytes": "2700"
},
{
"name": "Dockerfile",
"bytes": "735"
},
{
"name": "Python",
"bytes": "941773"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration.

    Adds the boolean column 'default_archive_template' (default False) to the
    'main_template' table.  The `models` attribute below is South's frozen
    snapshot of the app's ORM state at generation time; it is data, not live
    code, and should not be edited by hand.
    """
    def forwards(self, orm):
        """Apply the migration: add the new column with a False default."""
        # Adding field 'Template.default_archive_template'
        db.add_column(u'main_template', 'default_archive_template',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)
    def backwards(self, orm):
        """Revert the migration: drop the column added by forwards()."""
        # Deleting field 'Template.default_archive_template'
        db.delete_column(u'main_template', 'default_archive_template')
    # Frozen ORM model definitions (auto-generated by South; do not edit).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'main.approval': {
            'Meta': {'object_name': 'Approval'},
            'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'processed_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
        },
        u'main.channel': {
            'Meta': {'ordering': "['name']", 'object_name': 'Channel'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'image_is_banner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Channel']", 'null': 'True'}),
            'reverse_order': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'main.curatedgroup': {
            'Meta': {'object_name': 'CuratedGroup'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
        },
        u'main.event': {
            'Meta': {'object_name': 'Event'},
            'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'archive_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modified_user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
            'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Participant']", 'symmetrical': 'False'}),
            'pin': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
            'popcorn_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40', 'db_index': 'True'}),
            'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'initiated'", 'max_length': '20', 'db_index': 'True'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
            'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Template']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'transcript': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'upload': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_upload'", 'null': 'True', 'to': u"orm['uploads.Upload']"})
        },
        u'main.eventassignment': {
            'Meta': {'object_name': 'EventAssignment'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'unique': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'locations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Location']", 'symmetrical': 'False'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
        },
        u'main.eventhitstats': {
            'Meta': {'object_name': 'EventHitStats'},
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'unique': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'shortcode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'total_hits': ('django.db.models.fields.IntegerField', [], {})
        },
        u'main.eventoldslug': {
            'Meta': {'object_name': 'EventOldSlug'},
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215'})
        },
        u'main.eventrevision': {
            'Meta': {'object_name': 'EventRevision'},
            'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
            'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
        },
        u'main.eventtweet': {
            'Meta': {'object_name': 'EventTweet'},
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'include_placeholder': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'send_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'sent_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
            'tweet_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
        },
        u'main.location': {
            'Meta': {'ordering': "['name']", 'object_name': 'Location'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'timezone': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        },
        u'main.locationdefaultenvironment': {
            'Meta': {'unique_together': "(('location', 'privacy', 'template'),)", 'object_name': 'LocationDefaultEnvironment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']"}),
            'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
            'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Template']"}),
            'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {})
        },
        u'main.participant': {
            'Meta': {'object_name': 'Participant'},
            'blog_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'clear_token': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
            'cleared': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '15', 'db_index': 'True'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'participant_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
            'department': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'irc': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'photo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'role': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '65', 'blank': 'True'}),
            'team': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'topic_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'twitter': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
        },
        u'main.suggestedevent': {
            'Meta': {'object_name': 'SuggestedEvent'},
            'accepted': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'null': 'True', 'blank': 'True'}),
            'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'first_submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Participant']", 'symmetrical': 'False'}),
            'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'}),
            'popcorn_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
            'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'review_comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'upcoming': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'upload': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'upload'", 'null': 'True', 'to': u"orm['uploads.Upload']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'main.suggestedeventcomment': {
            'Meta': {'object_name': 'SuggestedEventComment'},
            'comment': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.SuggestedEvent']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
        },
        u'main.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'main.template': {
            'Meta': {'ordering': "['name']", 'object_name': 'Template'},
            'content': ('django.db.models.fields.TextField', [], {}),
            'default_archive_template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'default_popcorn_template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'main.urlmatch': {
            'Meta': {'object_name': 'URLMatch'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'string': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'use_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'main.urltransform': {
            'Meta': {'object_name': 'URLTransform'},
            'find': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'match': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.URLMatch']"}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'replace_with': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'main.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'contributor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        u'main.vidlysubmission': {
            'Meta': {'object_name': 'VidlySubmission'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
            'hd': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'submission_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'submission_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 6, 11, 0, 0)'}),
            'tag': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'token_protection': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        u'uploads.upload': {
            'Meta': {'object_name': 'Upload'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event'", 'null': 'True', 'to': u"orm['main.Event']"}),
            'file_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'size': ('django.db.models.fields.BigIntegerField', [], {}),
            'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suggested_event'", 'null': 'True', 'to': u"orm['main.SuggestedEvent']"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '400'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        }
    }
complete_apps = ['main'] | {
"content_hash": "3ceec58cc1d3ba009fda4d7bbc4616ed",
"timestamp": "",
"source": "github",
"line_count": 302,
"max_line_length": 209,
"avg_line_length": 83.93377483443709,
"alnum_prop": 0.5486823418021146,
"repo_name": "anu7495/airmozilla",
"id": "ec356f4dbb6cb25b0421d36f98b3ab1e8dccb9bf",
"size": "25372",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "airmozilla/main/migrations/0040_auto__add_field_template_default_archive_template.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4527"
},
{
"name": "Brightscript",
"bytes": "67473"
},
{
"name": "CSS",
"bytes": "1714450"
},
{
"name": "HTML",
"bytes": "2400800"
},
{
"name": "JavaScript",
"bytes": "3197024"
},
{
"name": "Makefile",
"bytes": "13548"
},
{
"name": "Puppet",
"bytes": "6677"
},
{
"name": "Python",
"bytes": "3404929"
},
{
"name": "Ruby",
"bytes": "4978"
},
{
"name": "Shell",
"bytes": "3536"
},
{
"name": "Smarty",
"bytes": "2081"
}
],
"symlink_target": ""
} |
"""
Note that it has been proved that the Kepler-Heisenberg problem is not fully integrable:
Andrzej Maciejewski, Tomasz Stachowiak - 2021 Feb
https://www.researchsquare.com/article/rs-272845/v1
"""
import copy
import numpy as np
import sympy as sp
import sympy.core.symbol
import vorpy.manifold
import vorpy.symbolic
import vorpy.symplectic
import vorpy.tensor
class FancyException(Exception): # TODO: Rename
    """Exception raised by this module's sanity tests when a check fails."""
def lie_bracket__test ():
    """Check that the coordinate formula for the Lie bracket of two generic
    vector fields agrees with the commutator-of-derivations definition when
    both are applied to a generic function of the coordinates."""
    dim = 3
    coords = vorpy.symbolic.tensor('X', (dim,))
    flat_coords = list(coords.reshape(-1))
    # Two generic vector fields, expressed componentwise in coordinates.
    field_a = np.array([sp.Function(f'A_{i}')(*flat_coords) for i in range(dim)])
    field_b = np.array([sp.Function(f'B_{i}')(*flat_coords) for i in range(dim)])
    bracket_a_b = vorpy.manifold.lie_bracket(field_a, field_b, coords)
    test_function = sp.Function('f')(*flat_coords)

    def apply_field (field, function):
        # Shorthand: a vector field acting as a derivation on a function.
        return vorpy.manifold.apply_vector_field_to_function(field, function, coords)

    # The smart way: the coordinate expression of [A,B] applied to f.
    computed_value = apply_field(bracket_a_b, test_function)
    # The definitional way: the commutator A(B(f)) - B(A(f)).
    expected_value = apply_field(field_a, apply_field(field_b, test_function)) - apply_field(field_b, apply_field(field_a, test_function))
    error = (computed_value - expected_value).simplify()
    if error != 0:
        raise FancyException(f'Error in computed vs expected Lie bracket value was not zero, but instead was {error}')
    print('lie_bracket__test passed')
def phase_space_coordinates ():
    """Return the canonical coordinates as a 2x3 array of symbols:
    row 0 is the configuration (x, y, z), row 1 is the momentum (p_x, p_y, p_z)."""
    position = (sp.var('x'), sp.var('y'), sp.var('z'))
    momentum = (sp.var('p_x'), sp.var('p_y'), sp.var('p_z'))
    return np.array((position, momentum))
def P_x (qp):
    """First momentum component P_x = p_x - y*p_z/2, where qp is the 2x3
    phase-space array ((x,y,z),(p_x,p_y,p_z))."""
    _, y, _ = qp[0,:]
    p_x, _, p_z = qp[1,:]
    return p_x - y*p_z/2
def P_y (qp):
    """Second momentum component P_y = p_y + x*p_z/2, where qp is the 2x3
    phase-space array ((x,y,z),(p_x,p_y,p_z))."""
    x, _, _ = qp[0,:]
    _, p_y, p_z = qp[1,:]
    return p_y + x*p_z/2
class FancyFunction(sp.Function):
    """Base class for named symbolic functions that print compactly.

    Subclasses provide classmethods `_name` (the display name) and `_value`
    (the defining expression in terms of the arguments)."""
    def _sympystr (self, printer):
        """Override printer to suppress function evaluation notation; the arguments are understood."""
        if all(arg.is_symbol for arg in self.args):
            # All arguments are plain symbols: print just the bare name.
            return self._name()
        else:
            # Some argument was substituted: show the actual arguments.
            return f'{self._name()}({",".join(str(arg) for arg in self.args)})'
    def fdiff (self, argindex):
        # Derivative w.r.t. the argindex-th argument, computed by differentiating
        # the defining expression (sympy's argindex is 1-based).
        return self._value(*self.args).diff(self.args[argindex-1])
    def _expanded (self):
        # The fully written-out defining expression of this function.
        return self._value(*self.args)
class P_x__(FancyFunction):
    """Symbolic function P_x(x,y,z,p_x,p_y,p_z) = p_x - y*p_z/2."""
    @classmethod
    def _name (cls):
        return 'P_x'
    @classmethod
    def _value (cls, x, y, z, p_x, p_y, p_z):
        """Return the expression that defines this function."""
        return p_x - y*p_z/2
    @classmethod
    def eval (cls, x, y, z, p_x, p_y, p_z):
        """Only evaluate special cases where P_x simplifies."""
        value = cls._value(x,y,z,p_x,p_y,p_z)
        # Evaluate eagerly only when some participating quantity is numeric.
        if value.is_number or p_x.is_number or y.is_number or p_z.is_number:
            return value
        # NOTE: This function intentionally does NOT return if there is no simplification.
class P_y__(FancyFunction):
    """Symbolic function P_y(x,y,z,p_x,p_y,p_z) = p_y + x*p_z/2."""
    @classmethod
    def _name (cls):
        return 'P_y'
    @classmethod
    def _value (cls, x, y, z, p_x, p_y, p_z):
        """Return the expression that defines this function."""
        return p_y + x*p_z/2
    @classmethod
    def eval (cls, x, y, z, p_x, p_y, p_z):
        # Only evaluate special cases where P_y simplifies, i.e. when some
        # participating quantity is numeric.
        value = cls._value(x,y,z,p_x,p_y,p_z)
        if value.is_number or p_y.is_number or x.is_number or p_z.is_number:
            return value
        # NOTE: This function intentionally does NOT return if there is no simplification.
class r_squared__(FancyFunction):
    """Symbolic function r_squared(x,y) = x**2 + y**2."""
    @classmethod
    def _name (cls):
        return 'r_squared'
    @classmethod
    def _value (cls, x, y):
        """Return the expression that defines this function."""
        return x**2 + y**2
    @classmethod
    def eval (cls, x, y):
        # Only evaluate special cases where r_squared simplifies, i.e. when
        # either argument (or the whole value) is numeric.
        value = cls._value(x,y)
        if value.is_number or x.is_number or y.is_number:
            return value
        # NOTE: This function intentionally does NOT return if there is no simplification.
class mu__(FancyFunction):
    """Symbolic function mu(x,y,z) = (x**2 + y**2)**2 + 16*z**2."""
    @classmethod
    def _name (cls):
        return 'mu'
    @classmethod
    def _value (cls, x, y, z):
        """Return the expression that defines this function."""
        r_squared_ = r_squared__(x,y)
        return r_squared_**2 + 16*z**2
    @classmethod
    def eval (cls, x, y, z):
        # Only evaluate special cases where mu simplifies: when the radial part
        # r_squared__(x,y) itself evaluates (its eval returns non-None) or z is numeric.
        value = cls._value(x,y,z)
        if value.is_number or r_squared__.eval(x,y) is not None or z.is_number:
            return value
        # NOTE: This function intentionally does NOT return if there is no simplification.
class K__(FancyFunction):
    """Symbolic kinetic energy K = (P_x**2 + P_y**2)/2."""
    @classmethod
    def _name (cls):
        return 'K'
    @classmethod
    def _value (cls, x, y, z, p_x, p_y, p_z):
        """Return the expression that defines this function."""
        return (P_x__(x,y,z,p_x,p_y,p_z)**2 + P_y__(x,y,z,p_x,p_y,p_z)**2)/2
    @classmethod
    def eval (cls, x, y, z, p_x, p_y, p_z):
        # Unlike P_x__/P_y__, this class always evaluates to its defining
        # expression; no unevaluated K(...) form is ever kept.
        value = cls._value(x,y,z,p_x,p_y,p_z)
        # Always evaluate
        return value
class U__(FancyFunction):
    """Symbolic potential energy U = -1/(8*pi*sqrt(mu(x,y,z))).

    The momentum arguments are accepted (for a uniform signature) but unused;
    U depends only on the configuration variables."""
    @classmethod
    def _name (cls):
        return 'U'
    @classmethod
    def _value (cls, x, y, z, p_x, p_y, p_z):
        """Return the expression that defines this function."""
        return -1 / (8*sp.pi*sp.sqrt(mu__(x,y,z)))
    @classmethod
    def eval (cls, x, y, z, p_x, p_y, p_z):
        # This class always evaluates to its defining expression; no
        # unevaluated U(...) form is ever kept.
        value = cls._value(x,y,z,p_x,p_y,p_z)
        # Always evaluate
        return value
class H__(FancyFunction):
    """Symbolic Hamiltonian H = K + U (kinetic plus potential energy)."""
    @classmethod
    def _name (cls):
        return 'H'
    @classmethod
    def _value (cls, x, y, z, p_x, p_y, p_z):
        """Return the expression that defines this function."""
        return K__(x,y,z,p_x,p_y,p_z) + U__(x,y,z,p_x,p_y,p_z)
    @classmethod
    def eval (cls, x, y, z, p_x, p_y, p_z):
        # This class always evaluates to its defining expression; no
        # unevaluated H(...) form is ever kept.
        value = cls._value(x,y,z,p_x,p_y,p_z)
        # Always evaluate
        return value
class J__(FancyFunction):
    """Symbolic function J = x*p_x + y*p_y + 2*z*p_z (matches the J function
    defined later in this module)."""
    @classmethod
    def _name (cls):
        return 'J'
    @classmethod
    def _value (cls, x, y, z, p_x, p_y, p_z):
        """Return the expression that defines this function."""
        return x*p_x + y*p_y + 2*z*p_z
    @classmethod
    def eval (cls, x, y, z, p_x, p_y, p_z):
        # This class always evaluates to its defining expression; no
        # unevaluated J(...) form is ever kept.
        value = cls._value(x,y,z,p_x,p_y,p_z)
        # Always evaluate
        return value
def P_x__test ():
    """Ad-hoc exploratory test: prints how the FancyFunction subclasses print,
    evaluate, and differentiate.  Output is meant to be inspected manually."""
    # TODO: Deprecate this, it's just to test how subclassing sp.Function works.
    qp = phase_space_coordinates()
    x,y,z = qp[0,:]
    p_x,p_y,p_z = qp[1,:]
    #P_x_ = P_x__(qp)
    #P_x_ = P_x__(x,y,z,p_x,p_y,p_z)
    P_x_ = P_x__(*qp.reshape(-1).tolist())
    print(f'P_x_ = {P_x_}')
    print(f'P_x__(x,y,z,p_x,p_y,p_z) = {P_x__(x,y,z,p_x,p_y,p_z)}')
    print(f'P_x__(x,0,z,p_x,p_y,p_z) = {P_x__(x,0,z,p_x,p_y,p_z)}')
    # Exercise fdiff via diff with respect to each coordinate in turn.
    print(f'P_x_.diff(x) = {P_x_.diff(x)}')
    print(f'P_x_.diff(y) = {P_x_.diff(y)}')
    print(f'P_x_.diff(z) = {P_x_.diff(z)}')
    print(f'P_x_.diff(p_x) = {P_x_.diff(p_x)}')
    print(f'P_x_.diff(p_y) = {P_x_.diff(p_y)}')
    print(f'P_x_.diff(p_z) = {P_x_.diff(p_z)}')
    print(f'P_x_.diff(qp) = {P_x_.diff(qp)}')
    # mu depends only on the configuration variables (x,y,z).
    mu_ = mu__(*qp.reshape(-1).tolist()[:3])
    print(f'mu_ = {mu_}, mu_.func = {mu_.func}')
    print(f'mu__(x,y,0) = {mu__(x,y,0)}')
    print(f'mu__(x,0,z) = {mu__(x,0,z)}')
    K = (P_x__(*qp.reshape(-1).tolist())**2 + P_y__(*qp.reshape(-1).tolist())**2)/2
    print(f'K = {K}')
    U = -1 / (8*sp.pi*sp.sqrt(mu_))
    print(f'U = {U}')
    #H = K + U
    H = H__(*qp.reshape(-1).tolist())
    print(f'H = {H}')
    H_diff = H.diff(qp)
    print(f'H.diff(qp) = {H_diff}, type(H.diff(qp)) = {type(H_diff)}')
    dH = vorpy.symbolic.differential(H, qp)
    print(f'dH = {dH}')
    print(f'symplectic gradient of H = {vorpy.symplectic.symplectic_gradient_of(H, qp)}')
def K (qp):
    """Kinetic energy (P_x**2 + P_y**2)/2 of the phase-space point qp,
    with the momentum components P_x = p_x - y*p_z/2 and P_y = p_y + x*p_z/2
    written out inline."""
    x,y,z = qp[0,:]
    p_x,p_y,p_z = qp[1,:]
    P_x_ = p_x - y*p_z/2
    P_y_ = p_y + x*p_z/2
    return (P_x_**2 + P_y_**2)/2
def r_squared (qp):
    """Return x**2 + y**2 for the configuration point (row 0) of the phase-space point qp."""
    q_x, q_y, _ = qp[0, :]
    return q_x*q_x + q_y*q_y
def mu (qp):
    """mu = (x**2 + y**2)**2 + beta*z**2 with beta = 16; the quantity whose inverse
    square root appears in the potential U below."""
    x,y,z = qp[0,:]  # x and y are not used directly; r_squared(qp) covers them.
    beta = sp.Integer(16)
    return r_squared(qp)**2 + beta*z**2
def U (qp):
    """Potential energy: U = -1/(8*pi*sqrt(mu(qp)))."""
    x,y,z = qp[0,:]        # unused; unpacked only for symmetry with the other functions
    p_x,p_y,p_z = qp[1,:]  # unused
    alpha = 1 / (8*sp.pi)
    return -alpha * mu(qp)**sp.Rational(-1,2)
def H (qp):
    """H is the Hamiltonian for the system: kinetic energy plus potential energy."""
    kinetic_energy = K(qp)
    potential_energy = U(qp)
    return kinetic_energy + potential_energy
def H__conservation_test ():
    """
    This test verifies that H is conserved along the flow of H (just a sanity check, this fact
    is easily provable in general).
    """
    qp = phase_space_coordinates()
    #X_H = vorpy.symplectic.symplectic_gradient_of(H(qp), qp)
    H_qp = H__(*qp.reshape(-1).tolist())
    X_H = vorpy.symplectic.symplectic_gradient_of(H_qp, qp)
    # Sanity check: X_H(H) is the derivative of H along its own Hamiltonian flow,
    # so it must vanish identically.
    X_H__H = vorpy.manifold.apply_vector_field_to_function(X_H, H_qp, qp)
    if X_H__H != 0:
        raise FancyException(f'Expected X_H(H) == 0 but instead got {X_H__H}')
    print('H__conservation_test passed')
def p_theta (qp):
    """p_theta is the angular momentum for the system and is conserved along solutions."""
    position = qp[0,:]
    momentum = qp[1,:]
    # x*p_y - y*p_x
    return position[0]*momentum[1] - position[1]*momentum[0]
def p_theta__conservation_test ():
    """
    This test verifies that p_theta is conserved along the flow of H.
    """
    qp = phase_space_coordinates()
    X_H = vorpy.symplectic.symplectic_gradient_of(H(qp), qp)
    # Sanity check: X_H(p_theta) == 0 means p_theta is constant along solutions.
    X_H__p_theta = vorpy.manifold.apply_vector_field_to_function(X_H, p_theta(qp), qp)
    if X_H__p_theta != 0:
        raise FancyException(f'Expected X_H(p_theta) == 0 but instead got {X_H__p_theta}')
    print('p_theta__conservation_test passed')
def J (X):
    """J can be thought of as "dilational momentum" for the system, and is conserved along solutions when H = 0."""
    q = X[0,:]
    p = X[1,:]
    # x*p_x + y*p_y + 2*z*p_z
    return q[0]*p[0] + q[1]*p[1] + 2*q[2]*p[2]
def J__restricted_conservation_test ():
    """This test verifies that J is conserved along the flow of H if restricted to the H = 0 submanifold."""
    qp = phase_space_coordinates()
    H_qp = H(qp)
    X_H = vorpy.symplectic.symplectic_gradient_of(H_qp, qp)
    J_qp = J(qp)
    X_H__J = vorpy.manifold.apply_vector_field_to_function(X_H, J_qp, qp)
    p_z = qp[1,2]
    # Solve for p_z in H_qp == 0; there are two sheets to this solution.
    p_z_solution_v = sp.solve(H_qp, p_z)
    assert len(p_z_solution_v) == 2, f'Expected 2 solutions for p_z in H == 0, but instead got {len(p_z_solution_v)}'
    #print('There are {0} solutions for the equation: {1} = 0'.format(len(p_z_solution_v), H_qp))
    #for i,p_z_solution in enumerate(p_z_solution_v):
        #print('    solution {0}: p_z = {1}'.format(i, p_z_solution))
    # Check conservation on each sheet of the H = 0 submanifold separately.
    for solution_index,p_z_solution in enumerate(p_z_solution_v):
        # We have to copy X_H__J or it will only be a view into X_H__J and will modify the original.
        # The [tuple()] access is to obtain the scalar value out of the
        # sympy.tensor.array.dense_ndim_array.ImmutableDenseNDimArray object that doit() returns.
        X_H__J__restricted = sp.Subs(np.copy(X_H__J), p_z, p_z_solution).doit()[tuple()].simplify()
        #print(f'solution_index = {solution_index}, X_H__J__restricted = {X_H__J__restricted}')
        if X_H__J__restricted != 0:
            raise FancyException(f'Expected X_H__J__restricted == 0 for solution_index = {solution_index}, but actual value was {X_H__J__restricted}')
    print('J__restricted_conservation_test passed')
def J__test ():
    """This test verifies that dJ/dt = 2*H, i.e. X_H(J) - 2*H simplifies to zero identically."""
    qp = phase_space_coordinates()
    qp_ = qp.reshape(-1).tolist()
    x,y,z = qp[0,:]
    p_x,p_y,p_z = qp[1,:]
    P_x_ = P_x__(*qp_)
    P_y_ = P_y__(*qp_)
    mu_ = mu__(x,y,z)
    r_squared_ = r_squared__(x,y)
    H_qp = H__(*qp_)
    X_H = vorpy.symplectic.symplectic_gradient_of(H_qp, qp)
    J_qp = J__(*qp_)
    # Because X_H gives the vector field defining the time derivative of a solution to the dynamics,
    # it follows that X_H applied to J is equal to dJ/dt (where J(t) is J(qp(t)), where qp(t) is a
    # solution to Hamilton's equations).
    X_H__J = vorpy.manifold.apply_vector_field_to_function(X_H, J_qp, qp)
    #print(f'J__test; X_H__J = {X_H__J}')
    #print(f'J__test; 2*H = {sp.expand(2*H_qp)}')
    actual_value = X_H__J - sp.expand(2*H_qp)
    #print(f'J__test; X_H__J - 2*H = {actual_value}')
    # Annoyingly, this doesn't simplify to 0 automatically, so some manual manipulation has to be done.
    # Manipulate the expression to ensure the P_x and P_y terms cancel
    actual_value = sp.collect(actual_value, [P_x_, P_y_])
    #print(f'J__test; after collect P_x, P_y: X_H__J - 2*H = {actual_value}')
    actual_value = sp.Subs(actual_value, [P_x_, P_y_], [P_x_._expanded(), P_y_._expanded()]).doit()
    #print(f'J__test; after subs P_x, P_y: X_H__J - 2*H = {actual_value}')
    # Manipulate the expression to ensure the mu terms cancel
    actual_value = sp.factor_terms(actual_value, clear=True, fraction=True)
    #print(f'J__test; after factor_terms: X_H__J - 2*H = {actual_value}')
    actual_value = sp.collect(actual_value, [r_squared_])
    #print(f'J__test; after collect r_squared_: X_H__J - 2*H = {actual_value}')
    actual_value = sp.Subs(actual_value, [r_squared_._expanded()], [r_squared_]).doit()
    #print(f'J__test; after subs r_squared: X_H__J - 2*H = {actual_value}')
    actual_value = sp.Subs(actual_value, [mu_._expanded()], [mu_]).doit()
    #print(f'J__test; after subs mu: X_H__J - 2*H = {actual_value}')
    if actual_value != 0:
        raise FancyException(f'Expected X_H__J - 2*H == 0, but actual value was {actual_value}')
    print('J__test passed')
def A (qp):
    """
    A is the standard contact form on R^3, regarded as a covector field on T*(R^3):
        A = dz + y/2 * dx - x/2 * dy
    The returned (2,3) array is indexed like qp: row 0 holds the dx, dy, dz components
    and row 1 holds the dp_x, dp_y, dp_z components (all zero, since A has no dp terms).
    """
    x,y,z = qp[0,:]
    p_x,p_y,p_z = qp[1,:]  # unused; unpacked only for symmetry with the other functions
    return np.array((
        (          y/2,          -x/2, sp.Integer(1)),
        (sp.Integer(0), sp.Integer(0), sp.Integer(0)),
    ))
def V (qp):
    """
    If A is the standard contact form in R^3, taken as a differential form in T*(R^3), then V is its symplectic dual.
    A = dz + y/2 * dx - x/2 * dy
    V = del_{p_z} + y/2 * del_{p_x} - x/2 * del_{p_y}
    """
    # The symplectic dual turns the covector field A into a vector field on phase space.
    return vorpy.symplectic.symplectic_dual_of_covector_field(A(qp))
def V__test ():
    """Check V(qp) against its hand-computed component array."""
    qp = phase_space_coordinates()
    x,y,z = qp[0,:]
    p_x,p_y,p_z = qp[1,:]  # unused; unpacked for symmetry with other functions
    expected_value = np.array((
        (sp.Integer(0), sp.Integer(0), sp.Integer(0)),
        (         -y/2,           x/2,            -1),
    ))
    actual_value = V(qp)
    error = actual_value - expected_value
    if not np.all(error == 0):
        raise FancyException(f'Expected V = {expected_value} but it was actually {actual_value}')
    print('V__test passed')
def lie_bracket_of__X_H__V__test ():
    """Verify the hand-computed value of the Lie bracket [X_H, V]."""
    qp = phase_space_coordinates()
    #print(f'H = {H(qp)}')
    #print(f'X_H = {vorpy.symplectic.symplectic_gradient_of(H(qp), qp)}')
    #print(f'A = {A(qp)}')
    #print(f'V = {V(qp)}')
    lb__X_H__V = vorpy.manifold.lie_bracket(vorpy.symplectic.symplectic_gradient_of(H(qp), qp), V(qp), qp)
    #print(f'[X_H,V] = {lb__X_H__V}')
    # NOTE: This has sign opposite from Corey's Weinstein Note PDF file (he has a sign
    # error in computing the symplectic dual of V).
    expected__lb__X_H__V = np.array((
        (sp.Integer(0), sp.Integer(0), sp.Integer(0)),
        (    -P_y(qp),       P_x(qp), sp.Integer(0)),
    ))
    #print(f'expected value = {expected__lb__X_H__V}')
    #print(f'[X_H,V] - expected_value = {lb__X_H__V - expected__lb__X_H__V}')
    if not np.all(lb__X_H__V == expected__lb__X_H__V):
        raise FancyException(f'Expected [X_H,V] = {expected__lb__X_H__V} but it was actually {lb__X_H__V}')
    print('lie_bracket_of__X_H__V__test passed')
"""
Design notes for integral-of-motion search.
Assume F(qp) is a polynomial of degree n for a given n and that F is an integral of motion.
F being an integral of motion is defined by it being conserved along solutions to the system,
and therefore is equivalent to X_H(F) = 0. Thus, represent F as a generic symbolic polynomial,
and then attempt to solve for its coefficients within X_H(F) = 0. Because angular momentum
is a conserved polynomial (in this case of degree 2), this method should find angular momentum.
Additionally, if we restrict the search to the H = 0 manifold, this condition can be applied
after evaluating X_H(F) and before attempting to solve for the coefficients of F. Because J
is conserved in this case (only when H = 0), this method should find J.
"""
#def tensor_power (V, p):
#"""
#Returns the pth tensor power of vector V. This should be a tensor having order p,
#which looks like V \otimes ... \otimes V (with p factors). If p is zero, then this
#returns 1.
#TODO: Implement this for tensors of arbitrary order (especially including 0-tensors).
#"""
#V_order = vorpy.tensor.order(V)
#if V_order != 1:
#raise FancyException(f'Expected V to be a vector (i.e. a 1-tensor), but it was actually a {V_order}-tensor')
#if p < 0:
#raise FancyException(f'Expected p to be a nonnegative integer, but it was actually {p}')
#if p == 0:
#return np.array(1) # TODO: Should this be an actual scalar?
#elif p == 1:
#return V
#else:
#assert len(V.shape) == 1 # This should be equivalent to V_order == 1.
#V_dim = V.shape[0]
#V_to_the_p_minus_1 = vorpy.tensor.tensor_power_of_vector(V, p-1)
#retval_shape = (V_dim,)*p
#return np.outer(V, V_to_the_p_minus_1.reshape(-1)).reshape(*retval_shape)
def tensor_power__test ():
    """Verify vorpy.tensor.tensor_power_of_vector against explicitly-contracted
    expected values for powers 0 through 4.

    Raises FancyException on the first mismatching power (same order and message
    as the previous copy-pasted per-power checks).
    """
    V = np.array((sp.var('x'), sp.var('y'), sp.var('z')))
    #print(f'V = {V}')
    # Expected values, indexed by power: the 0th power is 1, the 1st is V itself,
    # and higher powers are the corresponding outer-product contractions.
    expected_value_v = [
        1,
        V,
        vorpy.tensor.contract('i,j', V, V, dtype=object),
        vorpy.tensor.contract('i,j,k', V, V, V, dtype=object),
        vorpy.tensor.contract('i,j,k,l', V, V, V, V, dtype=object),
    ]
    for power, expected_value in enumerate(expected_value_v):
        actual_value = vorpy.tensor.tensor_power_of_vector(V, power)
        if not np.all(expected_value == actual_value):
            raise FancyException(f'For power {power}, expected {expected_value} but actual value was {actual_value}')
    print('tensor_power__test passed')
# TODO: Write symbolic_homogeneous_polynomial function and use it to implement symbolic_polynomial function.
def symbolic_polynomial (coefficient_prefix, degree, X):
    """
    Returns a generic polynomial of the given degree with symbolic coefficients,
    as well as a list of the coefficients.  X is the coordinates to express the
    polynomial in.  Each polynomial term does not include multiplicity (e.g.
    the `x*y` term would appear as `a_0_1*x*y`, not as `2*a_0_1*x*y`).
    The return value is polynomial, coefficients.
    """
    # TODO: Allow specification of which degrees should be present in this polynomial
    X_reshaped = X.reshape(-1)
    coefficient_accumulator = []           # flat list of the surviving (strictly-increasing-index) coefficients
    polynomial_accumulator = sp.Integer(0) # running sum of the degree-p homogeneous pieces
    # TEMP HACK: Omitting the 0-degree term for now.
    for p in range(1, degree+1):
        degree_shape = (X_reshaped.size,)*p
        degree_p_coefficients = vorpy.symbolic.tensor(coefficient_prefix, degree_shape)
        # TODO: Have to encode the symmetries in the coefficients -- in particular, could replace any
        # coefficient with non-strictly-increasing indices with the corresponding one that has
        # strictly increasing indices.
        for I in vorpy.tensor.multiindex_iterator(degree_shape):
            # Replace the non-strictly-increasing-indexed coefficients with 0, and store the rest for return.
            # (Keeping only sorted multi-indices avoids repeating the same monomial's coefficient.)
            if I != tuple(sorted(I)):
                degree_p_coefficients[I] = 0
            else:
                coefficient_accumulator.append(degree_p_coefficients[I])
        degree_p_variable_tensor = vorpy.tensor.tensor_power_of_vector(X_reshaped, p)
        # Because of the sparsification done above, multiplying it out this way is somewhat inefficient, but it's fine for now.
        polynomial_accumulator += np.dot(degree_p_coefficients.reshape(-1), degree_p_variable_tensor.reshape(-1))
    return polynomial_accumulator, coefficient_accumulator
def symbolic_polynomial__test ():
    """Print-only smoke test for symbolic polynomial construction."""
    # NOTE(review): this calls vorpy.symbolic.symbolic_polynomial, not the local
    # symbolic_polynomial defined above -- confirm which one is intended.
    X = np.array((sp.var('x'), sp.var('y'), sp.var('z')))
    print(f'vorpy.symbolic.symbolic_polynomial("a", 2, {X}) = {vorpy.symbolic.symbolic_polynomial("a", 2, X)}')
    # TODO: actually do a check.
def collect_by_linear_factors (expr, linear_factor_v, *, term_procedure=None, sanity_check=False):
    """
    Rewrite expr as dot(d(expr)/d(linear_factor_v), linear_factor_v), i.e. collect the
    terms of expr by the factors in linear_factor_v, each of which must occur linearly
    in expr.  term_procedure, if given, is applied to each collected coefficient
    (e.g. sp.simplify).  If sanity_check is True, linearity is verified by checking
    that the second differential with respect to the factors vanishes.
    """
    #expr_order = vorpy.tensor.order(expr)
    #if expr_order != 0:
        #raise FancyException(f'Expected expr to have tensor order 0, but it was actually {expr_order}')
    print(f'collect_by_linear_factors; expr = {expr}, linear_factor_v = {linear_factor_v}')
    dexpr = vorpy.symbolic.differential(expr, linear_factor_v)
    print(f'collect_by_linear_factors; dexpr = {dexpr}')
    if term_procedure is not None:
        dexpr_reshaped = dexpr.reshape(-1) # This is just a view into dexpr.
        for i in range(dexpr_reshaped.size):
            dexpr_reshaped[i] = term_procedure(dexpr_reshaped[i])
        print(f'collect_by_linear_factors; after term_procedure: dexpr = {dexpr}')
    if sanity_check:
        ddexpr = vorpy.symbolic.differential(dexpr, linear_factor_v)
        if not np.all(ddexpr == 0):
            raise FancyException(f'Factors did not occur linearly in expr; expected Hessian of expr to be 0.')
    return np.dot(dexpr, linear_factor_v)
def find_integral_of_motion (highest_degree_polynomial):
    """
    Search for a polynomial integral of motion F (of degree 1 up to
    highest_degree_polynomial) by building a generic symbolic polynomial, forming
    X_H(F), restricting to the H = 0 submanifold, and then solving for F's
    coefficients at concrete sample points.  Progress is printed verbosely.

    TODO: Could potentially use
    https://www.sciencedirect.com/science/article/pii/S0747717185800146
    https://www.sciencedirect.com/science/article/pii/S0747717185800146/pdf?md5=2523d9cdea9c529ac03075da71605760&pid=1-s2.0-S0747717185800146-main.pdf
    to deal with a larger class of functions (ones involving radicals).
    """
    qp = phase_space_coordinates()
    H_qp = H(qp)
    X_H = vorpy.symplectic.symplectic_gradient_of(H_qp, qp)
    qp_reshaped = qp.reshape(-1)
    for degree in range(1, highest_degree_polynomial+1):
        print(f'degree = {degree}')
        # F is a generic degree-`degree` polynomial in qp with symbolic coefficients.
        F, F_coefficients = vorpy.symbolic.symbolic_polynomial('F', degree, qp)
        print(f'F = {F}')
        print(f'len(F_coefficients) = {len(F_coefficients)}')
        print(f'F_coefficients = {F_coefficients}')
        # F is an integral of motion iff X_H(F) == 0.
        X_H__F = vorpy.manifold.apply_vector_field_to_function(X_H, F, qp)
        print(f'\nOriginal expression')
        print(f'X_H__F = {X_H__F}')
        X_H__F = sp.fraction(sp.factor_terms(X_H__F, clear=True))[0]
        print(f'\nAfter taking top of fraction:')
        print(f'X_H__F = {X_H__F}')
        X_H__F = collect_by_linear_factors(X_H__F, np.array(F_coefficients), term_procedure=lambda e:sp.simplify(e), sanity_check=True)
        print(f'\nAfter collect_by_linear_factors:')
        print(f'X_H__F = {X_H__F}')
        #X_H__F = sp.expand_mul(X_H__F)
        #print(f'\nAfter expand_mul:')
        #print(f'X_H__F = {X_H__F}')
        #X_H__F = sp.collect(X_H__F, F_coefficients)
        #print(f'\nAfter collecting by coefficients:')
        #print(f'X_H__F = {X_H__F}')
        #X_H__F = sp.simplify(X_H__F)
        #print(f'\nAfter simplifying:')
        #print(f'X_H__F = {X_H__F}')
        # Look for integrals of motion restricted to H = 0 submanifold.
        if True:
            p_z = qp[1,2]
            # Solve for p_z in H_qp == 0; there are two sheets to this solution.
            p_z_solution_v = sp.solve(H_qp, p_z)
            assert len(p_z_solution_v) == 2, f'Expected 2 solutions for p_z in H == 0, but instead got {len(p_z_solution_v)}'
            #print('There are {0} solutions for the equation: {1} = 0'.format(len(p_z_solution_v), H_qp))
            #for i,p_z_solution in enumerate(p_z_solution_v):
                #print('    solution {0}: p_z = {1}'.format(i, p_z_solution))
            #for solution_index,p_z_solution in enumerate(p_z_solution_v):
            if True:
                # TEMP HACK: Just use sheet 0 of the solution for now.
                solution_index = 0
                p_z_solution = p_z_solution_v[0]
                X_H__F = sp.Subs(X_H__F, p_z, p_z_solution).doit().simplify()
                print(f'X_H__F restricted to H = 0: {X_H__F}')
        coefficient_to_solve_for_v = copy.deepcopy(F_coefficients)
        total_substitution_v = []
        # TODO: using multiindex_iterator is just a cheap way to get nice test tuples
        # of the form (0,0,0,0,0,0), (0,0,0,0,0,1), etc.  Should really define the range
        # of each test component and use itertools.product.  This plugging in of specific
        # values and then solving for coefficients is a dumb but effective way of avoiding
        # having to determine a linearly independent set of nonlinear functions of the
        # qp variables.
        #
        # TODO: Ideally, we would use test values that are not invariant under the symmetries
        # of the problem.  Though determining this is relatively hard.
        for qp_substitutions in vorpy.tensor.multiindex_iterator((2,)*qp_reshaped.size):
            qp_substitutions = np.array(qp_substitutions)
            # Skip (x,y,z) == 0.
            if np.all(qp_substitutions[0:3] == 0):
                print(f'Skipping qp_substitutions = {qp_substitutions} to avoid division by zero')
                continue
            # We have to copy X_H__F or it will only be a view into X_H__F and will modify the original.
            # The [tuple()] access is to obtain the scalar value out of the
            # sympy.tensor.array.dense_ndim_array.ImmutableDenseNDimArray object that doit() returns.
            #particular_expression = sp.Subs(np.copy(X_H__F), qp_reshaped, qp_substitutions).doit()[tuple()].simplify()
            particular_expression = sp.collect(sp.Subs(np.copy(X_H__F), qp_reshaped, qp_substitutions).doit()[tuple()].simplify(), coefficient_to_solve_for_v)
            print(f'len(coefficient_to_solve_for_v) = {len(coefficient_to_solve_for_v)}')
            print(f'coefficient_to_solve_for_v = {coefficient_to_solve_for_v}')
            print(f'running solver on expression: {particular_expression}')
            solutions = sp.solve(particular_expression, coefficient_to_solve_for_v)
            print(f'qp_substitutions = {qp_substitutions}')
            print(f'solutions = {solutions}')
            # TODO: Any time a particular solution is found for any coefficient, replace that coefficient
            # with that solution and record that replacement.  This will narrow down the search field.
            # If there are multiple solutions, this necessarily forces the solution search to branch,
            # which will require a different search function design.
            # TEMP HACK: For now, assume there will be no branching, so just use the first solution, if any.
            if len(solutions) > 0:
                if len(solutions) > 1:
                    print(f'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
                    print(f'WARNING: There was more than one solution -- branching search function is needed.')
                    print(f'!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
                substitution_var_v = []
                substitution_value_v = []
                first_solution = solutions[0]
                # Make sure that we can call zip on first_solution with coefficient_to_solve_for_v
                if not isinstance(first_solution, tuple):
                    first_solution = (first_solution,)
                # Substitute the first solution
                for var,value in zip(coefficient_to_solve_for_v, first_solution):
                    if var != 0 and var != value and value != sp.nan:
                        total_substitution_v.append([var, value])
                        substitution_var_v.append(var)
                        substitution_value_v.append(value)
                assert len(substitution_var_v) == len(substitution_value_v)
                there_was_a_substitution = len(substitution_var_v) > 0
                if there_was_a_substitution:
                    print(f'Substituting:')
                    for var, value in zip(substitution_var_v, substitution_value_v):
                        print(f'    {var} |-> {value}')
                    # Substitute the solutions into the conservation law expression
                    #X_H__F = sp.Subs(X_H__F, substitution_var_v, substitution_value_v).doit().simplify()
                    X_H__F = sp.collect(sp.Subs(X_H__F, substitution_var_v, substitution_value_v).doit().simplify(), coefficient_to_solve_for_v)
                    # Substitute the solutions into each of the existing solution substitutions
                    # so that the substitution expressions don't depend on each other and will
                    # boil down to constants once all coefficients have been solved for.
                    for i in range(len(total_substitution_v)):
                        total_substitution_v[i][1] = sp.Subs(total_substitution_v[i][1], substitution_var_v, substitution_value_v).doit().simplify()
                    print(f'After substitutions, X_H__F = {X_H__F}')
                    print()
                    for substitution_var in substitution_var_v:
                        coefficient_to_solve_for_v.remove(substitution_var)
            print(f'total_substitution_v:')
            for var,value in total_substitution_v:
                print(f'    {var} |-> {value}')
            print(f'{len(coefficient_to_solve_for_v)} coefficients still to solve for: {coefficient_to_solve_for_v}')
            # All coefficients accounted for -- X_H(F) vanishes identically.
            if X_H__F == 0:
                break
            print()
        #solutions = sp.solve(X_H__F, F_coefficients)
        #print(f'solutions = {solutions}')
        print()
if __name__ == '__main__':
    # Ad-hoc test driver -- flip the `if True` / `if False` gates to select which tests run.
    P_x__test()
    if True:
        J__test()
    if True:
        lie_bracket__test()
        lie_bracket_of__X_H__V__test()
        V__test()
    if True:
        H__conservation_test()
    if True:
        p_theta__conservation_test()
        J__restricted_conservation_test()
        tensor_power__test()
        symbolic_polynomial__test()
    # The integral-of-motion search is slow/experimental, so it is gated off by default.
    if False:
        find_integral_of_motion(2)
| {
"content_hash": "7de298a467b798350000042ba90feebd",
"timestamp": "",
"source": "github",
"line_count": 819,
"max_line_length": 238,
"avg_line_length": 40.66788766788767,
"alnum_prop": 0.5919176149157834,
"repo_name": "vdods/heisenberg",
"id": "3f94479be45c50ce3e3568b75f96d02442c0fb27",
"size": "33307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "experiments/search_for_integral_of_motion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "420726"
},
{
"name": "Shell",
"bytes": "41295"
}
],
"symlink_target": ""
} |
import eventlet
import os
import socket
import sys
from oslo.config import cfg
from ceilometer.openstack.common import gettextutils
from ceilometer.openstack.common import log
from ceilometer.openstack.common import rpc
# The node-identity option; the default is the machine's hostname.
cfg.CONF.register_opts([
    cfg.StrOpt('host',
               default=socket.gethostname(),
               help='Name of this node. This can be an opaque identifier. '
                    'It is not necessarily a hostname, FQDN, or IP address. '
                    'However, the node name must be valid within '
                    'an AMQP key, and if using ZeroMQ, a valid '
                    'hostname, FQDN, or IP address'),
])
# Credentials and endpoint selection for talking to other OpenStack services.
# Defaults are drawn from the standard OS_* environment variables.
CLI_OPTIONS = [
    cfg.StrOpt('os-username',
               deprecated_group="DEFAULT",
               default=os.environ.get('OS_USERNAME', 'ceilometer'),
               help='Username to use for openstack service access'),
    cfg.StrOpt('os-password',
               deprecated_group="DEFAULT",
               secret=True,
               default=os.environ.get('OS_PASSWORD', 'admin'),
               help='Password to use for openstack service access'),
    cfg.StrOpt('os-tenant-id',
               deprecated_group="DEFAULT",
               default=os.environ.get('OS_TENANT_ID', ''),
               help='Tenant ID to use for openstack service access'),
    cfg.StrOpt('os-tenant-name',
               deprecated_group="DEFAULT",
               default=os.environ.get('OS_TENANT_NAME', 'admin'),
               help='Tenant name to use for openstack service access'),
    cfg.StrOpt('os-cacert',
               default=os.environ.get('OS_CACERT', None),
               help='Certificate chain for SSL validation'),
    cfg.StrOpt('os-auth-url',
               deprecated_group="DEFAULT",
               default=os.environ.get('OS_AUTH_URL',
                                      'http://localhost:5000/v2.0'),
               help='Auth URL to use for openstack service access'),
    cfg.StrOpt('os-region-name',
               deprecated_group="DEFAULT",
               default=os.environ.get('OS_REGION_NAME', None),
               help='Region name to use for openstack service endpoints'),
    cfg.StrOpt('os-endpoint-type',
               default=os.environ.get('OS_ENDPOINT_TYPE', 'publicURL'),
               help='Type of endpoint in Identity service catalog to use for '
                    'communication with OpenStack services.'),
]
# Exposed under the [service_credentials] config group and as CLI flags.
cfg.CONF.register_cli_opts(CLI_OPTIONS, group="service_credentials")
def prepare_service(argv=None):
    """Initialize process-wide state for a ceilometer service.

    Monkey-patches the stdlib for eventlet, installs gettext for i18n,
    sets RPC and log-level defaults, parses configuration from argv
    (defaulting to sys.argv), and sets up logging.
    """
    eventlet.monkey_patch()
    gettextutils.install('ceilometer', lazy=False)
    rpc.set_defaults(control_exchange='ceilometer')
    # Quiet down chatty third-party libraries by default.
    cfg.set_defaults(log.log_opts,
                     default_log_levels=['amqplib=WARN',
                                         'qpid.messaging=INFO',
                                         'sqlalchemy=WARN',
                                         'keystoneclient=INFO',
                                         'stevedore=INFO',
                                         'eventlet.wsgi.server=WARN'
                                         ])
    if argv is None:
        argv = sys.argv
    # argv[0] is the program name; oslo.config only wants the arguments.
    cfg.CONF(argv[1:], project='ceilometer')
    log.setup('ceilometer')
| {
"content_hash": "e13918bd7355d97775cff6ea4bef1db3",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 78,
"avg_line_length": 42.0921052631579,
"alnum_prop": 0.5573616755236012,
"repo_name": "citrix-openstack-build/ceilometer",
"id": "43754760e101773a98a9851e5e91feb5dd5c3d3c",
"size": "3898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceilometer/service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6284"
},
{
"name": "JavaScript",
"bytes": "304636"
},
{
"name": "Python",
"bytes": "1776303"
},
{
"name": "Shell",
"bytes": "1322"
}
],
"symlink_target": ""
} |
"""
Mirror all web pages.
Useful if you are living down under.
"""
from mitmproxy import http
def response(flow: http.HTTPFlow) -> None:
    """Inject a CSS rule before </head> that horizontally flips the page body."""
    mirror_css = b"<style>body {transform: scaleX(-1);}</style></head>"
    body = flow.response.content
    flow.response.content = body.replace(b"</head>", mirror_css)
| {
"content_hash": "f388818d39ee276713bfdbb0115637a3",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 80,
"avg_line_length": 26.545454545454547,
"alnum_prop": 0.6952054794520548,
"repo_name": "vhaupert/mitmproxy",
"id": "7ee1821d195e12cd28f4c780ce677bf269e17309",
"size": "292",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/addons/internet_in_mirror.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "186961"
},
{
"name": "HTML",
"bytes": "3034"
},
{
"name": "JavaScript",
"bytes": "2254958"
},
{
"name": "PowerShell",
"bytes": "362"
},
{
"name": "Python",
"bytes": "1312583"
},
{
"name": "Shell",
"bytes": "3726"
}
],
"symlink_target": ""
} |
'''
Copyright (C) 2016 Turi
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
'''
import unittest
import pandas
import array
from .. import SFrame
from pandas.util.testing import assert_frame_equal
from sys import version_info
class DataFrameTest(unittest.TestCase):
    """Round-trip pandas DataFrames through SFrame and verify they come back equal."""

    def test_empty(self):
        # Both a completely empty frame and one with empty typed columns must survive.
        expected = pandas.DataFrame()
        assert_frame_equal(SFrame(expected).to_dataframe(), expected)
        for column_name in ('int', 'float', 'str'):
            expected[column_name] = []
        assert_frame_equal(SFrame(expected).to_dataframe(), expected)

    def test_simple_dataframe(self):
        # Fully-populated columns covering each supported element type.
        expected = pandas.DataFrame()
        expected['int'] = list(range(10))
        expected['float'] = [float(value) for value in range(10)]
        expected['str'] = [str(value) for value in range(10)]
        if version_info.major == 2:
            # Python 2 only: unicode is a distinct type from str.
            expected['unicode'] = [unicode(value) for value in range(10)]
        expected['array'] = [array.array('d', [value]) for value in range(10)]
        expected['ls'] = [[str(value)] for value in range(10)]
        assert_frame_equal(SFrame(expected).to_dataframe(), expected)

    def test_sparse_dataframe(self):
        # Columns with None holes at varying periods exercise missing-value handling.
        expected = pandas.DataFrame()
        expected['sparse_int'] = [value if value % 2 == 0 else None for value in range(10)]
        expected['sparse_float'] = [None if value % 2 == 0 else float(value) for value in range(10)]
        expected['sparse_str'] = [str(value) if value % 3 == 0 else None for value in range(10)]
        expected['sparse_array'] = [array.array('d', [value]) if value % 5 == 0 else None for value in range(10)]
        expected['sparse_list'] = [[str(value)] if value % 7 == 0 else None for value in range(10)]
        assert_frame_equal(SFrame(expected).to_dataframe(), expected)
| {
"content_hash": "1c2b8fd66c99634c5f2111fd3b77a2c5",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 101,
"avg_line_length": 42.26190476190476,
"alnum_prop": 0.6214084507042253,
"repo_name": "TobyRoseman/SFrame",
"id": "2c63162caa430e82431a391568580121818bc4cf",
"size": "1775",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "oss_src/unity/python/sframe/test/test_dataframe.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "142942"
},
{
"name": "C++",
"bytes": "11674775"
},
{
"name": "CMake",
"bytes": "104941"
},
{
"name": "CSS",
"bytes": "127000"
},
{
"name": "HTML",
"bytes": "24407"
},
{
"name": "JavaScript",
"bytes": "20909"
},
{
"name": "Makefile",
"bytes": "9614"
},
{
"name": "Perl",
"bytes": "9663"
},
{
"name": "Python",
"bytes": "2225333"
},
{
"name": "R",
"bytes": "537"
},
{
"name": "Scala",
"bytes": "5232"
},
{
"name": "Shell",
"bytes": "53145"
},
{
"name": "Smarty",
"bytes": "966"
},
{
"name": "XSLT",
"bytes": "74068"
}
],
"symlink_target": ""
} |
from praw.exceptions import ClientException, PRAWException
from praw.models import Comment, Submission
from prawcore import BadRequest
import mock
import pytest
from ... import IntegrationTest
class TestComment(IntegrationTest):
    def test_attributes(self):
        # Cassette-backed check that Comment attributes load lazily and correctly by id.
        with self.recorder.use_cassette("TestComment.test_attributes"):
            comment = Comment(self.reddit, "cklhv0f")
            assert comment.author == "bboe"
            assert comment.body.startswith("Yes it does.")
            assert not comment.is_root
            assert comment.submission == "2gmzqe"
    @mock.patch("time.sleep", return_value=None)
    def test_block(self, _):
        # Block the author of the first inbox submission reply not written by us.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_block"):
            comment = None
            for item in self.reddit.inbox.submission_replies():
                if item.author and item.author != pytest.placeholders.username:
                    comment = item
                    break
            else:
                # for/else: reached only if no suitable reply was found.
                assert False, "no comment found"
            comment.block()
    def test_clear_vote(self):
        # Removing a vote should succeed without raising.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_clear_vote"):
            Comment(self.reddit, "d1680wu").clear_vote()
    @mock.patch("time.sleep", return_value=None)
    def test_delete(self, _):
        # After deletion the comment's author is cleared and the body is scrubbed.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_delete"):
            comment = Comment(self.reddit, "d1616q2")
            comment.delete()
            assert comment.author is None
            assert comment.body == "[deleted]"
    def test_disable_inbox_replies(self):
        # Toggling off inbox replies for a comment should succeed without raising.
        self.reddit.read_only = False
        comment = Comment(self.reddit, "dcc9snh")
        with self.recorder.use_cassette(
            "TestComment.test_disable_inbox_replies"
        ):
            comment.disable_inbox_replies()
    def test_downvote(self):
        # Downvoting a comment should succeed without raising.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_downvote"):
            Comment(self.reddit, "d1680wu").downvote()
    @mock.patch("time.sleep", return_value=None)
    def test_edit(self, _):
        # Editing should update the local body attribute to the new text.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_edit"):
            comment = Comment(self.reddit, "d1616q2")
            comment.edit("New text")
            assert comment.body == "New text"
    def test_enable_inbox_replies(self):
        # Toggling on inbox replies for a comment should succeed without raising.
        self.reddit.read_only = False
        comment = Comment(self.reddit, "dcc9snh")
        with self.recorder.use_cassette(
            "TestComment.test_enable_inbox_replies"
        ):
            comment.enable_inbox_replies()
    def test_gild__no_creddits(self):
        # Gilding without creddits should fail with a BadRequest whose reason
        # is INSUFFICIENT_CREDDITS.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_gild__no_creddits"):
            with pytest.raises(BadRequest) as excinfo:
                Comment(self.reddit, "d1616q2").gild()
            reason = excinfo.value.response.json()["reason"]
            assert "INSUFFICIENT_CREDDITS" == reason
    def test_invalid(self):
        # Accessing an attribute of a nonexistent comment id raises PRAWException.
        with self.recorder.use_cassette("TestComment.test_invalid"):
            with pytest.raises(PRAWException) as excinfo:
                Comment(self.reddit, "0").body
            assert excinfo.value.args[0].startswith(
                "No data returned for comment"
            )
    @mock.patch("time.sleep", return_value=None)
    def test_mark_read(self, _):
        # The first unread inbox item should be a Comment and be markable as read.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_mark_read"):
            comment = next(self.reddit.inbox.unread())
            assert isinstance(comment, Comment)
            comment.mark_read()
    @mock.patch("time.sleep", return_value=None)
    def test_mark_unread(self, _):
        # A comment reply from the inbox should be markable as unread.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_mark_unread"):
            comment = next(self.reddit.inbox.comment_replies())
            comment.mark_unread()
    def test_parent__comment(self):
        # parent() of a non-root comment yields the parent Comment; after refresh,
        # the child appears among the parent's replies.
        comment = Comment(self.reddit, "cklhv0f")
        with self.recorder.use_cassette("TestComment.test_parent__comment"):
            parent = comment.parent()
            parent.refresh()
            assert comment in parent.replies
        assert isinstance(parent, Comment)
        assert parent.fullname == comment.parent_id
    def test_parent__chain(self):
        # Walk parent() repeatedly up to the submission, refreshing every 9th step
        # to keep the comment tree loaded.
        comment = Comment(self.reddit, "dkk4qjd")
        counter = 0
        with self.recorder.use_cassette("TestComment.test_parent__chain"):
            comment.refresh()
            parent = comment.parent()
            while parent != comment.submission:
                if counter % 9 == 0:
                    parent.refresh()
                counter += 1
                parent = parent.parent()
    def test_parent__comment_from_forest(self):
        # A comment obtained from a submission's comment forest already knows its
        # parent without any extra refresh.
        submission = self.reddit.submission("2gmzqe")
        with self.recorder.use_cassette(
            "TestComment.test_parent__comment_from_forest"
        ):
            comment = submission.comments[0].replies[0]
        parent = comment.parent()
        assert comment in parent.replies
        assert isinstance(parent, Comment)
        assert parent.fullname == comment.parent_id
    @mock.patch("time.sleep", return_value=None)
    def test_parent__from_replies(self, _):
        # parent() also works for a comment obtained from the inbox reply listing.
        # NOTE(review): cassette name lacks the usual "test_" prefix
        # ("TestComment.parent__from_replies") -- confirm this is intentional.
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.parent__from_replies"):
            comment = next(self.reddit.inbox.comment_replies())
        parent = comment.parent()
        assert isinstance(parent, Comment)
        assert parent.fullname == comment.parent_id
    def test_parent__submission(self):
        """parent() of a top-level comment is its Submission."""
        comment = Comment(self.reddit, "cklfmye")
        with self.recorder.use_cassette("TestComment.test_parent__submission"):
            parent = comment.parent()
            assert comment in parent.comments
            assert isinstance(parent, Submission)
            assert parent.fullname == comment.parent_id
    def test_refresh(self):
        """refresh() populates a comment's replies."""
        with self.recorder.use_cassette("TestComment.test_refresh"):
            comment = Comment(self.reddit, "d81vwef").refresh()
        assert len(comment.replies) > 0
    def test_refresh__raises_exception(self):
        """refresh() raises ClientException when the comment is not in the tree."""
        with self.recorder.use_cassette(
            "TestComment.test_refresh__raises_exception"
        ):
            with pytest.raises(ClientException) as excinfo:
                Comment(self.reddit, "d81vwef").refresh()
        assert (
            "This comment does not appear to be in the comment tree",
        ) == excinfo.value.args
    def test_refresh__twice(self):
        """refresh() is idempotent enough to be chained twice."""
        with self.recorder.use_cassette("TestComment.test_refresh__twice"):
            Comment(self.reddit, "d81vwef").refresh().refresh()
    def test_refresh__deleted_comment(self):
        """refresh() on a deleted comment raises ClientException."""
        with self.recorder.use_cassette(
            "TestComment.test_refresh__deleted_comment"
        ):
            with pytest.raises(ClientException) as excinfo:
                Comment(self.reddit, "d7ltvl0").refresh()
        assert (
            "This comment does not appear to be in the comment tree",
        ) == excinfo.value.args
    def test_refresh__removed_comment(self):
        """refresh() on a moderator-removed comment raises ClientException."""
        with self.recorder.use_cassette(
            "TestComment.test_refresh__removed_comment"
        ):
            with pytest.raises(ClientException) as excinfo:
                Comment(self.reddit, "dma3mi5").refresh()
        assert (
            "This comment does not appear to be in the comment tree",
        ) == excinfo.value.args
    def test_refresh__with_reply_sort_and_limit(self):
        """reply_sort and reply_limit are honored by refresh()."""
        with self.recorder.use_cassette(
            "TestComment.test_refresh__with_reply_sort_and_limit"
        ):
            comment = Comment(self.reddit, "e4j4830")
            comment.reply_limit = 4
            comment.reply_sort = "new"
            comment.refresh()
            replies = comment.replies
        last_created = float("inf")
        for reply in replies:
            if isinstance(reply, Comment):
                # "new" sort: creation times must be non-increasing.
                if reply.created_utc > last_created:
                    assert False, "sort order incorrect"
                last_created = reply.created_utc
        # NOTE(review): limit 4 yields 3 replies here — presumably the
        # requested comment itself counts toward the limit; confirm with API.
        assert len(comment.replies) == 3
    def test_reply(self):
        """reply() creates a child comment with the given body."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_reply"):
            parent_comment = Comment(self.reddit, "d1616q2")
            comment = parent_comment.reply("Comment reply")
            assert comment.author == self.reddit.config.username
            assert comment.body == "Comment reply"
            assert not comment.is_root
            assert comment.parent_id == parent_comment.fullname
    def test_report(self):
        """report() accepts a custom report reason."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_report"):
            Comment(self.reddit, "d0335z3").report("custom")
    def test_save(self):
        """save() accepts an optional category label."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_save"):
            Comment(self.reddit, "d1680wu").save("foo")
    def test_unsave(self):
        """unsave() removes a previously saved comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_unsave"):
            Comment(self.reddit, "d1680wu").unsave()
    def test_upvote(self):
        """upvote() succeeds for an authenticated user."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestComment.test_upvote"):
            Comment(self.reddit, "d1680wu").upvote()
class TestCommentModeration(IntegrationTest):
    """Integration tests for moderator actions on comments (cassette-backed)."""
    def test_approve(self):
        """A moderator can approve a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestCommentModeration.test_approve"):
            Comment(self.reddit, "da2g5y6").mod.approve()
    def test_distinguish(self):
        """A moderator can distinguish a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette(
            "TestCommentModeration.test_distinguish"
        ):
            Comment(self.reddit, "da2g5y6").mod.distinguish()
    @mock.patch("time.sleep", return_value=None)
    def test_distinguish__sticky(self, _):
        """distinguish(sticky=True) pins the comment as well."""
        self.reddit.read_only = False
        with self.recorder.use_cassette(
            "TestCommentModeration.test_distinguish__sticky"
        ):
            Comment(self.reddit, "da2g5y6").mod.distinguish(sticky=True)
    def test_ignore_reports(self):
        """A moderator can ignore reports on a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette(
            "TestCommentModeration.test_ignore_reports"
        ):
            self.reddit.comment("da2g5y6").mod.ignore_reports()
    def test_lock(self):
        """A moderator can lock a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestCommentModeration.test_lock"):
            Comment(self.reddit, "da2g6ne").mod.lock()
    def test_remove(self):
        """A moderator can remove a comment, flagging it as spam."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestCommentModeration.test_remove"):
            self.reddit.comment("da2g5y6").mod.remove(spam=True)
    def test_unlock(self):
        """A moderator can unlock a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette("TestCommentModeration.test_unlock"):
            Comment(self.reddit, "da2g6ne").mod.unlock()
    @mock.patch("time.sleep", return_value=None)
    def test_send_removal_message(self, _):
        """Removal messages: public returns the reply Comment, private kinds None."""
        self.reddit.read_only = False
        with self.recorder.use_cassette(
            "TestCommentModeration.test_send_removal_message"
        ):
            comment = self.reddit.comment("edu698v")
            mod = comment.mod
            mod.remove()
            message = "message"
            res = [
                mod.send_removal_message(message, "title", type)
                for type in ("public", "private", "private_exposed")
            ]
            assert isinstance(res[0], Comment)
            assert res[0].parent_id == "t1_" + comment.id
            assert res[0].body == message
            assert res[1] is None
            assert res[2] is None
    def test_undistinguish(self):
        """A moderator can undistinguish a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette(
            "TestCommentModeration.test_undistinguish"
        ):
            self.reddit.comment("da2g5y6").mod.undistinguish()
    def test_unignore_reports(self):
        """A moderator can unignore reports on a comment."""
        self.reddit.read_only = False
        with self.recorder.use_cassette(
            "TestCommentModeration.test_unignore_reports"
        ):
            self.reddit.comment("da2g5y6").mod.unignore_reports()
| {
"content_hash": "1419690b69b845a28d88823d38e63fae",
"timestamp": "",
"source": "github",
"line_count": 317,
"max_line_length": 79,
"avg_line_length": 39.186119873817034,
"alnum_prop": 0.611737240379971,
"repo_name": "leviroth/praw",
"id": "22dcc3bdae79c1429d7a491435054f5b5dbac0c4",
"size": "12422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/models/reddit/test_comment.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "513471"
},
{
"name": "Shell",
"bytes": "189"
}
],
"symlink_target": ""
} |
"""Learned pooling module."""
from typing import Callable
from flax import linen as nn
import jax
from jax import lax
from jax import numpy as jnp
def gaussian(M: int, std: float, sym: bool = True) -> jnp.ndarray:  # pylint: disable=invalid-name
  """Computes a Gaussian window, mirroring scipy.signal.windows.gaussian.

  Args:
    M: Number of points in the output window.
    std: The standard deviation, sigma.
    sym: Must be `True` (present for compatibility with SciPy's signature).

  Returns:
    The window, with the maximum value normalized to 1 (though the value 1
    does not appear if M is even).

  Raises:
    ValueError: If `sym` is `False`.
  """
  if not sym:
    raise ValueError("Periodic windows not supported")
  # Offsets of each sample from the window's center.
  offsets = jnp.arange(M) - (M - 1.0) / 2.0
  variance_term = 2 * std * std
  return jnp.exp(-offsets**2 / variance_term)
def gaussian_init(key: jnp.ndarray,
                  num_channels: int,
                  window_size: int,
                  std: float = 0.4) -> jnp.ndarray:
  """Initializes per-channel Gaussian window parameters.

  Args:
    key: RNG key; unused because the initialization is deterministic.
    num_channels: The number of windows to calculate.
    window_size: The number of steps in the window (which is assumed to range
      from -1 to 1).
    std: The standard deviation of the Gaussian.

  Returns:
    A one-tuple containing an array with `num_channels` entries. These
    represent the standard deviation scaled by the window size.
  """
  del key
  scaled_std = std * 0.5 * (window_size - 1)
  # Explicit one-tuple: window initializers return a tuple of parameter
  # arrays, one entry per window parameter.
  return (scaled_std * jnp.ones((num_channels,)),)
class WindowPool(nn.Module):
  """Pools using a window function.
  Note that is not a pooling function in the traditional sense, i.e., it does
  not use a reduction operator applied to the elements in each window. Instead,
  a weighted average is taken over the window. If the weighting is given by a
  parametrized window, e.g., a Gaussian, then these parameters are learned. This
  allows the model to interpolate between subsampling (a Gaussian with zero
  variance) and average pooling (a Gaussian with infinite variance).
  When using a Gaussian window, there are a few differences with the
  implementation in LEAF[^1]. Firstly, this module by default scales the weights
  to sum to unity. This ensure that the energy of the output signal is the same
  as the input. Secondly, this module does not perform clipping on the window
  parameters. This is expected to be done during optimization.
  [^1]: https://github.com/google-research/leaf-audio
  Attributes:
    window: The window function to use. Should follow the conventions of the
      `scipy.signal.windows` functions.
    window_size: The size of the pooling window.
    window_init: Initializer of the window parameters. It should take as an
      argument an RNG key, the number of filters, and the width of the window,
      and return a tuple of parameters. Each parameter should have the number of
      filters as its first axis.
    normalize_window: Whether or not to normalize the window to sum to 1.
    stride: The stride to use.
    padding: Padding to use.
  """
  window: Callable[..., jnp.ndarray]
  window_size: int
  window_init: Callable[[jnp.ndarray, int, int], jnp.ndarray]
  normalize_window: bool = True
  stride: int = 1
  padding: str = "SAME"
  @nn.compact
  def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray:
    """Applies the pooling.
    Args:
      inputs: The input array must be of shape `(batch, time, channels)`. Each
        channel will have its own window applied. In the case of a parametrized
        window, each channel will have its own parameters.
    Returns:
      The pooled outputs of shape (batch, time, channels).
    """
    num_channels = inputs.shape[-1]
    # One set of learnable window parameters per channel.
    window_params = self.param("window_params", self.window_init, num_channels,
                               self.window_size)
    # Evaluate the window function once per channel: the window size is shared
    # (in_axes=None) while each parameter array is mapped over its leading
    # (channel) axis.
    window_values = jax.vmap(
        self.window,
        in_axes=(None,) + (0,) * len(window_params))(self.window_size,
                                                     *window_params)
    if self.normalize_window:
      # Make each channel's window sum to 1 so output energy matches input.
      window_values /= jnp.sum(window_values, axis=1, keepdims=True)
    # Reshape (channels, width) -> (width, 1, channels), the "WIO" kernel
    # layout declared below.
    window_values = window_values.T[:, jnp.newaxis]
    dn = lax.conv_dimension_numbers(inputs.shape, window_values.shape,
                                    ("NWC", "WIO", "NWC"))
    # feature_group_count=num_channels makes this a depthwise convolution:
    # every channel is pooled with its own window.
    return lax.conv_general_dilated(
        inputs,
        window_values, (self.stride,),
        self.padding,
        dimension_numbers=dn,
        feature_group_count=num_channels)
| {
"content_hash": "2f1c84352dd4fade281c3fb785b3fd65",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 98,
"avg_line_length": 36.611570247933884,
"alnum_prop": 0.6670428893905191,
"repo_name": "google-research/chirp",
"id": "dc3e0341b6d9283c16cebb1d2f84d9eb606ec7bc",
"size": "5028",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "chirp/models/pooling.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1066185"
}
],
"symlink_target": ""
} |
import django
from django.db.models import signals
def create_perm(*func_args, **func_kwargs):
    """
    Creates a fake content type and permission
    to be able to check for permissions
    """
    # Imported lazily: the app registry may not be ready at module import.
    from django.contrib.auth.models import Permission
    from django.contrib.contenttypes.models import ContentType
    if not (ContentType._meta.installed and Permission._meta.installed):
        return
    ct_lookup = {
        'app_label': 'constance',
        'model': 'config',
    }
    # Django < 1.8 content types still carried a 'name' field.
    if django.VERSION < (1, 8):
        ct_lookup['name'] = 'config'
    content_type, _ = ContentType.objects.get_or_create(**ct_lookup)
    Permission.objects.get_or_create(
        name='Can change config',
        content_type=content_type,
        codename='change_config')
# Django 1.7 replaced the post_syncdb signal with post_migrate; register
# create_perm on whichever signal this Django version provides. The
# dispatch_uid keeps the handler from being connected more than once.
if django.VERSION < (1, 7):
    signals.post_syncdb.connect(create_perm, dispatch_uid="constance.create_perm")
else:
    signals.post_migrate.connect(create_perm, dispatch_uid="constance.create_perm")
| {
"content_hash": "5eb861d6f352ed5424588839652c0308",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 83,
"avg_line_length": 31.6875,
"alnum_prop": 0.6568047337278107,
"repo_name": "trbs/django-constance-trbs",
"id": "abbc9c58f17864b5fc3de150efcb572db32b5e6c",
"size": "1014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "constance/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2708"
},
{
"name": "Python",
"bytes": "39688"
}
],
"symlink_target": ""
} |
import os
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
# Start from a clean slate: remove any database file left by a previous run.
if os.path.exists('test.db'):
    os.remove('test.db')
# create an Engine instance to handle the database
baza = create_engine('sqlite:///test.db')  # use ':memory:' for an in-memory DB
# declarative base class
BazaModel = declarative_base()
# The Klasa and Uczen classes describe rows of the "klasa" and "uczen"
# tables and the relations between them
class Klasa(BazaModel):
    """Maps the 'klasa' (school class) table."""
    __tablename__ = 'klasa'
    id = Column(Integer, primary_key=True)
    nazwa = Column(String(100), nullable=False)  # class name, required
    profil = Column(String(100), default='')  # class profile, optional
    uczniowie = relationship('Uczen', backref='klasa')  # one-to-many to Uczen
class Uczen(BazaModel):
    """Maps the 'uczen' (student) table."""
    __tablename__ = 'uczen'
    id = Column(Integer, primary_key=True)
    imie = Column(String(100), nullable=False)  # first name, required
    nazwisko = Column(String(100), nullable=False)  # last name, required
    klasa_id = Column(Integer, ForeignKey('klasa.id'))  # FK to the klasa table
# create the tables
BazaModel.metadata.create_all(baza)
| {
"content_hash": "e1c68965d77eba180d4635fac82d5104",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 73,
"avg_line_length": 29.82857142857143,
"alnum_prop": 0.7164750957854407,
"repo_name": "koduj-z-klasa/python101",
"id": "96cebd619aba7705fc02dc39e45d5e2c0775a248",
"size": "1096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bazy/orm/ormsa02.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2740"
},
{
"name": "HTML",
"bytes": "18056"
},
{
"name": "Python",
"bytes": "157924"
}
],
"symlink_target": ""
} |
import battle
import unittest
import json
FOURUP_HOOK = """{
"href": "https://battleofbits.com/games/four-up/matches/1",
"players": {
"https://battleofbits.com/players/deepblue": "R",
"https://battleofbits.com/players/garry": "B"
},
"turn": "https://battleofbits.com/players/deepblue",
"loser": "",
"winner": "",
"started": "2013-01-01T23:00:01Z",
"finished": "",
"moves": "https://battleofbits.com/games/four-up/matches/1/moves",
"board": [
["R","R","R","R","R","R",""],
["","","","","","",""],
["","","","","","",""],
["","","","","","",""],
["","B","","","","",""],
["","R","B","","","",""]
]
}"""
class BattleTestCase(unittest.TestCase):
    """Exercises the HTTP endpoints of the battle Flask application."""

    #: Headers marking the request body as JSON; never mutated.
    JSON_HEADERS = [('Content-Type', 'application/json')]

    def setUp(self):
        # Flask's test client lets us POST to routes without a live server.
        self.app = battle.app.test_client()

    def _post_json(self, route, payload):
        """POST ``payload`` to ``route`` with a JSON content type."""
        return self.app.post(route, data=payload, headers=self.JSON_HEADERS)

    def test_four_selection(self):
        response = self._post_json('/fourup', FOURUP_HOOK)
        move = json.loads(response.data.decode('utf-8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(move['column'], 6)

    def test_invite(self):
        response = self._post_json('/invite', '{"game": "fourup"}')
        self.assertEqual(response.status_code, 200)

    def test_bad_invite(self):
        response = self._post_json('/invite', '{"game": "checkers"}')
        self.assertEqual(response.status_code, 400)
        data = json.loads(response.get_data().decode('utf-8'))
        self.assertEqual(data['error'], "I don't know how to play checkers")
# Allow running this module directly: python battle_test.py
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "b1fbebba2604033dbd395c6a29a70e44",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 78,
"avg_line_length": 32.160714285714285,
"alnum_prop": 0.5452526374236535,
"repo_name": "battleofbits/vizzini.py",
"id": "e5708ccc6949f389a30cf3806d5c412bcb8132a9",
"size": "1801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "battle_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2019"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
class OrderFieldsMixin(object):
    """Reorders a form's ``fields`` mapping to match ``fields_order``.

    Classes mixing this in must provide ``fields`` (a mapping of field name
    to field) and ``fields_order`` (an iterable of field names). Field names
    absent from ``fields_order`` are dropped from ``fields``.
    """

    def order_fields(self):
        # Rebuild the mapping so iteration follows ``fields_order``.
        self.fields = OrderedDict(
            (field_name, self.fields[field_name])
            for field_name in self.fields_order
        )
| {
"content_hash": "5285f6b47b37a6b4b102f40aba196137",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 60,
"avg_line_length": 27.3,
"alnum_prop": 0.6666666666666666,
"repo_name": "muraliselva10/cloudkitty-dashboard",
"id": "824996366468d5bbf3fee26f14a21915b3612276",
"size": "880",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudkittydashboard/dashboards/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "13341"
},
{
"name": "JavaScript",
"bytes": "5149"
},
{
"name": "Python",
"bytes": "88524"
}
],
"symlink_target": ""
} |
import random
import time
import docker
import pytest
from ..helpers import (
force_leave_swarm, requires_api_version, requires_experimental
)
from .base import BaseAPIIntegrationTest, TEST_IMG
class ServiceTest(BaseAPIIntegrationTest):
    @classmethod
    def setup_class(cls):
        """Join a fresh swarm before any test in this class runs."""
        client = cls.get_client_instance()
        force_leave_swarm(client)
        cls._init_swarm(client)
    @classmethod
    def teardown_class(cls):
        """Leave the swarm once all tests in this class have finished."""
        client = cls.get_client_instance()
        force_leave_swarm(client)
    def tearDown(self):
        """Remove services created by the test; cleanup is best-effort."""
        for service in self.client.services(filters={'name': 'dockerpytest_'}):
            try:
                self.client.remove_service(service['ID'])
            except docker.errors.APIError:
                # The service may already be gone; ignore and continue.
                pass
        super().tearDown()
def get_service_name(self):
return f'dockerpytest_{random.getrandbits(64):x}'
    def get_service_container(self, service_name, attempts=20, interval=0.5,
                              include_stopped=False):
        """Poll for the container backing ``service_name``; None on timeout."""
        # There is some delay between the service's creation and the creation
        # of the service's containers. This method deals with the uncertainty
        # when trying to retrieve the container associated with a service.
        while True:
            containers = self.client.containers(
                filters={'name': [service_name]}, quiet=True,
                all=include_stopped
            )
            if len(containers) > 0:
                return containers[0]
            attempts -= 1
            if attempts <= 0:
                return None
            time.sleep(interval)
    def create_simple_service(self, name=None, labels=None):
        """Create a one-shot 'echo hello' service; returns (name, service)."""
        if name:
            name = f'dockerpytest_{name}'
        else:
            name = self.get_service_name()
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['echo', 'hello']
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        return name, self.client.create_service(
            task_tmpl, name=name, labels=labels
        )
    @requires_api_version('1.24')
    def test_list_services(self):
        """services() lists created services, filterable by name prefix."""
        services = self.client.services()
        assert isinstance(services, list)
        test_services = self.client.services(filters={'name': 'dockerpytest_'})
        assert len(test_services) == 0
        self.create_simple_service()
        test_services = self.client.services(filters={'name': 'dockerpytest_'})
        assert len(test_services) == 1
        assert 'dockerpytest_' in test_services[0]['Spec']['Name']
    @requires_api_version('1.24')
    def test_list_services_filter_by_label(self):
        """services() supports filtering by label."""
        test_services = self.client.services(filters={'label': 'test_label'})
        assert len(test_services) == 0
        self.create_simple_service(labels={'test_label': 'testing'})
        test_services = self.client.services(filters={'label': 'test_label'})
        assert len(test_services) == 1
        assert test_services[0]['Spec']['Labels']['test_label'] == 'testing'
    def test_inspect_service_by_id(self):
        """inspect_service() resolves a service by its ID."""
        svc_name, svc_id = self.create_simple_service()
        svc_info = self.client.inspect_service(svc_id)
        assert 'ID' in svc_info
        assert svc_info['ID'] == svc_id['ID']
    def test_inspect_service_by_name(self):
        """inspect_service() resolves a service by its name."""
        svc_name, svc_id = self.create_simple_service()
        svc_info = self.client.inspect_service(svc_name)
        assert 'ID' in svc_info
        assert svc_info['ID'] == svc_id['ID']
    @requires_api_version('1.29')
    def test_inspect_service_insert_defaults(self):
        """insert_defaults=True adds default fields such as RollbackConfig."""
        svc_name, svc_id = self.create_simple_service()
        svc_info = self.client.inspect_service(svc_id)
        svc_info_defaults = self.client.inspect_service(
            svc_id, insert_defaults=True
        )
        assert svc_info != svc_info_defaults
        assert 'RollbackConfig' in svc_info_defaults['Spec']
        assert 'RollbackConfig' not in svc_info['Spec']
    def test_remove_service_by_id(self):
        """remove_service() deletes a service addressed by ID."""
        svc_name, svc_id = self.create_simple_service()
        assert self.client.remove_service(svc_id)
        test_services = self.client.services(filters={'name': 'dockerpytest_'})
        assert len(test_services) == 0
    def test_remove_service_by_name(self):
        """remove_service() deletes a service addressed by name."""
        svc_name, svc_id = self.create_simple_service()
        assert self.client.remove_service(svc_name)
        test_services = self.client.services(filters={'name': 'dockerpytest_'})
        assert len(test_services) == 0
    def test_create_service_simple(self):
        """A freshly created service is inspectable and listed by name."""
        name, svc_id = self.create_simple_service()
        assert self.client.inspect_service(svc_id)
        services = self.client.services(filters={'name': name})
        assert len(services) == 1
        assert services[0]['ID'] == svc_id['ID']
@requires_api_version('1.25')
@requires_experimental(until='1.29')
def test_service_logs(self):
name, svc_id = self.create_simple_service()
assert self.get_service_container(name, include_stopped=True)
attempts = 20
while True:
if attempts == 0:
self.fail('No service logs produced by endpoint')
return
logs = self.client.service_logs(svc_id, stdout=True, is_tty=False)
try:
log_line = next(logs)
except StopIteration:
attempts -= 1
time.sleep(0.1)
continue
else:
break
if log_line is not None:
log_line = log_line.decode('utf-8')
assert 'hello\n' in log_line
    def test_create_service_custom_log_driver(self):
        """A custom log driver set at creation appears in the task template."""
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['echo', 'hello']
        )
        log_cfg = docker.types.DriverConfig('none')
        task_tmpl = docker.types.TaskTemplate(
            container_spec, log_driver=log_cfg
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'TaskTemplate' in svc_info['Spec']
        res_template = svc_info['Spec']['TaskTemplate']
        assert 'LogDriver' in res_template
        assert 'Name' in res_template['LogDriver']
        assert res_template['LogDriver']['Name'] == 'none'
    def test_create_service_with_volume_mount(self):
        """A volume mount given at creation round-trips through inspect."""
        vol_name = self.get_service_name()
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['ls'],
            mounts=[
                docker.types.Mount(target='/test', source=vol_name)
            ]
        )
        # Track the volume so the base class cleans it up after the test.
        self.tmp_volumes.append(vol_name)
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
        cspec = svc_info['Spec']['TaskTemplate']['ContainerSpec']
        assert 'Mounts' in cspec
        assert len(cspec['Mounts']) == 1
        mount = cspec['Mounts'][0]
        assert mount['Target'] == '/test'
        assert mount['Source'] == vol_name
        assert mount['Type'] == 'volume'
    def test_create_service_with_resources_constraints(self):
        """CPU/memory limits and reservations round-trip through inspect."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        resources = docker.types.Resources(
            cpu_limit=4000000, mem_limit=3 * 1024 * 1024 * 1024,
            cpu_reservation=3500000, mem_reservation=2 * 1024 * 1024 * 1024
        )
        task_tmpl = docker.types.TaskTemplate(
            container_spec, resources=resources
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'TaskTemplate' in svc_info['Spec']
        res_template = svc_info['Spec']['TaskTemplate']
        assert 'Resources' in res_template
        assert res_template['Resources']['Limits'] == resources['Limits']
        assert res_template['Resources']['Reservations'] == resources[
            'Reservations'
        ]
    def _create_service_with_generic_resources(self, generic_resources):
        """Create a service reserving ``generic_resources``.

        Returns a (Resources, inspect_service result) pair for assertions.
        """
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        resources = docker.types.Resources(
            generic_resources=generic_resources
        )
        task_tmpl = docker.types.TaskTemplate(
            container_spec, resources=resources
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        return resources, self.client.inspect_service(svc_id)
    @requires_api_version('1.32')
    def test_create_service_with_generic_resources(self):
        """Generic resources round-trip in both list and dict input forms."""
        # Each case has an 'input'; the dict form also has the normalized
        # 'expected' list it should be converted to.
        successful = [{
            'input': [
                {'DiscreteResourceSpec': {'Kind': 'gpu', 'Value': 1}},
                {'NamedResourceSpec': {'Kind': 'gpu', 'Value': 'test'}}
            ]}, {
            'input': {'gpu': 2, 'mpi': 'latest'},
            'expected': [
                {'DiscreteResourceSpec': {'Kind': 'gpu', 'Value': 2}},
                {'NamedResourceSpec': {'Kind': 'mpi', 'Value': 'latest'}}
            ]}
        ]
        for test in successful:
            t = test['input']
            resrcs, svc_info = self._create_service_with_generic_resources(t)
            assert 'TaskTemplate' in svc_info['Spec']
            res_template = svc_info['Spec']['TaskTemplate']
            assert 'Resources' in res_template
            res_reservations = res_template['Resources']['Reservations']
            assert res_reservations == resrcs['Reservations']
            assert 'GenericResources' in res_reservations
            def _key(d, specs=('DiscreteResourceSpec', 'NamedResourceSpec')):
                # Sort key: the resource Kind under whichever spec key is set.
                return [d.get(s, {}).get('Kind', '') for s in specs]
            actual = res_reservations['GenericResources']
            expected = test.get('expected', test['input'])
            # Compare order-insensitively since the daemon may reorder.
            assert sorted(actual, key=_key) == sorted(expected, key=_key)
    @requires_api_version('1.32')
    def test_create_service_with_invalid_generic_resources(self):
        """Non list/dict generic-resource inputs raise InvalidArgument."""
        for test_input in ['1', 1.0, lambda: '1', {1, 2}]:
            with pytest.raises(docker.errors.InvalidArgument):
                self._create_service_with_generic_resources(test_input)
    def test_create_service_with_update_config(self):
        """UpdateConfig parallelism/delay/failure_action round-trip."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        task_tmpl = docker.types.TaskTemplate(container_spec)
        update_config = docker.types.UpdateConfig(
            parallelism=10, delay=5, failure_action='pause'
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, update_config=update_config, name=name
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'UpdateConfig' in svc_info['Spec']
        uc = svc_info['Spec']['UpdateConfig']
        assert update_config['Parallelism'] == uc['Parallelism']
        assert update_config['Delay'] == uc['Delay']
        assert update_config['FailureAction'] == uc['FailureAction']
    @requires_api_version('1.28')
    def test_create_service_with_failure_action_rollback(self):
        """UpdateConfig failure_action='rollback' round-trips."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        task_tmpl = docker.types.TaskTemplate(container_spec)
        update_config = docker.types.UpdateConfig(failure_action='rollback')
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, update_config=update_config, name=name
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'UpdateConfig' in svc_info['Spec']
        uc = svc_info['Spec']['UpdateConfig']
        assert update_config['FailureAction'] == uc['FailureAction']
@requires_api_version('1.25')
def test_create_service_with_update_config_monitor(self):
container_spec = docker.types.ContainerSpec('busybox', ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
update_config = docker.types.UpdateConfig(
monitor=300000000, max_failure_ratio=0.4
)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, update_config=update_config, name=name
)
svc_info = self.client.inspect_service(svc_id)
assert 'UpdateConfig' in svc_info['Spec']
uc = svc_info['Spec']['UpdateConfig']
assert update_config['Monitor'] == uc['Monitor']
assert update_config['MaxFailureRatio'] == uc['MaxFailureRatio']
    @requires_api_version('1.28')
    def test_create_service_with_rollback_config(self):
        """All RollbackConfig fields round-trip through inspect."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        task_tmpl = docker.types.TaskTemplate(container_spec)
        rollback_cfg = docker.types.RollbackConfig(
            parallelism=10, delay=5, failure_action='pause',
            monitor=300000000, max_failure_ratio=0.4
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, rollback_config=rollback_cfg, name=name
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'RollbackConfig' in svc_info['Spec']
        rc = svc_info['Spec']['RollbackConfig']
        assert rollback_cfg['Parallelism'] == rc['Parallelism']
        assert rollback_cfg['Delay'] == rc['Delay']
        assert rollback_cfg['FailureAction'] == rc['FailureAction']
        assert rollback_cfg['Monitor'] == rc['Monitor']
        assert rollback_cfg['MaxFailureRatio'] == rc['MaxFailureRatio']
    def test_create_service_with_restart_policy(self):
        """A RestartPolicy round-trips through inspect unchanged."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        policy = docker.types.RestartPolicy(
            docker.types.RestartPolicy.condition_types.ANY,
            delay=5, max_attempts=5
        )
        task_tmpl = docker.types.TaskTemplate(
            container_spec, restart_policy=policy
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'RestartPolicy' in svc_info['Spec']['TaskTemplate']
        assert policy == svc_info['Spec']['TaskTemplate']['RestartPolicy']
    def test_create_service_with_custom_networks(self):
        """Networks may be given by name or as {'Target': ...} dicts."""
        net1 = self.client.create_network(
            'dockerpytest_1', driver='overlay', ipam={'Driver': 'default'}
        )
        # Track networks so the base class cleans them up after the test.
        self.tmp_networks.append(net1['Id'])
        net2 = self.client.create_network(
            'dockerpytest_2', driver='overlay', ipam={'Driver': 'default'}
        )
        self.tmp_networks.append(net2['Id'])
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, name=name, networks=[
                'dockerpytest_1', {'Target': 'dockerpytest_2'}
            ]
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'Networks' in svc_info['Spec']
        # Both forms are normalized to Target dicts keyed by network ID.
        assert svc_info['Spec']['Networks'] == [
            {'Target': net1['Id']}, {'Target': net2['Id']}
        ]
    def test_create_service_with_network_attachment_config(self):
        """NetworkAttachmentConfig aliases and driver options round-trip."""
        network = self.client.create_network(
            'dockerpytest_1', driver='overlay', ipam={'Driver': 'default'}
        )
        # Track the network so the base class cleans it up after the test.
        self.tmp_networks.append(network['Id'])
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        network_config = docker.types.NetworkAttachmentConfig(
            target='dockerpytest_1',
            aliases=['dockerpytest_1_alias'],
            options={
                'foo': 'bar'
            }
        )
        task_tmpl = docker.types.TaskTemplate(
            container_spec,
            networks=[network_config]
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, name=name
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'Networks' in svc_info['Spec']['TaskTemplate']
        service_networks_info = svc_info['Spec']['TaskTemplate']['Networks']
        assert len(service_networks_info) == 1
        assert service_networks_info[0]['Target'] == network['Id']
        assert service_networks_info[0]['Aliases'] == ['dockerpytest_1_alias']
        assert service_networks_info[0]['DriverOpts'] == {'foo': 'bar'}
    def test_create_service_with_placement(self):
        """A list of constraint strings is wrapped into a Constraints dict."""
        node_id = self.client.nodes()[0]['ID']
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        task_tmpl = docker.types.TaskTemplate(
            container_spec, placement=[f'node.id=={node_id}']
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'Placement' in svc_info['Spec']['TaskTemplate']
        assert (svc_info['Spec']['TaskTemplate']['Placement'] ==
                {'Constraints': [f'node.id=={node_id}']})
    def test_create_service_with_placement_object(self):
        """A Placement object with constraints round-trips through inspect."""
        node_id = self.client.nodes()[0]['ID']
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        placemt = docker.types.Placement(
            constraints=[f'node.id=={node_id}']
        )
        task_tmpl = docker.types.TaskTemplate(
            container_spec, placement=placemt
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'Placement' in svc_info['Spec']['TaskTemplate']
        assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
    @requires_api_version('1.30')
    def test_create_service_with_placement_platform(self):
        """Placement platform constraints round-trip through inspect."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        placemt = docker.types.Placement(platforms=[('x86_64', 'linux')])
        task_tmpl = docker.types.TaskTemplate(
            container_spec, placement=placemt
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'Placement' in svc_info['Spec']['TaskTemplate']
        assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
    @requires_api_version('1.27')
    def test_create_service_with_placement_preferences(self):
        """Placement preferences given as dicts round-trip through inspect."""
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        placemt = docker.types.Placement(preferences=[
            {'Spread': {'SpreadDescriptor': 'com.dockerpy.test'}}
        ])
        task_tmpl = docker.types.TaskTemplate(
            container_spec, placement=placemt
        )
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert 'Placement' in svc_info['Spec']['TaskTemplate']
        assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
@requires_api_version('1.27')
def test_create_service_with_placement_preferences_tuple(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
placemt = docker.types.Placement(preferences=(
('spread', 'com.dockerpy.test'),
))
task_tmpl = docker.types.TaskTemplate(
container_spec, placement=placemt
)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
@requires_api_version('1.40')
def test_create_service_with_placement_maxreplicas(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
placemt = docker.types.Placement(maxreplicas=1)
task_tmpl = docker.types.TaskTemplate(
container_spec, placement=placemt
)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt
def test_create_service_with_endpoint_spec(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
endpoint_spec = docker.types.EndpointSpec(ports={
12357: (1990, 'udp'),
12562: (678,),
53243: 8080,
})
svc_id = self.client.create_service(
task_tmpl, name=name, endpoint_spec=endpoint_spec
)
svc_info = self.client.inspect_service(svc_id)
ports = svc_info['Spec']['EndpointSpec']['Ports']
for port in ports:
if port['PublishedPort'] == 12562:
assert port['TargetPort'] == 678
assert port['Protocol'] == 'tcp'
elif port['PublishedPort'] == 53243:
assert port['TargetPort'] == 8080
assert port['Protocol'] == 'tcp'
elif port['PublishedPort'] == 12357:
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
@requires_api_version('1.32')
def test_create_service_with_endpoint_spec_host_publish_mode(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
endpoint_spec = docker.types.EndpointSpec(ports={
12357: (1990, None, 'host'),
})
svc_id = self.client.create_service(
task_tmpl, name=name, endpoint_spec=endpoint_spec
)
svc_info = self.client.inspect_service(svc_id)
ports = svc_info['Spec']['EndpointSpec']['Ports']
assert len(ports) == 1
port = ports[0]
assert port['PublishedPort'] == 12357
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'tcp'
assert port['PublishMode'] == 'host'
def test_create_service_with_env(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['true'], env={'DOCKER_PY_TEST': 1}
)
task_tmpl = docker.types.TaskTemplate(
container_spec,
)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'TaskTemplate' in svc_info['Spec']
assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
con_spec = svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert 'Env' in con_spec
assert con_spec['Env'] == ['DOCKER_PY_TEST=1']
@requires_api_version('1.29')
def test_create_service_with_update_order(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
update_config = docker.types.UpdateConfig(
parallelism=10, delay=5, order='start-first'
)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, update_config=update_config, name=name
)
svc_info = self.client.inspect_service(svc_id)
assert 'UpdateConfig' in svc_info['Spec']
uc = svc_info['Spec']['UpdateConfig']
assert update_config['Parallelism'] == uc['Parallelism']
assert update_config['Delay'] == uc['Delay']
assert update_config['Order'] == uc['Order']
@requires_api_version('1.25')
def test_create_service_with_tty(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['true'], tty=True
)
task_tmpl = docker.types.TaskTemplate(
container_spec,
)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'TaskTemplate' in svc_info['Spec']
assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
con_spec = svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert 'TTY' in con_spec
assert con_spec['TTY'] is True
@requires_api_version('1.25')
def test_create_service_with_tty_dict(self):
container_spec = {
'Image': TEST_IMG,
'Command': ['true'],
'TTY': True
}
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'TaskTemplate' in svc_info['Spec']
assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
con_spec = svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert 'TTY' in con_spec
assert con_spec['TTY'] is True
def test_create_service_global_mode(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name, mode='global'
)
svc_info = self.client.inspect_service(svc_id)
assert 'Mode' in svc_info['Spec']
assert 'Global' in svc_info['Spec']['Mode']
def test_create_service_replicated_mode(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name,
mode=docker.types.ServiceMode('replicated', 5)
)
svc_info = self.client.inspect_service(svc_id)
assert 'Mode' in svc_info['Spec']
assert 'Replicated' in svc_info['Spec']['Mode']
assert svc_info['Spec']['Mode']['Replicated'] == {'Replicas': 5}
@requires_api_version('1.41')
def test_create_service_global_job_mode(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name, mode='global-job'
)
svc_info = self.client.inspect_service(svc_id)
assert 'Mode' in svc_info['Spec']
assert 'GlobalJob' in svc_info['Spec']['Mode']
@requires_api_version('1.41')
def test_create_service_replicated_job_mode(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name,
mode=docker.types.ServiceMode('replicated-job', 5)
)
svc_info = self.client.inspect_service(svc_id)
assert 'Mode' in svc_info['Spec']
assert 'ReplicatedJob' in svc_info['Spec']['Mode']
assert svc_info['Spec']['Mode']['ReplicatedJob'] == {
'MaxConcurrent': 1,
'TotalCompletions': 5
}
@requires_api_version('1.25')
def test_update_service_force_update(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'TaskTemplate' in svc_info['Spec']
assert 'ForceUpdate' in svc_info['Spec']['TaskTemplate']
assert svc_info['Spec']['TaskTemplate']['ForceUpdate'] == 0
version_index = svc_info['Version']['Index']
task_tmpl = docker.types.TaskTemplate(container_spec, force_update=10)
self.client.update_service(name, version_index, task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert svc_info['Spec']['TaskTemplate']['ForceUpdate'] == 10
@requires_api_version('1.25')
def test_create_service_with_secret(self):
secret_name = 'favorite_touhou'
secret_data = b'phantasmagoria of flower view'
secret_id = self.client.create_secret(secret_name, secret_data)
self.tmp_secrets.append(secret_id)
secret_ref = docker.types.SecretReference(secret_id, secret_name)
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], secrets=[secret_ref]
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Secrets' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
secrets = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Secrets']
assert secrets[0] == secret_ref
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
container, f'cat /run/secrets/{secret_name}'
)
assert self.client.exec_start(exec_id) == secret_data
@requires_api_version('1.25')
def test_create_service_with_unicode_secret(self):
secret_name = 'favorite_touhou'
secret_data = '東方花映塚'
secret_id = self.client.create_secret(secret_name, secret_data)
self.tmp_secrets.append(secret_id)
secret_ref = docker.types.SecretReference(secret_id, secret_name)
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], secrets=[secret_ref]
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Secrets' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
secrets = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Secrets']
assert secrets[0] == secret_ref
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
container, f'cat /run/secrets/{secret_name}'
)
container_secret = self.client.exec_start(exec_id)
container_secret = container_secret.decode('utf-8')
assert container_secret == secret_data
@requires_api_version('1.30')
def test_create_service_with_config(self):
config_name = 'favorite_touhou'
config_data = b'phantasmagoria of flower view'
config_id = self.client.create_config(config_name, config_data)
self.tmp_configs.append(config_id)
config_ref = docker.types.ConfigReference(config_id, config_name)
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], configs=[config_ref]
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Configs' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
configs = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Configs']
assert configs[0] == config_ref
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
container, f'cat /{config_name}'
)
assert self.client.exec_start(exec_id) == config_data
@requires_api_version('1.30')
def test_create_service_with_unicode_config(self):
config_name = 'favorite_touhou'
config_data = '東方花映塚'
config_id = self.client.create_config(config_name, config_data)
self.tmp_configs.append(config_id)
config_ref = docker.types.ConfigReference(config_id, config_name)
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], configs=[config_ref]
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Configs' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
configs = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Configs']
assert configs[0] == config_ref
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
container, f'cat /{config_name}'
)
container_config = self.client.exec_start(exec_id)
container_config = container_config.decode('utf-8')
assert container_config == config_data
@requires_api_version('1.25')
def test_create_service_with_hosts(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], hosts={
'foobar': '127.0.0.1',
'baz': '8.8.8.8',
}
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Hosts' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
hosts = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Hosts']
assert len(hosts) == 2
assert '127.0.0.1 foobar' in hosts
assert '8.8.8.8 baz' in hosts
@requires_api_version('1.25')
def test_create_service_with_hostname(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], hostname='foobar.baz.com'
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Hostname' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert (
svc_info['Spec']['TaskTemplate']['ContainerSpec']['Hostname'] ==
'foobar.baz.com'
)
@requires_api_version('1.25')
def test_create_service_with_groups(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], groups=['shrinemaidens', 'youkais']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Groups' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
groups = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Groups']
assert len(groups) == 2
assert 'shrinemaidens' in groups
assert 'youkais' in groups
@requires_api_version('1.25')
def test_create_service_with_dns_config(self):
dns_config = docker.types.DNSConfig(
nameservers=['8.8.8.8', '8.8.4.4'],
search=['local'], options=['debug']
)
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['sleep', '999'], dns_config=dns_config
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'DNSConfig' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert (
dns_config ==
svc_info['Spec']['TaskTemplate']['ContainerSpec']['DNSConfig']
)
@requires_api_version('1.25')
def test_create_service_with_healthcheck(self):
second = 1000000000
hc = docker.types.Healthcheck(
test='true', retries=3, timeout=1 * second,
start_period=3 * second, interval=int(second / 2),
)
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['sleep', '999'], healthcheck=hc
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert (
'Healthcheck' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
)
assert (
hc ==
svc_info['Spec']['TaskTemplate']['ContainerSpec']['Healthcheck']
)
@requires_api_version('1.28')
def test_create_service_with_readonly(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['sleep', '999'], read_only=True
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert (
'ReadOnly' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
)
assert svc_info['Spec']['TaskTemplate']['ContainerSpec']['ReadOnly']
@requires_api_version('1.28')
def test_create_service_with_stop_signal(self):
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['sleep', '999'], stop_signal='SIGINT'
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert (
'StopSignal' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
)
assert (
svc_info['Spec']['TaskTemplate']['ContainerSpec']['StopSignal'] ==
'SIGINT'
)
@requires_api_version('1.30')
def test_create_service_with_privileges(self):
priv = docker.types.Privileges(selinux_disable=True)
container_spec = docker.types.ContainerSpec(
TEST_IMG, ['sleep', '999'], privileges=priv
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert (
'Privileges' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
)
privileges = (
svc_info['Spec']['TaskTemplate']['ContainerSpec']['Privileges']
)
assert privileges['SELinuxContext']['Disable'] is True
@requires_api_version('1.38')
def test_create_service_with_init(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['sleep', '999'], init=True
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Init' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert (
svc_info['Spec']['TaskTemplate']['ContainerSpec']['Init'] is True
)
@requires_api_version('1.25')
def test_update_service_with_defaults_name(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Name' in svc_info['Spec']
assert svc_info['Spec']['Name'] == name
version_index = svc_info['Version']['Index']
task_tmpl = docker.types.TaskTemplate(container_spec, force_update=10)
self._update_service(
svc_id, name, version_index, task_tmpl, fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert 'Name' in svc_info['Spec']
assert svc_info['Spec']['Name'] == name
@requires_api_version('1.25')
def test_update_service_with_defaults_labels(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name, labels={'service.label': 'SampleLabel'}
)
svc_info = self.client.inspect_service(svc_id)
assert 'Labels' in svc_info['Spec']
assert 'service.label' in svc_info['Spec']['Labels']
assert svc_info['Spec']['Labels']['service.label'] == 'SampleLabel'
version_index = svc_info['Version']['Index']
task_tmpl = docker.types.TaskTemplate(container_spec, force_update=10)
self._update_service(
svc_id, name, version_index, task_tmpl, name=name,
fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert 'Labels' in svc_info['Spec']
assert 'service.label' in svc_info['Spec']['Labels']
assert svc_info['Spec']['Labels']['service.label'] == 'SampleLabel'
def test_update_service_with_defaults_mode(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name,
mode=docker.types.ServiceMode(mode='replicated', replicas=2)
)
svc_info = self.client.inspect_service(svc_id)
assert 'Mode' in svc_info['Spec']
assert 'Replicated' in svc_info['Spec']['Mode']
assert 'Replicas' in svc_info['Spec']['Mode']['Replicated']
assert svc_info['Spec']['Mode']['Replicated']['Replicas'] == 2
version_index = svc_info['Version']['Index']
self._update_service(
svc_id, name, version_index, labels={'force': 'update'},
fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert 'Mode' in svc_info['Spec']
assert 'Replicated' in svc_info['Spec']['Mode']
assert 'Replicas' in svc_info['Spec']['Mode']['Replicated']
assert svc_info['Spec']['Mode']['Replicated']['Replicas'] == 2
    def test_update_service_with_defaults_container_labels(self):
        """Container labels persist across updates that fetch the current spec.

        Two updates are exercised: one that changes only service-level
        labels, and one that submits a fresh task template with no labels
        of its own. In both cases the original container label must still
        be present afterwards.
        """
        container_spec = docker.types.ContainerSpec(
            'busybox', ['echo', 'hello'],
            labels={'container.label': 'SampleLabel'}
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, name=name, labels={'service.label': 'SampleLabel'}
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'TaskTemplate' in svc_info['Spec']
        assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
        assert 'Labels' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
        labels = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Labels']
        assert labels['container.label'] == 'SampleLabel'
        version_index = svc_info['Version']['Index']
        # First update: only service labels change; the container label
        # should be carried over from the fetched current spec.
        self._update_service(
            svc_id, name, version_index, labels={'force': 'update'},
            fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        new_index = svc_info['Version']['Index']
        assert new_index > version_index
        assert 'TaskTemplate' in svc_info['Spec']
        assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
        assert 'Labels' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
        labels = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Labels']
        assert labels['container.label'] == 'SampleLabel'
        # Second update: submit a task template without labels; the existing
        # container label is asserted to survive this as well.
        container_spec = docker.types.ContainerSpec(
            'busybox', ['echo', 'hello']
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        self._update_service(
            svc_id, name, new_index, task_tmpl, fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        newer_index = svc_info['Version']['Index']
        assert newer_index > new_index
        assert 'TaskTemplate' in svc_info['Spec']
        assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
        assert 'Labels' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
        labels = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Labels']
        assert labels['container.label'] == 'SampleLabel'
def test_update_service_with_defaults_update_config(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
update_config = docker.types.UpdateConfig(
parallelism=10, delay=5, failure_action='pause'
)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, update_config=update_config, name=name
)
svc_info = self.client.inspect_service(svc_id)
assert 'UpdateConfig' in svc_info['Spec']
uc = svc_info['Spec']['UpdateConfig']
assert update_config['Parallelism'] == uc['Parallelism']
assert update_config['Delay'] == uc['Delay']
assert update_config['FailureAction'] == uc['FailureAction']
version_index = svc_info['Version']['Index']
self._update_service(
svc_id, name, version_index, labels={'force': 'update'},
fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert 'UpdateConfig' in svc_info['Spec']
uc = svc_info['Spec']['UpdateConfig']
assert update_config['Parallelism'] == uc['Parallelism']
assert update_config['Delay'] == uc['Delay']
assert update_config['FailureAction'] == uc['FailureAction']
    def test_update_service_with_defaults_networks(self):
        """Service networks are preserved by updates that fetch the current spec.

        Creates a service attached to two overlay networks (one passed by
        name, one as a ``{'Target': ...}`` dict), updates it twice with
        ``fetch_current_spec=True``, and checks the network list each time.
        """
        net1 = self.client.create_network(
            'dockerpytest_1', driver='overlay', ipam={'Driver': 'default'}
        )
        self.tmp_networks.append(net1['Id'])
        net2 = self.client.create_network(
            'dockerpytest_2', driver='overlay', ipam={'Driver': 'default'}
        )
        self.tmp_networks.append(net2['Id'])
        container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(
            task_tmpl, name=name, networks=[
                'dockerpytest_1', {'Target': 'dockerpytest_2'}
            ]
        )
        svc_info = self.client.inspect_service(svc_id)
        # On creation the networks are reported at the top of the spec.
        assert 'Networks' in svc_info['Spec']
        assert svc_info['Spec']['Networks'] == [
            {'Target': net1['Id']}, {'Target': net2['Id']}
        ]
        version_index = svc_info['Version']['Index']
        # Label-only update: both networks must be carried over.
        self._update_service(
            svc_id, name, version_index, labels={'force': 'update'},
            fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        new_index = svc_info['Version']['Index']
        assert new_index > version_index
        # After the update the networks are reported under TaskTemplate.
        assert 'Networks' in svc_info['Spec']['TaskTemplate']
        assert svc_info['Spec']['TaskTemplate']['Networks'] == [
            {'Target': net1['Id']}, {'Target': net2['Id']}
        ]
        # Explicitly narrowing the network list should take effect.
        self._update_service(
            svc_id, name, new_index, networks=[net1['Id']],
            fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'Networks' in svc_info['Spec']['TaskTemplate']
        assert svc_info['Spec']['TaskTemplate']['Networks'] == [
            {'Target': net1['Id']}
        ]
def test_update_service_with_defaults_endpoint_spec(self):
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
endpoint_spec = docker.types.EndpointSpec(ports={
12357: (1990, 'udp'),
12562: (678,),
53243: 8080,
})
svc_id = self.client.create_service(
task_tmpl, name=name, endpoint_spec=endpoint_spec
)
svc_info = self.client.inspect_service(svc_id)
print(svc_info)
ports = svc_info['Spec']['EndpointSpec']['Ports']
for port in ports:
if port['PublishedPort'] == 12562:
assert port['TargetPort'] == 678
assert port['Protocol'] == 'tcp'
elif port['PublishedPort'] == 53243:
assert port['TargetPort'] == 8080
assert port['Protocol'] == 'tcp'
elif port['PublishedPort'] == 12357:
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
svc_info = self.client.inspect_service(svc_id)
version_index = svc_info['Version']['Index']
self._update_service(
svc_id, name, version_index, labels={'force': 'update'},
fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
ports = svc_info['Spec']['EndpointSpec']['Ports']
for port in ports:
if port['PublishedPort'] == 12562:
assert port['TargetPort'] == 678
assert port['Protocol'] == 'tcp'
elif port['PublishedPort'] == 53243:
assert port['TargetPort'] == 8080
assert port['Protocol'] == 'tcp'
elif port['PublishedPort'] == 12357:
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
    @requires_api_version('1.25')
    def test_update_service_remove_healthcheck(self):
        """Updating with an empty healthcheck dict clears the healthcheck.

        Creates a service with a full Healthcheck, verifies it is stored,
        then updates with ``healthcheck={}`` and checks that the inspected
        spec either omits the key or reports an empty value.
        """
        # Healthcheck durations are expressed in nanoseconds.
        second = 1000000000
        hc = docker.types.Healthcheck(
            test='true', retries=3, timeout=1 * second,
            start_period=3 * second, interval=int(second / 2),
        )
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['sleep', '999'], healthcheck=hc
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        name = self.get_service_name()
        svc_id = self.client.create_service(task_tmpl, name=name)
        svc_info = self.client.inspect_service(svc_id)
        assert (
            'Healthcheck' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
        )
        assert (
            hc ==
            svc_info['Spec']['TaskTemplate']['ContainerSpec']['Healthcheck']
        )
        # An empty healthcheck dict signals removal of the healthcheck.
        container_spec = docker.types.ContainerSpec(
            TEST_IMG, ['sleep', '999'], healthcheck={}
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        version_index = svc_info['Version']['Index']
        self._update_service(
            svc_id, name, version_index, task_tmpl, fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        new_index = svc_info['Version']['Index']
        assert new_index > version_index
        container_spec = svc_info['Spec']['TaskTemplate']['ContainerSpec']
        # Accept either a missing key or an empty/falsy Healthcheck value.
        assert (
            'Healthcheck' not in container_spec or
            not container_spec['Healthcheck']
        )
def test_update_service_remove_labels(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello']
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name, labels={'service.label': 'SampleLabel'}
)
svc_info = self.client.inspect_service(svc_id)
assert 'Labels' in svc_info['Spec']
assert 'service.label' in svc_info['Spec']['Labels']
assert svc_info['Spec']['Labels']['service.label'] == 'SampleLabel'
version_index = svc_info['Version']['Index']
self._update_service(
svc_id, name, version_index, labels={}, fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert not svc_info['Spec'].get('Labels')
def test_update_service_remove_container_labels(self):
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello'],
labels={'container.label': 'SampleLabel'}
)
task_tmpl = docker.types.TaskTemplate(container_spec)
name = self.get_service_name()
svc_id = self.client.create_service(
task_tmpl, name=name, labels={'service.label': 'SampleLabel'}
)
svc_info = self.client.inspect_service(svc_id)
assert 'TaskTemplate' in svc_info['Spec']
assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
assert 'Labels' in svc_info['Spec']['TaskTemplate']['ContainerSpec']
labels = svc_info['Spec']['TaskTemplate']['ContainerSpec']['Labels']
assert labels['container.label'] == 'SampleLabel'
version_index = svc_info['Version']['Index']
container_spec = docker.types.ContainerSpec(
'busybox', ['echo', 'hello'],
labels={}
)
task_tmpl = docker.types.TaskTemplate(container_spec)
self._update_service(
svc_id, name, version_index, task_tmpl, fetch_current_spec=True
)
svc_info = self.client.inspect_service(svc_id)
new_index = svc_info['Version']['Index']
assert new_index > version_index
assert 'TaskTemplate' in svc_info['Spec']
assert 'ContainerSpec' in svc_info['Spec']['TaskTemplate']
container_spec = svc_info['Spec']['TaskTemplate']['ContainerSpec']
assert not container_spec.get('Labels')
    @requires_api_version('1.29')
    def test_update_service_with_network_change(self):
        """Networks can be swapped via updates, passed either as a kwarg or
        on the task template, without disturbing the container spec.

        Four phases: create on net1; update to net2 via the ``networks``
        kwarg; update back to net1 and verify the container spec survived;
        finally update to net2 via ``TaskTemplate(networks=...)``.
        """
        container_spec = docker.types.ContainerSpec(
            'busybox', ['echo', 'hello']
        )
        task_tmpl = docker.types.TaskTemplate(container_spec)
        net1 = self.client.create_network(
            self.get_service_name(), driver='overlay',
            ipam={'Driver': 'default'}
        )
        self.tmp_networks.append(net1['Id'])
        net2 = self.client.create_network(
            self.get_service_name(), driver='overlay',
            ipam={'Driver': 'default'}
        )
        self.tmp_networks.append(net2['Id'])
        name = self.get_service_name()
        # Phase 1: create the service attached to net1.
        svc_id = self.client.create_service(
            task_tmpl, name=name, networks=[net1['Id']]
        )
        svc_info = self.client.inspect_service(svc_id)
        assert 'Networks' in svc_info['Spec']
        assert len(svc_info['Spec']['Networks']) > 0
        assert svc_info['Spec']['Networks'][0]['Target'] == net1['Id']
        svc_info = self.client.inspect_service(svc_id)
        version_index = svc_info['Version']['Index']
        # Phase 2: switch to net2 via the `networks` keyword argument.
        task_tmpl = docker.types.TaskTemplate(container_spec)
        self._update_service(
            svc_id, name, version_index, task_tmpl, name=name,
            networks=[net2['Id']], fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        task_template = svc_info['Spec']['TaskTemplate']
        assert 'Networks' in task_template
        assert len(task_template['Networks']) > 0
        assert task_template['Networks'][0]['Target'] == net2['Id']
        svc_info = self.client.inspect_service(svc_id)
        new_index = svc_info['Version']['Index']
        assert new_index > version_index
        # Phase 3: switch back to net1; the container spec (image/command)
        # must be preserved across the network-only update.
        self._update_service(
            svc_id, name, new_index, name=name, networks=[net1['Id']],
            fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        task_template = svc_info['Spec']['TaskTemplate']
        assert 'ContainerSpec' in task_template
        new_spec = task_template['ContainerSpec']
        assert 'Image' in new_spec
        assert new_spec['Image'].split(':')[0] == 'busybox'
        assert 'Command' in new_spec
        assert new_spec['Command'] == ['echo', 'hello']
        assert 'Networks' in task_template
        assert len(task_template['Networks']) > 0
        assert task_template['Networks'][0]['Target'] == net1['Id']
        svc_info = self.client.inspect_service(svc_id)
        new_index = svc_info['Version']['Index']
        # Phase 4: switch to net2 again, this time carrying the networks
        # on the task template itself.
        task_tmpl = docker.types.TaskTemplate(
            container_spec, networks=[net2['Id']]
        )
        self._update_service(
            svc_id, name, new_index, task_tmpl, name=name,
            fetch_current_spec=True
        )
        svc_info = self.client.inspect_service(svc_id)
        task_template = svc_info['Spec']['TaskTemplate']
        assert 'Networks' in task_template
        assert len(task_template['Networks']) > 0
        assert task_template['Networks'][0]['Target'] == net2['Id']
def _update_service(self, svc_id, *args, **kwargs):
    # service update tests seem to be a bit flaky
    # give them a chance to retry the update with a new version index
    try:
        self.client.update_service(*args, **kwargs)
    except docker.errors.APIError as e:
        if e.explanation.endswith("update out of sequence"):
            # The service changed between inspect and update; fetch the
            # current version index and retry exactly once with it.
            svc_info = self.client.inspect_service(svc_id)
            version_index = svc_info['Version']['Index']
            if len(args) > 1:
                # `version` was passed positionally as the second arg:
                # splice the fresh index into that slot.
                args = (args[0], version_index) + args[2:]
            else:
                kwargs['version'] = version_index
            self.client.update_service(*args, **kwargs)
        else:
            # Any other API error is a real failure.
            raise
@requires_api_version('1.41')
def test_create_service_cap_add(self):
    """A capability passed via cap_add must surface in CapabilityAdd."""
    svc_name = self.get_service_name()
    spec = docker.types.ContainerSpec(
        TEST_IMG, ['echo', 'hello'], cap_add=['CAP_SYSLOG']
    )
    svc_id = self.client.create_service(
        docker.types.TaskTemplate(spec), name=svc_name
    )
    assert self.client.inspect_service(svc_id)
    matches = self.client.services(filters={'name': svc_name})
    assert len(matches) == 1
    assert matches[0]['ID'] == svc_id['ID']
    created_spec = matches[0]['Spec']['TaskTemplate']['ContainerSpec']
    assert 'CAP_SYSLOG' in created_spec['CapabilityAdd']
@requires_api_version('1.41')
def test_create_service_cap_drop(self):
    """A capability passed via cap_drop must surface in CapabilityDrop."""
    svc_name = self.get_service_name()
    spec = docker.types.ContainerSpec(
        TEST_IMG, ['echo', 'hello'], cap_drop=['CAP_SYSLOG']
    )
    svc_id = self.client.create_service(
        docker.types.TaskTemplate(spec), name=svc_name
    )
    assert self.client.inspect_service(svc_id)
    matches = self.client.services(filters={'name': svc_name})
    assert len(matches) == 1
    assert matches[0]['ID'] == svc_id['ID']
    created_spec = matches[0]['Spec']['TaskTemplate']['ContainerSpec']
    assert 'CAP_SYSLOG' in created_spec['CapabilityDrop']
@requires_api_version('1.40')
def test_create_service_with_sysctl(self):
    """Sysctls passed at creation must round-trip through the service spec."""
    svc_name = self.get_service_name()
    wanted = {
        'net.core.somaxconn': '1024',
        'net.ipv4.tcp_syncookies': '0',
    }
    spec = docker.types.ContainerSpec(
        TEST_IMG, ['echo', 'hello'], sysctls=wanted
    )
    svc_id = self.client.create_service(
        docker.types.TaskTemplate(spec), name=svc_name
    )
    assert self.client.inspect_service(svc_id)
    matches = self.client.services(filters={'name': svc_name})
    assert len(matches) == 1
    assert matches[0]['ID'] == svc_id['ID']
    created_spec = matches[0]['Spec']['TaskTemplate']['ContainerSpec']
    # Every requested sysctl must come back with its exact string value.
    for sysctl_key, sysctl_value in wanted.items():
        assert created_spec['Sysctls'][sysctl_key] == sysctl_value
| {
"content_hash": "cfa2ee7b6f180d4bc77c69226de9344b",
"timestamp": "",
"source": "github",
"line_count": 1441,
"max_line_length": 79,
"avg_line_length": 42.86814712005552,
"alnum_prop": 0.596377057937934,
"repo_name": "docker/docker-py",
"id": "8ce7c9d57e2f792994329b7b3d788e133af73bbd",
"size": "61793",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/integration/api_service_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2114"
},
{
"name": "Makefile",
"bytes": "4612"
},
{
"name": "Python",
"bytes": "1073920"
},
{
"name": "Shell",
"bytes": "1165"
}
],
"symlink_target": ""
} |
import os
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.jvm_app import JvmApp
from pants.backend.jvm.targets.jvm_binary import JvmBinary
from pants.backend.jvm.tasks.bundle_create import BundleCreate
from pants.backend.jvm.tasks.classpath_products import ClasspathProducts, MissingClasspathEntryError
from pants.build_graph.resources import Resources
from pants.java.jar.jar_dependency import JarDependency
from pants.util.contextutil import open_zip
from pants.util.dirutil import safe_file_dump, safe_mkdir, safe_mkdtemp
from pants_test.backend.jvm.tasks.jvm_binary_task_test_base import JvmBinaryTaskTestBase
class TestBundleCreate(JvmBinaryTaskTestBase):
    """Tests for the BundleCreate task (bundling of jvm_app/jvm_binary targets)."""

    @classmethod
    def task_type(cls):
        # Task class under test.
        return BundleCreate

    def add_consolidated_bundle(self, context, tgt, files_dict):
        """Add a bundle to the classpath as if it has been consolidated already."""
        consolidated_classpath = context.products.get_data(
            "consolidated_classpath", init_func=ClasspathProducts.init_func(self.pants_workdir)
        )
        # Create a temporary directory under the target id, then dump all files.
        target_dir = os.path.join(self.test_workdir, tgt.id)
        safe_mkdir(target_dir)
        entry_path = safe_mkdtemp(dir=target_dir)
        classpath_dir = safe_mkdtemp(dir=target_dir)
        for rel_path, content in files_dict.items():
            safe_file_dump(os.path.join(entry_path, rel_path), content)
        # Create Jar to mimic consolidate classpath behavior.
        jarpath = os.path.join(classpath_dir, "output-0.jar")
        with self.task.open_jar(jarpath, overwrite=True, compressed=False) as jar:
            jar.write(entry_path)
        consolidated_classpath.add_for_target(tgt, [("default", jarpath)])

    def setUp(self):
        """Prepare targets, context, runtime classpath."""
        super().setUp()
        self.task = self.prepare_execute(self.context())
        # Third-party artifacts covering jar / zip / classifier / tar.gz cases.
        self.jar_artifact = self.create_artifact(org="org.example", name="foo", rev="1.0.0")
        self.zip_artifact = self.create_artifact(
            org="org.pantsbuild", name="bar", rev="2.0.0", ext="zip"
        )
        self.bundle_artifact = self.create_artifact(
            org="org.apache", name="baz", rev="3.0.0", classifier="tests"
        )
        self.tar_gz_artifact = self.create_artifact(
            org="org.gnu", name="gary", rev="4.0.0", ext="tar.gz"
        )
        self.jar_lib = self.make_target(
            spec="3rdparty/jvm/org/example:foo",
            target_type=JarLibrary,
            jars=[
                JarDependency(org="org.example", name="foo", rev="1.0.0"),
                JarDependency(org="org.pantsbuild", name="bar", rev="2.0.0", ext="zip"),
                JarDependency(org="org.apache", name="baz", rev="3.0.0", classifier="tests"),
                JarDependency(org="org.gnu", name="gary", rev="4.0.0", ext="tar.gz"),
            ],
        )
        safe_file_dump(os.path.join(self.build_root, "resources/foo/file"), "// dummy content")
        self.resources_target = self.make_target(
            "//resources:foo-resources", Resources, sources=["foo/file"]
        )
        # This is so that payload fingerprint can be computed.
        safe_file_dump(os.path.join(self.build_root, "foo/Foo.java"), "// dummy content")
        self.java_lib_target = self.make_target(
            "//foo:foo-library", JavaLibrary, sources=["Foo.java"]
        )
        self.binary_target = self.make_target(
            spec="//foo:foo-binary",
            target_type=JvmBinary,
            dependencies=[self.java_lib_target, self.jar_lib, self.resources_target],
        )
        self.dist_root = os.path.join(self.build_root, "dist")

    def _create_target(self, **kwargs):
        # jvm_app wrapping the binary; kwargs let tests add e.g. archive="zip".
        return self.make_target(
            spec="//foo:foo-app",
            target_type=JvmApp,
            basename="FooApp",
            dependencies=[self.binary_target],
            **kwargs,
        )

    def _setup_classpath(self, task_context):
        """As a separate prep step because to test different option settings, this needs to rerun
        after context is re-created."""
        classpath_products = self.ensure_consolidated_classpath_products(task_context)
        classpath_products.add_jars_for_targets(
            targets=[self.jar_lib],
            conf="default",
            resolved_jars=[
                self.jar_artifact,
                self.zip_artifact,
                self.bundle_artifact,
                self.tar_gz_artifact,
            ],
        )
        self.add_consolidated_bundle(
            task_context, self.binary_target, {"Foo.class": "", "foo.txt": "", "foo/file": ""}
        )

    def test_jvm_bundle_products(self):
        """Test default setting outputs bundle products using `target.id`."""
        self.app_target = self._create_target()
        self.task_context = self.context(target_roots=[self.app_target])
        self._setup_classpath(self.task_context)
        self.execute(self.task_context)
        self._check_bundle_products("foo.foo-app", check_symlink=True)

    def test_jvm_bundle_use_basename_prefix(self):
        """Test override default setting outputs bundle products using basename."""
        self.app_target = self._create_target()
        self.set_options(use_basename_prefix=True)
        self.task_context = self.context(target_roots=[self.app_target])
        self._setup_classpath(self.task_context)
        self.execute(self.task_context)
        self._check_bundle_products("foo.foo-app", check_symlink=True, symlink_name_prefix="FooApp")

    def test_bundle_non_app_target(self):
        """Test bundle does not apply to a non jvm_app/jvm_binary target."""
        self.task_context = self.context(target_roots=[self.java_lib_target])
        self._setup_classpath(self.task_context)
        self.execute(self.task_context)
        # No bundle product and no dist/ output for a plain java_library.
        self.assertIsNone(self.task_context.products.get("jvm_bundles").get(self.java_lib_target))
        self.assertFalse(os.path.exists(self.dist_root))

    def test_jvm_bundle_missing_product(self):
        """Test exception is thrown in case of a missing jar."""
        self.app_target = self._create_target()
        self.task_context = self.context(target_roots=[self.app_target])
        # materialize=False: presumably the artifact is recorded on the
        # classpath but never written to disk, so bundling should fail.
        missing_jar_artifact = self.create_artifact(
            org="org.example", name="foo", rev="2.0.0", materialize=False
        )
        classpath_products = self.ensure_consolidated_classpath_products(self.task_context)
        classpath_products.add_jars_for_targets(
            targets=[self.binary_target], conf="default", resolved_jars=[missing_jar_artifact]
        )
        with self.assertRaises(MissingClasspathEntryError):
            self.execute(self.task_context)

    def test_conflicting_basename(self):
        """Test exception is thrown when two targets share the same basename."""
        self.app_target = self._create_target()
        conflict_app_target = self.make_target(
            spec="//foo:foo-app-conflict",
            target_type=JvmApp,
            basename="FooApp",
            dependencies=[self.binary_target],
        )
        self.set_options(use_basename_prefix=True)
        self.task_context = self.context(target_roots=[self.app_target, conflict_app_target])
        self._setup_classpath(self.task_context)
        with self.assertRaises(BundleCreate.BasenameConflictError):
            self.execute(self.task_context)

    def test_target_options(self):
        """An `archive=` on the target produces the matching archive product."""
        self.app_target = self._create_target(archive="zip")
        self.task_context = self.context(target_roots=[self.app_target])
        self._setup_classpath(self.task_context)
        self.execute(self.task_context)
        self._check_archive_products("foo.foo-app", "zip", check_copy=True)

    def test_cli_suppress_target_options(self):
        """The CLI archive option overrides the target's own archive setting."""
        self.set_options(archive="tar")
        self.app_target = self._create_target(archive="zip")
        self.task_context = self.context(target_roots=[self.app_target])
        self._setup_classpath(self.task_context)
        self.execute(self.task_context)
        # tar (CLI) wins over zip (target attribute).
        self._check_archive_products("foo.foo-app", "tar", check_copy=True)

    def _check_products(self, products, product_fullname):
        """Assert exactly one product named `product_fullname` exists; return its path."""
        self.assertIsNotNone(products)
        product_data = products.get(self.app_target)
        product_basedir = list(product_data.keys())[0]
        self.assertIn(self.pants_workdir, product_basedir)
        self.assertEqual(product_data[product_basedir], [product_fullname])
        product_path = os.path.join(product_basedir, product_fullname)
        return product_path

    def _check_archive_products(
        self, archive_name_prefix, archive_extension, check_copy=False, copy_name_prefix=""
    ):
        """Assert the archive product exists and (optionally) its dist/ copy."""
        products = self.task_context.products.get("deployable_archives")
        archive_fullname = f"{archive_name_prefix}.{archive_extension}"
        archive_path = self._check_products(products, archive_fullname)
        self.assertTrue(os.path.isfile(archive_path))
        if check_copy:
            copy_fullname = (
                f"{copy_name_prefix}.{archive_extension}" if copy_name_prefix else archive_fullname
            )
            copy_path = os.path.join(self.dist_root, copy_fullname)
            self.assertTrue(os.path.isfile(copy_path))

    def _check_bundle_products(
        self, bundle_name_prefix, check_symlink=False, symlink_name_prefix=""
    ):
        """Assert the bundle dir layout, jar contents, and optional dist/ symlink."""
        products = self.task_context.products.get("jvm_bundles")
        bundle_fullname = f"{bundle_name_prefix}-bundle"
        bundle_root = self._check_products(products, bundle_fullname)
        self.assertTrue(os.path.isdir(bundle_root))
        # The bundle contains the binary jar plus one lib per resolved artifact.
        self.assertEqual(
            sorted(
                [
                    "foo-binary.jar",
                    "libs/foo.foo-binary-0.jar",
                    "libs/3rdparty.jvm.org.example.foo-0.jar",
                    "libs/3rdparty.jvm.org.example.foo-1.zip",
                    "libs/3rdparty.jvm.org.example.foo-2.jar",
                    "libs/3rdparty.jvm.org.example.foo-3.gz",
                ]
            ),
            sorted(self.iter_files(bundle_root)),
        )
        with open_zip(os.path.join(bundle_root, "libs/foo.foo-binary-0.jar")) as zf:
            self.assertEqual(
                sorted(
                    [
                        "META-INF/",
                        "META-INF/MANIFEST.MF",
                        "Foo.class",
                        "foo.txt",
                        "foo/",
                        "foo/file",
                    ]
                ),
                sorted(zf.namelist()),
            )
        # TODO verify Manifest's Class-Path
        with open_zip(os.path.join(bundle_root, "foo-binary.jar")) as jar:
            self.assertEqual(sorted(["META-INF/", "META-INF/MANIFEST.MF"]), sorted(jar.namelist()))
        # Check symlink.
        if check_symlink:
            symlink_fullname = (
                f"{symlink_name_prefix}-bundle" if symlink_name_prefix else bundle_fullname
            )
            symlink_path = os.path.join(self.dist_root, symlink_fullname)
            self.assertTrue(os.path.islink(symlink_path))
            self.assertEqual(os.readlink(symlink_path), bundle_root)
| {
"content_hash": "a5c9814933013ea451192483dde9afb6",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 100,
"avg_line_length": 44.24124513618677,
"alnum_prop": 0.6163588390501319,
"repo_name": "wisechengyi/pants",
"id": "9d57073601884163e98ef52845882634a9777c54",
"size": "11502",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "6634"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "44381"
},
{
"name": "Java",
"bytes": "507948"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "7608990"
},
{
"name": "Rust",
"bytes": "1005243"
},
{
"name": "Scala",
"bytes": "106520"
},
{
"name": "Shell",
"bytes": "105217"
},
{
"name": "Starlark",
"bytes": "489739"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
} |
"""Veridu Python SDK
See:
https://github.com/veridu/veridu-python
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='veridu-python',
version='0.1.1',
description='Veridu Python SDK',
long_description=long_description,
url='https://github.com/veridu/veridu-python',
author='Veridu Ltd',
author_email='contact@veridu.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='veridu sdk user identification social media online services single sign on',
packages=find_packages(),
)
| {
"content_hash": "cbc5fd70d08d1617a6372303fb70d62b",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 90,
"avg_line_length": 24.25,
"alnum_prop": 0.6310751104565537,
"repo_name": "veridu/veridu-python",
"id": "f7b951aedc708cdf0adf63953dcc0489926295d9",
"size": "1358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6411"
}
],
"symlink_target": ""
} |
import logging
import arrow
import emission.core.get_database as edb
import emission.core.wrapper.entry as ecwe
def convert_wrapper_to_entry(key, wrapper):
    """Wrap a raw mongo document into a timeseries Entry, keeping its _id."""
    logging.debug("found user_id in wrapper %s" % wrapper["user_id"])
    entry = ecwe.Entry.create_entry(wrapper["user_id"], key, wrapper)
    entry["_id"] = wrapper["_id"]
    return entry
def convert_collection(collection, key):
    """Convert every raw document in `collection` into a timeseries Entry
    under `key` and insert it into the timeseries db.

    Removal of the source document is intentionally left commented out.
    """
    result_cursor = collection.find()
    logging.info("About to convert %s entries" % result_cursor.count())
    for i, wrapper in enumerate(result_cursor):
        entry = convert_wrapper_to_entry(key, wrapper)
        if entry.get_id() != wrapper["_id"]:
            # `logging.warn` is a deprecated alias; use `warning`.
            logging.warning("entry.id = %s, wrapper.id = %s" % (entry.get_id(), wrapper["_id"]))
        if i % 10000 == 0:
            # Parenthesized print works as a statement on py2 and a call on py3.
            print("converted %s -> %s" % (wrapper, entry))
        edb.get_timeseries_db().insert(entry)
        # collection.remove(wrapper)
def move_ts_entries(key):
    """Copy all timeseries docs with metadata.key == `key` from the raw
    timeseries db into the analysis timeseries db.

    Removal from the source db is intentionally left commented out, so this
    is a copy rather than a true move.
    """
    tdb = edb.get_timeseries_db()
    atdb = edb.get_analysis_timeseries_db()
    result_cursor = tdb.find({'metadata.key': key})
    logging.info("About to convert %s entries" % result_cursor.count())
    for i, entry_doc in enumerate(result_cursor):
        try:
            if i % 10000 == 0:
                # Parenthesized print works on both py2 and py3.
                print("moved %s from one ts to the other" % (entry_doc))
            atdb.insert(entry_doc)
            # tdb.remove(entry_doc)
        except Exception:
            # A bare `except:` also swallowed KeyboardInterrupt/SystemExit;
            # catch Exception and record the traceback for debugging.
            logging.exception("Got error while moving %s, skipping" % (entry_doc))
if __name__ == '__main__':
    # No arguments - muahahahaha. Just going to copy known fields over.
    # (Older per-collection conversions via convert_collection() for
    # trip/place/section/stop are retired; see history.)
    for key in ("segmentation/raw_trip",
                "segmentation/raw_place",
                "segmentation/raw_section",
                "segmentation/raw_stop",
                "analysis/smoothing"):
        move_ts_entries(key)
| {
"content_hash": "67e38686313620f0e66d03fc2afb1727",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 93,
"avg_line_length": 40.92156862745098,
"alnum_prop": 0.6530905606133206,
"repo_name": "yw374cornell/e-mission-server",
"id": "b56721dc00448943af4a54f0044319e83ec0b7a7",
"size": "2087",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/historical/migrations/move_trips_places_sections_stops_to_analysis_timeseries_db.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "445"
},
{
"name": "CSS",
"bytes": "717871"
},
{
"name": "HTML",
"bytes": "114875"
},
{
"name": "JavaScript",
"bytes": "7620696"
},
{
"name": "Jupyter Notebook",
"bytes": "97095629"
},
{
"name": "Python",
"bytes": "1584848"
},
{
"name": "Shell",
"bytes": "2299"
},
{
"name": "Smarty",
"bytes": "3456"
}
],
"symlink_target": ""
} |
""" This module contains the functions for iterating the social and climate
coupled model (SoCCo)"""
import numpy as np
import pandas as pd
from scipy import stats
from . import social as sl
from . import climate as cl
#### randomUniformF ############################################################
def randomUniformF(nSamples=1):
    """
    Draw `nSamples` uniform random variates on the scale (0, 1).

    Interchangeable with perceivedBehavioralControlF() and efficacyF().
    """
    return np.random.uniform(0.0, 1.0, nSamples)
#### randomNormalF ############################################################
def randomNormalF(mean, sd, nSamples=1):
    """
    Draw `nSamples` normal random variates with the given mean and sd.

    Used to initialize per capita CO2 emissions; current values were
    estimated as annualGHGemissionsInit/WorldPopnInit, about 5.049/person.
    """
    return np.random.normal(mean, sd, nSamples)
#### eIncrement ################################################################
def eIncrement(att, pbc, psn):
    """
    Rescale att and psn from (0,1) to (-inf,inf) via the standard-normal
    inverse CDF, then multiply by pbc (0 to 1) to get an increment in per
    capita emissions.

    att = attitude, pbc = perceivedBehavioralControl, psn = perceivedSocialNorm
    """
    def _clamped_ppf(values):
        # Inverse CDF; inputs of exactly 0/1 map to -inf/+inf, so replace
        # those with finite stand-ins. !NEED TO CHANGE VALUE OF 10!
        z = stats.norm(loc=0.0, scale=1.0).ppf(values)
        z[z == -np.inf] = min(10 * z[z != -np.inf].min(), -10)
        z[z == np.inf] = max(10 * z[z != np.inf].max(), 10)
        return z

    return -(_clamped_ppf(att) + _clamped_ppf(psn)) * pbc
#### updatePCEmissions ################################################################
def updatePCEmissions(pcE, eff, pbc, tData, percepWindowSize, riskSens=1.0):
    """
    Compute an emissions increment from the social-psychology terms and add
    it to the current per capita emissions, returning the updated values.
    """
    social_norm = sl.perceivedSocialNorm(pcE)
    risk = sl.perceivedRisk(percepWindowSize, tData, riskSens)
    attitude = sl.attitude(risk, eff)
    return pcE + eIncrement(attitude, pbc, social_norm)
#### iterateOneStep ############################################################
def iterateOneStep(pcE_ts, tData_ts, co2_ts, eff, pbc, popN, percepWindowSize=3, riskSens=1.0):
    """
    Updates atm CO2, temperature and per capita emissions for one step (one year).

    pcE_ts is 2-D with time along axis 1 (a new column is appended per step);
    tData_ts and co2_ts are 1-D histories. Returns all three extended by one
    step. Assumes axis 0 of pcE_ts indexes agents -- TODO confirm.
    """
    # New per-capita emissions from the social model, based on the latest column.
    pcE_updated = updatePCEmissions(pcE_ts[:, -1], eff, pbc, tData_ts, percepWindowSize, riskSens)
    # Reshape to a column vector so it can be appended as a new time step.
    pcE_updated = np.atleast_2d(pcE_updated).transpose()
    pcE_vector = np.concatenate((pcE_ts, pcE_updated), axis=1)
    # Emissions -> CO2 concentration increment; extend the CO2 series.
    co2Del_ppm = cl.perCapitaEmissionsToDelPPM(pcE_updated, popN)
    co2_updated = np.array([co2Del_ppm + co2_ts[-1]])  # adds to last element of co2Current
    co2_vector = np.concatenate([co2_ts, co2_updated])
    # Radiative forcing -> temperature delta; extend the temperature series.
    rf = cl.computeRF(co2_vector)
    tDel = cl.compute_deltaT(rf)
    t_updated = np.array([tDel[-1] + tData_ts[-1]])  # adds to last element of co2Current
    t_vector = np.concatenate([tData_ts, t_updated])
    return pcE_vector, t_vector, co2_vector
#### iterateNsteps ############################################################
def iterateNsteps(pcE_init, tData_init, co2_init, nSteps, eff, pbc, popN,
                  percepWindowSize=3, riskSens=1.0):
    """
    'nSteps' updates of per capita emissions, temperature, and atm CO2 with
    each step being 1 year.

    Fix: forward `percepWindowSize` and `riskSens` to iterateOneStep --
    previously they were accepted but ignored, and the literals 3 / 1.0
    were always used instead.
    """
    for _ in range(nSteps):
        pcE_init, tData_init, co2_init = iterateOneStep(
            pcE_init, tData_init, co2_init, eff, pbc, popN,
            percepWindowSize=percepWindowSize, riskSens=riskSens)
    return pcE_init, tData_init, co2_init
| {
"content_hash": "48c69ed2725bc959296a2bbe83444111",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 112,
"avg_line_length": 35.36842105263158,
"alnum_prop": 0.6029265873015873,
"repo_name": "OpenClimate/climate_change_model",
"id": "4a6b82c453a29051f91dfba86466d878ca0107a4",
"size": "4032",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SoCCo/algorithms/iter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "271384"
},
{
"name": "Mathematica",
"bytes": "2343577"
},
{
"name": "Python",
"bytes": "36423"
},
{
"name": "R",
"bytes": "6172"
}
],
"symlink_target": ""
} |
import time

from kid_readout.interactive import *

#setup = hardware.Hardware()

# ROACH2 readout board in baseband mode; modulation output pinned high.
ri = Roach2Baseband()
ri.set_modulation_output('high')

ncycle = 0
# Repeat VNA sweeps indefinitely, pausing 10 minutes between cycles;
# intended to be stopped manually (Ctrl-C).
while True:
    print "cycle",ncycle
    print " "
    for dac_atten in [0]:
        ri.set_dac_atten(dac_atten)
        df = acquire.new_nc_file(suffix='vna_dac_atten_%.1f_dB' % dac_atten)
        # Sweep grid: 64 base points spanning 100-180 (presumably MHz),
        # each stepped across 650 offsets of 512/2**18 -- TODO confirm units.
        swa = acquire.run_sweep(ri,np.linspace(100,180,64)[None,:]+np.arange(650,dtype='int')[:,None]*512./2.**18,
                                2**18,
                                verbose=True,length_seconds=.1,
                                )
        df.write(swa)
        df.close()
    print "waiting 10 minutes"
    time.sleep(600)
    ncycle += 1

#for example
#170-230 MHz band, steps are (230-170)/128
#then sampling 480 times between each of these steps by stepping an additional 2**18
"content_hash": "2368c759bb6d5e98540a6af3df48f18f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 114,
"avg_line_length": 25.818181818181817,
"alnum_prop": 0.5704225352112676,
"repo_name": "ColumbiaCMB/kid_readout",
"id": "8975ec75f3ee73a9600e12c713ef890e118d9d94",
"size": "852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/data_taking_scripts/2017-02-jpl-lf-n2/vna_sweep_repeat.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "13672"
},
{
"name": "Python",
"bytes": "2033932"
}
],
"symlink_target": ""
} |
"""Test the python-manta MantaClient."""
from __future__ import absolute_import
from __future__ import print_function
import re
from posixpath import dirname as udirname, basename as ubasename, join as ujoin
from common import *
import manta
#---- globals
TDIR = "tmp/test_mantaclient"
#---- internal support stuff
#---- Test cases
#
# We need to run these tests in order. We'll be creating a test area:
# /$account/stor/tmp/test_mantaclient/
# and working in there.
#
class MiscTestCase(MantaTestCase):
    """Miscellaneous 'manta' module tests."""

    def test_imports(self):
        """The package must expose its public API at the top level."""
        self.assertTrue(manta.MantaClient)
        self.assertTrue(manta.PrivateKeySigner)
        self.assertTrue(manta.SSHAgentSigner)
        self.assertTrue(manta.CLISigner)
        self.assertTrue(manta.MantaError)
        self.assertTrue(manta.MantaAPIError)

    def test_version(self):
        """__version__ must be a semver-like 'X.Y.Z' string."""
        # Raw string: '\d' in a plain string is an invalid escape sequence
        # (DeprecationWarning, an error in newer Python 3).
        VERSION_RE = re.compile(r'^\d+\.\d+\.\d+$')
        self.assertTrue(manta.__version__)
        self.assertTrue(VERSION_RE.search(manta.__version__))
class CleanTestAreaTestCase(MantaTestCase):
    """Clear out leftovers under stor(TDIR) before the other cases run."""

    def test_clean(self):
        client = self.get_client()
        try:
            client.list_directory(stor(TDIR))
        except manta.MantaError as ex:
            if ex.code == "ResourceNotFound":
                # Test area doesn't exist yet: nothing to clean.
                return
            else:
                raise
        # Don't totally wipe, to save time on test re-runs... though
        # I'm sure this will surprise at some point.
        skips = [stor(TDIR), stor(TDIR, 'manyfiles')]
        for mdir, dirs, nondirs in client.walk(stor(TDIR), False):
            if mdir in skips:
                continue
            # Delete files first, then the (now empty) directory itself.
            for nondir in nondirs:
                client.delete_object(ujoin(mdir, nondir["name"]))
            client.delete_object(mdir)
class DirTestCase(MantaTestCase):
    """Directory create/list/head/delete behaviors."""

    def test_put(self):
        """mkdirp must create TDIR and it must appear in the parent listing."""
        client = self.get_client()
        client.mkdirp(stor(TDIR))
        dirents = client.list_directory(stor(udirname(TDIR)))
        dirent = [d for d in dirents if d["name"] == ubasename(TDIR)][0]
        self.assertTrue(dirent)

    def test_listheaddel(self):
        """List with limit/marker paging, head for counts, then delete all."""
        client = self.get_client()
        client.mkdirp(stor(TDIR, 'dir'))
        for d in ['a', 'b', 'c']:
            client.mkdirp(stor(TDIR, 'dir', d))
        dirents = client.list_directory(stor(TDIR, 'dir'))
        self.assertEqual(
            len(dirents), 3,
            'unexpected number of dirents: got %d, expected 3, dirents %r' %
            (len(dirents), dirents))
        # `limit` caps the page size.
        dirents = client.list_directory(stor(TDIR, 'dir'), limit=2)
        self.assertEqual(len(dirents), 2)
        # `marker` resumes from the named entry, inclusive: ['b', 'c'].
        dirents = client.list_directory(
            stor(TDIR, 'dir'), marker=dirents[-1]["name"])
        self.assertEqual(len(dirents), 2)
        self.assertEqual(dirents[1]["name"], "c")
        # HEAD reports the total via the result-set-size header.
        res = client.head_directory(stor(TDIR, 'dir'))
        self.assertEqual(int(res['result-set-size']), 3)
        for d in ['a', 'b', 'c']:
            client.delete_directory(stor(TDIR, 'dir', d))
        dirents = client.list_directory(stor(TDIR, 'dir'))
        self.assertEqual(len(dirents), 0)
class ObjectTestCase(MantaTestCase):
    """Round-trip a small object: put, get, delete, verify it is gone."""

    def test_putgetdel(self):
        client = self.get_client()
        client.mkdirp(stor(TDIR))
        mpath = stor(TDIR, 'foo.txt')
        content = 'foo\nbar\nbaz'
        client.put_object(mpath, content=content)
        self.assertEqual(content, client.get_object(mpath))
        client.delete_object(mpath)
        remaining = [e for e in client.list_directory(stor(TDIR))
                     if e["name"] == "foo.txt"]
        self.assertEqual(len(remaining), 0)
class LinkTestCase(MantaTestCase):
    """Snaplinks: the link must keep content alive after the source is deleted."""

    def test_put(self):
        client = self.get_client()
        if client.subuser or client.role:
            # Snaplinks don't work for subusers/roles; see MANTA-2829.
            print('\nSkipping LinkTestCase because a subuser or role has been '
                  'provided for the Manta client.\nSee '
                  'https://devhub.joyent.com/jira/browse/MANTA-2829 '
                  'for details.')
            return
        client.mkdirp(stor(TDIR))
        obj_path = stor(TDIR, 'obj.txt')
        content = 'foo\nbar\nbaz'
        client.put_object(obj_path, content=content)
        link_path = stor(TDIR, 'link.txt')
        client.put_snaplink(link_path, obj_path)
        got = client.get_object(link_path)
        self.assertEqual(content, got)
        # Deleting the source must not affect the snaplink's content.
        client.delete_object(obj_path)
        got2 = client.get_object(link_path)
        self.assertEqual(content, got2)
        client.delete_object(link_path)
        dirents = [e for e in client.list_directory(stor(TDIR))
                   if e["name"] in ("obj.txt", "link.txt")]
        self.assertEqual(len(dirents), 0)
class ManyFilesTestCase(MantaTestCase):
    """Listing a directory with 1100 entries (presumably exercises server
    paging in ls -- verify against the client implementation)."""
    __tags__ = ['slow']

    def setUp(self):
        self.client = self.get_client()
        self.base = b = ujoin(TDIR, "manyfiles")
        # If this dir exists already, then save time, don't rebuild it (i.e.
        # presuming all the files were left in place).
        if self.client.type(stor(b)) != "directory":
            self.client.mkdirp(stor(b))
            for i in range(1100):
                self.client.put(stor(b, "f%05d" % i), "index %d" % i)

    def test_count(self):
        ls = self.client.ls(stor(self.base))
        self.assertEqual(len(ls), 1100)
| {
"content_hash": "6d907fb60c4d5c611230b184c8543146",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 79,
"avg_line_length": 34.564935064935064,
"alnum_prop": 0.5962802930678189,
"repo_name": "joyent/python-manta",
"id": "9a5c079a990c4b5823747ec1d4fd30f89b88aeef",
"size": "5401",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_mantaclient.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2543"
},
{
"name": "Python",
"bytes": "279596"
}
],
"symlink_target": ""
} |
"""Example configurations using the PPO algorithm."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-variable
import tensorflow as tf
from agents import algorithms
from agents.scripts import networks
def default():
  """Default configuration for PPO."""
  config = dict(
      # General
      algorithm=algorithms.PPO,
      num_agents=30,
      eval_episodes=30,
      use_gpu=False,
      # Environment
      normalize_ranges=True,
      # Network
      network=networks.feed_forward_gaussian,
      weight_summaries=dict(
          all=r'.*', policy=r'.*/policy/.*', value=r'.*/value/.*'),
      policy_layers=(200, 100),
      value_layers=(200, 100),
      init_output_factor=0.1,
      init_std=0.35,
      # Optimization
      update_every=30,
      update_epochs=25,
      optimizer=tf.train.AdamOptimizer,
      learning_rate=1e-4,
      # Losses
      discount=0.995,
      kl_target=1e-2,
      kl_cutoff_factor=2,
      kl_cutoff_coef=1000,
      kl_init_penalty=1,
  )
  return config
def pendulum():
  """Configuration for the pendulum classic control task."""
  # Explicit dict merge instead of `locals().update(...)`: mutating the
  # locals() snapshot of a function is undefined and silently stops working
  # under PEP 667 (Python 3.13), which would drop all default keys.
  config = default()
  config.update(
      # Environment
      env='Pendulum-v0',
      max_length=200,
      steps=1e6,  # 1M
      # Optimization
      batch_size=20,
      chunk_length=50,
  )
  return config
def cartpole():
  """Configuration for the cart pole classic control task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='CartPole-v1',
      max_length=500,
      steps=2e5,  # 200k
      normalize_ranges=False,  # The env reports wrong ranges.
      # Network
      network=networks.feed_forward_categorical,
  )
  return config
def reacher():
  """Configuration for MuJoCo's reacher task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='Reacher-v2',
      max_length=1000,
      steps=5e6,  # 5M
      discount=0.985,
      update_every=60,
  )
  return config
def cheetah():
  """Configuration for MuJoCo's half cheetah task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='HalfCheetah-v2',
      max_length=1000,
      steps=1e7,  # 10M
      discount=0.99,
  )
  return config
def walker():
  """Configuration for MuJoCo's walker task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='Walker2d-v2',
      max_length=1000,
      steps=1e7,  # 10M
  )
  return config
def hopper():
  """Configuration for MuJoCo's hopper task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='Hopper-v2',
      max_length=1000,
      steps=1e7,  # 10M
      update_every=60,
  )
  return config
def ant():
  """Configuration for MuJoCo's ant task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='Ant-v2',
      max_length=1000,
      steps=2e7,  # 20M
  )
  return config
def humanoid():
  """Configuration for MuJoCo's humanoid task."""
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  config = default()
  config.update(
      # Environment
      env='Humanoid-v2',
      max_length=1000,
      steps=5e7,  # 50M
      update_every=60,
  )
  return config
def bullet_ant():
  """Configuration for PyBullet's ant task."""
  # Importing registers the Bullet environments (import kept for that side
  # effect only).
  import pybullet_envs  # noqa pylint: disable=unused-import
  # Explicit dict merge instead of `locals().update(...)`, whose effect on a
  # function's locals() snapshot is undefined (broken under PEP 667 / 3.13).
  # This also drops the accidental 'pybullet_envs' module entry that
  # `return locals()` used to leak into the config.
  config = default()
  config.update(
      # Environment
      env='AntBulletEnv-v0',
      max_length=1000,
      steps=3e7,  # 30M
      update_every=60,
  )
  return config
| {
"content_hash": "51ba4b4b920f96c2444ba3a51b560f5a",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 62,
"avg_line_length": 21.10958904109589,
"alnum_prop": 0.663854639844257,
"repo_name": "google-research/batch-ppo",
"id": "4833137db15b8d18e73739f5bb9de52990b7b6e3",
"size": "3678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agents/scripts/configs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "176905"
}
],
"symlink_target": ""
} |
import logging
from pajbot.managers.handler import HandlerManager
from pajbot.modules import BaseModule
from pajbot.modules import ModuleSetting
log = logging.getLogger(__name__)
class EmoteLimitModule(BaseModule):
    """Chat moderation module that deletes or times out messages containing
    more than a configured number of emotes."""

    # Module ID is the final component of this module's dotted import path.
    ID = __name__.split(".")[-1]
    NAME = "Emote Limit"
    DESCRIPTION = "Times out users who post too many emotes"
    CATEGORY = "Moderation"
    SETTINGS = [
        ModuleSetting(
            key="max_emotes",
            label="Maximum number of emotes that can be posted",
            type="number",
            required=True,
            placeholder="",
            default=15,
            constraints={"min_value": 1, "max_value": 167},
        ),
        ModuleSetting(
            key="bypass_level",
            label="Level to bypass module",
            type="number",
            required=True,
            placeholder="",
            default=420,
            constraints={"min_value": 100, "max_value": 1000},
        ),
        ModuleSetting(
            key="moderation_action",
            label="Moderation action to apply",
            type="options",
            required=True,
            default="Timeout",
            options=["Delete", "Timeout"],
        ),
        ModuleSetting(
            key="timeout_duration",
            label="Timeout duration (if moderation action is timeout)",
            type="number",
            required=True,
            placeholder="",
            default=60,
            constraints={"min_value": 1, "max_value": 1209600},
        ),
        ModuleSetting(
            key="allow_subs_to_bypass",
            label="Allow subscribers to bypass",
            type="boolean",
            required=True,
            default=False,
        ),
        ModuleSetting(
            key="enable_in_online_chat", label="Enabled in online chat", type="boolean", required=True, default=True
        ),
        ModuleSetting(
            key="enable_in_offline_chat", label="Enabled in offline chat", type="boolean", required=True, default=True
        ),
        ModuleSetting(
            key="timeout_reason",
            label="Timeout Reason",
            type="text",
            required=False,
            placeholder="",
            default="Too many emotes in your message",
            constraints={},
        ),
        ModuleSetting(
            key="enable_whisper_timeout_reasons",
            label="Enable Whisper Timeout Reasons",
            type="boolean",
            required=True,
            default=True,
        ),
        ModuleSetting(
            key="whisper_timeout_reason",
            label="Whisper Timeout Reason | Available arguments: {timeout_duration}",
            type="text",
            required=False,
            placeholder="",
            default="You have been timed out for {timeout_duration} seconds for posting too many emotes.",
            constraints={},
        ),
        ModuleSetting(
            key="disable_warnings",
            label="Disable warning timeouts",
            type="boolean",
            required=True,
            default=False,
        ),
    ]

    def delete_or_timeout(self, user, msg_id, reason):
        """Apply the configured moderation action to *user*'s message.

        Deletes the message when the action is "Delete"; otherwise times the
        user out, either directly or through the warning-escalation path
        depending on the "disable_warnings" setting.
        """
        if self.settings["moderation_action"] == "Delete":
            self.bot.delete_message(msg_id)
        elif self.settings["moderation_action"] == "Timeout":
            if self.settings["disable_warnings"] is True:
                self.bot.timeout(user, self.settings["timeout_duration"], reason, once=True)
            else:
                self.bot.timeout_warn(user, self.settings["timeout_duration"], reason, once=True)

    def on_message(self, source, message, emote_instances, msg_id, **rest):
        """Message handler.

        Returns True to let the message through; returns False (stopping
        further handling) after punishing the sender when the emote count
        exceeds the configured maximum.
        """
        # Privileged users (high level or moderator) are exempt.
        if source.level >= self.settings["bypass_level"] or source.moderator is True:
            return True
        # Respect the per-state (online/offline chat) enable toggles.
        if self.bot.is_online and not self.settings["enable_in_online_chat"]:
            return True
        if not self.bot.is_online and not self.settings["enable_in_offline_chat"]:
            return True
        if self.settings["allow_subs_to_bypass"] and source.subscriber is True:
            return True
        # Strictly more than max_emotes triggers the moderation action.
        if len(emote_instances) > self.settings["max_emotes"]:
            self.delete_or_timeout(source, msg_id, self.settings["timeout_reason"])
            if (
                self.settings["moderation_action"] == "Timeout"
                and self.settings["enable_whisper_timeout_reasons"] is True
            ):
                self.bot.whisper(
                    source,
                    self.settings["whisper_timeout_reason"].format(timeout_duration=self.settings["timeout_duration"]),
                )
            return False
        return True

    def enable(self, bot):
        """Register the message handler (runs even when propagation stopped)."""
        HandlerManager.add_handler("on_message", self.on_message, priority=150, run_if_propagation_stopped=True)

    def disable(self, bot):
        """Unregister the message handler."""
        HandlerManager.remove_handler("on_message", self.on_message)
| {
"content_hash": "238a7ee11f79504516511c9e417d52ff",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 119,
"avg_line_length": 35.43478260869565,
"alnum_prop": 0.5556237218813906,
"repo_name": "pajlada/pajbot",
"id": "01fbb68fdd7f923b0d4a8e8ac55b4d7e903b6062",
"size": "4890",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pajbot/modules/emote_limit.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11288"
},
{
"name": "HTML",
"bytes": "129576"
},
{
"name": "JavaScript",
"bytes": "202450"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "987601"
},
{
"name": "Shell",
"bytes": "589"
}
],
"symlink_target": ""
} |
"""Tools for data conversion and presentation."""
import numpy as np
def get_wind_components(speed, wdir):
    r"""Calculate the U, V wind vector components from the speed and direction.

    Parameters
    ----------
    speed : array_like
        The wind speed (magnitude)
    wdir : array_like
        The wind direction, specified as the direction from which the wind is
        blowing, with 0 being North.

    Returns
    -------
    u, v : tuple of array_like
        The wind components in the X (East-West) and Y (North-South)
        directions, respectively.
    """
    # The direction is where the wind comes *from*, so negate to obtain the
    # vector it blows toward. NOTE(review): no deg->rad conversion is done,
    # so wdir is presumably expected in radians -- confirm with callers.
    return -speed * np.sin(wdir), -speed * np.cos(wdir)
| {
"content_hash": "4cb7fecfd1ca7a29ca04fc26d858dcd3",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 79,
"avg_line_length": 25.384615384615383,
"alnum_prop": 0.6166666666666667,
"repo_name": "dopplershift/siphon",
"id": "50e8c1449b809db1c5535369c1383bdd8aae685c",
"size": "824",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "siphon/_tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "337904"
},
{
"name": "Shell",
"bytes": "1485"
}
],
"symlink_target": ""
} |
"""
Unit tests for landlab.components.radiation.radiation
"""
from nose.tools import assert_equal, assert_true, assert_raises, with_setup
from numpy.testing import assert_array_almost_equal
try:
from nose.tools import assert_is_instance
except ImportError:
from landlab.testing.tools import assert_is_instance
import numpy as np
from landlab import RasterModelGrid
from landlab.components.radiation.radiation import Radiation
# Default raster dimensions used throughout these tests: 20x20 nodes,
# 10 m node spacing, grid origin at (0, 0).
(_SHAPE, _SPACING, _ORIGIN) = ((20, 20), (10e0, 10e0), (0., 0.))
# Convenience bundle of the grid constructor arguments.
_ARGS = (_SHAPE, _SPACING, _ORIGIN)
def setup_grid():
    """Create a fresh grid plus Radiation component for each test.

    Used as a nose ``with_setup`` fixture: exposes the component as the
    module-level global ``rad`` that the tests below read.
    """
    grid = RasterModelGrid((20, 20), spacing=10e0)
    # Build the component exactly once. The original created a throwaway
    # local ``rad = Radiation(grid)`` and then a *second* instance inside
    # globals().update(), wasting one construction; it also re-imported
    # RasterModelGrid, which is already imported at module level.
    globals().update({
        'rad': Radiation(grid)
    })
@with_setup(setup_grid)
def test_name():
    """The component advertises its canonical name."""
    assert_equal('Radiation', rad.name)
@with_setup(setup_grid)
def test_input_var_names():
    """Only the elevation field is declared as required input."""
    expected = ('topographic__elevation',)
    assert_equal(rad.input_var_names, expected)
@with_setup(setup_grid)
def test_output_var_names():
    """All three radiation output fields are reported."""
    expected = ['radiation__incoming_shortwave_flux',
                'radiation__net_shortwave_flux',
                'radiation__ratio_to_flat_surface']
    assert_equal(sorted(rad.output_var_names), expected)
@with_setup(setup_grid)
def test_var_units():
    """Every declared variable has a units entry, with expected values."""
    declared = set(rad.input_var_names) | set(rad.output_var_names)
    assert_equal(declared, set(dict(rad.units).keys()))
    assert_equal('m', rad.var_units('topographic__elevation'))
    assert_equal('W/m^2', rad.var_units('radiation__incoming_shortwave_flux'))
    assert_equal('W/m^2', rad.var_units('radiation__net_shortwave_flux'))
    assert_equal('None', rad.var_units('radiation__ratio_to_flat_surface'))
@with_setup(setup_grid)
def test_grid_shape():
    """Grid dimensions match the module-level _SHAPE constant."""
    nrows, ncols = _SHAPE
    assert_equal(rad.grid.number_of_node_rows, nrows)
    assert_equal(rad.grid.number_of_node_columns, ncols)
@with_setup(setup_grid)
def test_grid_x_extent():
    """The x extent equals (columns - 1) * column spacing."""
    expected = (_SHAPE[1] - 1) * _SPACING[1]
    assert_equal(rad.grid.extent[1], expected)
@with_setup(setup_grid)
def test_grid_y_extent():
    """The y extent equals (rows - 1) * row spacing."""
    expected = (_SHAPE[0] - 1) * _SPACING[0]
    assert_equal(rad.grid.extent[0], expected)
@with_setup(setup_grid)
def test_field_getters():
    """Node and cell fields are flat numpy arrays of the expected size,
    and unknown field names raise KeyError."""
    layouts = (
        ('node', rad.grid.number_of_node_rows, rad.grid.number_of_node_columns),
        ('cell', rad.grid.number_of_cell_rows, rad.grid.number_of_cell_columns),
    )
    for element, nrows, ncols in layouts:
        for name in rad.grid[element]:
            field = rad.grid[element][name]
            assert_is_instance(field, np.ndarray)
            assert_equal(field.shape, (nrows * ncols, ))

    assert_raises(KeyError, lambda: rad.grid['not_a_var_name'])
@with_setup(setup_grid)
def test_field_initialized_to_zero():
    """Freshly created fields are all zeros (slope/aspect cell fields
    are excluded, as in the original test)."""
    for name in rad.grid['node']:
        assert_array_almost_equal(rad.grid['node'][name],
                                  np.zeros(rad.grid.number_of_nodes))
    for name in rad.grid['cell']:
        if name in ('Slope', 'Aspect'):
            continue
        assert_array_almost_equal(rad.grid['cell'][name],
                                  np.zeros(rad.grid.number_of_cells))
"content_hash": "46466f40d62f867a943d862c54397388",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 78,
"avg_line_length": 30.807692307692307,
"alnum_prop": 0.6320224719101124,
"repo_name": "csherwood-usgs/landlab",
"id": "588d44d8becb372ada6c4441aa9709f7df60145d",
"size": "3204",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "landlab/components/radiation/tests/test_radiation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1359"
},
{
"name": "PowerShell",
"bytes": "6112"
},
{
"name": "Python",
"bytes": "2844194"
},
{
"name": "Shell",
"bytes": "2773"
}
],
"symlink_target": ""
} |
import aquests
import route_guide_pb2
def test_grpc_404():
    """Calling a misspelled gRPC service path should produce 404 responses."""
    # Note the deliberate trailing underscore in the service name.
    stub = aquests.grpc("http://127.0.0.1:5000/routeguide.RouteGuide_")
    point = route_guide_pb2.Point(latitude=409146138, longitude=-746188906)
    for _ in range(3):
        stub.GetFeature(point)
    aquests.fetchall()
| {
"content_hash": "f10fe4f6fb95e103372d3cd40d7e1b0a",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 73,
"avg_line_length": 30.333333333333332,
"alnum_prop": 0.7289377289377289,
"repo_name": "hansroh/aquests",
"id": "f99f649a7f88b12758abd38a41c354052d287f56",
"size": "273",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_grpc_404.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "744914"
},
{
"name": "Shell",
"bytes": "40"
}
],
"symlink_target": ""
} |
from PySide6.QtCore import * # type: ignore
from PySide6.QtGui import * # type: ignore
from PySide6.QtWidgets import * # type: ignore
from .gitview import GitView
class Ui_MainWindow(object):
    """UI scaffold for the QGitc main window: actions, menus, the
    repository/filter bar, and the GitView splitter.

    NOTE(review): this appears to be code generated by Qt's uic
    (pyside6-uic) from a .ui file -- prefer regenerating from the .ui
    source over editing this file by hand.
    """

    def setupUi(self, MainWindow):
        """Build all widgets/actions/menus and attach them to MainWindow."""
        if not MainWindow.objectName():
            MainWindow.setObjectName(u"MainWindow")
        MainWindow.resize(800, 600)
        # --- actions ---------------------------------------------------
        # Each themed action falls back to a placeholder file icon when
        # the icon theme does not provide the named icon.
        self.acQuit = QAction(MainWindow)
        self.acQuit.setObjectName(u"acQuit")
        icon = QIcon()
        iconThemeName = u"window-close"
        if QIcon.hasThemeIcon(iconThemeName):
            icon = QIcon.fromTheme(iconThemeName)
        else:
            icon.addFile(u".", QSize(), QIcon.Normal, QIcon.Off)
        self.acQuit.setIcon(icon)
#if QT_CONFIG(shortcut)
        self.acQuit.setShortcut(u"Ctrl+W")
#endif // QT_CONFIG(shortcut)
        self.acAbout = QAction(MainWindow)
        self.acAbout.setObjectName(u"acAbout")
        icon1 = QIcon()
        iconThemeName = u"help-about"
        if QIcon.hasThemeIcon(iconThemeName):
            icon1 = QIcon.fromTheme(iconThemeName)
        else:
            icon1.addFile(u".", QSize(), QIcon.Normal, QIcon.Off)
        self.acAbout.setIcon(icon1)
        self.acPreferences = QAction(MainWindow)
        self.acPreferences.setObjectName(u"acPreferences")
        icon2 = QIcon()
        iconThemeName = u"preferences-system"
        if QIcon.hasThemeIcon(iconThemeName):
            icon2 = QIcon.fromTheme(iconThemeName)
        else:
            icon2.addFile(u".", QSize(), QIcon.Normal, QIcon.Off)
        self.acPreferences.setIcon(icon2)
        self.actionIgnore_whitespace_changes = QAction(MainWindow)
        self.actionIgnore_whitespace_changes.setObjectName(u"actionIgnore_whitespace_changes")
        self.acVisualizeWhitespace = QAction(MainWindow)
        self.acVisualizeWhitespace.setObjectName(u"acVisualizeWhitespace")
        self.acVisualizeWhitespace.setCheckable(True)
        self.acIgnoreEOL = QAction(MainWindow)
        self.acIgnoreEOL.setObjectName(u"acIgnoreEOL")
        self.acIgnoreEOL.setCheckable(True)
        self.acIgnoreAll = QAction(MainWindow)
        self.acIgnoreAll.setObjectName(u"acIgnoreAll")
        self.acIgnoreAll.setCheckable(True)
        self.acIgnoreNone = QAction(MainWindow)
        self.acIgnoreNone.setObjectName(u"acIgnoreNone")
        self.acIgnoreNone.setCheckable(True)
        self.acCopy = QAction(MainWindow)
        self.acCopy.setObjectName(u"acCopy")
        icon3 = QIcon()
        iconThemeName = u"edit-copy"
        if QIcon.hasThemeIcon(iconThemeName):
            icon3 = QIcon.fromTheme(iconThemeName)
        else:
            icon3.addFile(u".", QSize(), QIcon.Normal, QIcon.Off)
        self.acCopy.setIcon(icon3)
        self.acSelectAll = QAction(MainWindow)
        self.acSelectAll.setObjectName(u"acSelectAll")
        icon4 = QIcon()
        iconThemeName = u"edit-select-all"
        if QIcon.hasThemeIcon(iconThemeName):
            icon4 = QIcon.fromTheme(iconThemeName)
        else:
            icon4.addFile(u".", QSize(), QIcon.Normal, QIcon.Off)
        self.acSelectAll.setIcon(icon4)
        self.acFind = QAction(MainWindow)
        self.acFind.setObjectName(u"acFind")
        icon5 = QIcon()
        iconThemeName = u"edit-find"
        if QIcon.hasThemeIcon(iconThemeName):
            icon5 = QIcon.fromTheme(iconThemeName)
        else:
            icon5.addFile(u".", QSize(), QIcon.Normal, QIcon.Off)
        self.acFind.setIcon(icon5)
        self.acCompare = QAction(MainWindow)
        self.acCompare.setObjectName(u"acCompare")
        self.acCompare.setCheckable(True)
        self.acShowGraph = QAction(MainWindow)
        self.acShowGraph.setObjectName(u"acShowGraph")
        self.acShowGraph.setCheckable(True)
        self.acShowGraph.setChecked(True)
        self.acAboutQt = QAction(MainWindow)
        self.acAboutQt.setObjectName(u"acAboutQt")
        self.acCopyLog = QAction(MainWindow)
        self.acCopyLog.setObjectName(u"acCopyLog")
        self.acCopyLogA = QAction(MainWindow)
        self.acCopyLogA.setObjectName(u"acCopyLogA")
        self.acCopyLogB = QAction(MainWindow)
        self.acCopyLogB.setObjectName(u"acCopyLogB")
        # --- central widget: repo/filter bar over the GitView splitter --
        self.centralwidget = QWidget(MainWindow)
        self.centralwidget.setObjectName(u"centralwidget")
        self.verticalLayout = QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName(u"verticalLayout")
        self.gridFrame = QFrame(self.centralwidget)
        self.gridFrame.setObjectName(u"gridFrame")
        sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.gridFrame.sizePolicy().hasHeightForWidth())
        self.gridFrame.setSizePolicy(sizePolicy)
        self.gridFrame.setFrameShape(QFrame.StyledPanel)
        self.gridLayout_2 = QGridLayout(self.gridFrame)
        self.gridLayout_2.setObjectName(u"gridLayout_2")
        self.gridLayout_2.setSizeConstraint(QLayout.SetDefaultConstraint)
        self.leRepo = QLineEdit(self.gridFrame)
        self.leRepo.setObjectName(u"leRepo")
        self.gridLayout_2.addWidget(self.leRepo, 0, 2, 1, 1)
        self.leOpts = QLineEdit(self.gridFrame)
        self.leOpts.setObjectName(u"leOpts")
        self.gridLayout_2.addWidget(self.leOpts, 2, 2, 1, 1)
        self.label_2 = QLabel(self.gridFrame)
        self.label_2.setObjectName(u"label_2")
        self.gridLayout_2.addWidget(self.label_2, 2, 0, 1, 1)
        self.btnRepoBrowse = QPushButton(self.gridFrame)
        self.btnRepoBrowse.setObjectName(u"btnRepoBrowse")
        self.gridLayout_2.addWidget(self.btnRepoBrowse, 0, 3, 1, 1)
        self.label = QLabel(self.gridFrame)
        self.label.setObjectName(u"label")
        self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
        self.lbSubmodule = QLabel(self.gridFrame)
        self.lbSubmodule.setObjectName(u"lbSubmodule")
        self.gridLayout_2.addWidget(self.lbSubmodule, 1, 0, 1, 1)
        self.cbSubmodule = QComboBox(self.gridFrame)
        self.cbSubmodule.setObjectName(u"cbSubmodule")
        self.gridLayout_2.addWidget(self.cbSubmodule, 1, 2, 1, 1)
        self.verticalLayout.addWidget(self.gridFrame)
        self.splitter = QSplitter(self.centralwidget)
        self.splitter.setObjectName(u"splitter")
        self.splitter.setFrameShape(QFrame.StyledPanel)
        self.splitter.setFrameShadow(QFrame.Plain)
        self.splitter.setOrientation(Qt.Horizontal)
        self.gitViewA = GitView(self.splitter)
        self.gitViewA.setObjectName(u"gitViewA")
        self.splitter.addWidget(self.gitViewA)
        self.verticalLayout.addWidget(self.splitter)
        MainWindow.setCentralWidget(self.centralwidget)
        # --- menu bar and status bar -----------------------------------
        self.menubar = QMenuBar(MainWindow)
        self.menubar.setObjectName(u"menubar")
        self.menubar.setGeometry(QRect(0, 0, 800, 21))
        self.menuFile = QMenu(self.menubar)
        self.menuFile.setObjectName(u"menuFile")
        self.menu_Help = QMenu(self.menubar)
        self.menu_Help.setObjectName(u"menu_Help")
        self.menu_Settings = QMenu(self.menubar)
        self.menu_Settings.setObjectName(u"menu_Settings")
        self.menu_View = QMenu(self.menubar)
        self.menu_View.setObjectName(u"menu_View")
        self.menuIgnoreWhitespace = QMenu(self.menu_View)
        self.menuIgnoreWhitespace.setObjectName(u"menuIgnoreWhitespace")
        self.menu_Edit = QMenu(self.menubar)
        self.menu_Edit.setObjectName(u"menu_Edit")
        self.menu_Merge = QMenu(self.menubar)
        self.menu_Merge.setObjectName(u"menu_Merge")
        self.menuCopy_To_Conflict_Log = QMenu(self.menu_Merge)
        self.menuCopy_To_Conflict_Log.setObjectName(u"menuCopy_To_Conflict_Log")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QStatusBar(MainWindow)
        self.statusbar.setObjectName(u"statusbar")
        MainWindow.setStatusBar(self.statusbar)
        QWidget.setTabOrder(self.leRepo, self.btnRepoBrowse)
        QWidget.setTabOrder(self.btnRepoBrowse, self.leOpts)
        # --- menu population -------------------------------------------
        self.menubar.addAction(self.menuFile.menuAction())
        self.menubar.addAction(self.menu_Edit.menuAction())
        self.menubar.addAction(self.menu_View.menuAction())
        self.menubar.addAction(self.menu_Merge.menuAction())
        self.menubar.addAction(self.menu_Settings.menuAction())
        self.menubar.addAction(self.menu_Help.menuAction())
        self.menuFile.addAction(self.acQuit)
        self.menu_Help.addAction(self.acAbout)
        self.menu_Help.addAction(self.acAboutQt)
        self.menu_Settings.addAction(self.acPreferences)
        self.menu_View.addAction(self.acVisualizeWhitespace)
        self.menu_View.addAction(self.menuIgnoreWhitespace.menuAction())
        self.menu_View.addSeparator()
        self.menu_View.addAction(self.acCompare)
        self.menuIgnoreWhitespace.addAction(self.acIgnoreNone)
        self.menuIgnoreWhitespace.addAction(self.acIgnoreEOL)
        self.menuIgnoreWhitespace.addAction(self.acIgnoreAll)
        self.menu_Edit.addAction(self.acCopy)
        self.menu_Edit.addAction(self.acSelectAll)
        self.menu_Edit.addSeparator()
        self.menu_Edit.addAction(self.acFind)
        self.menu_Merge.addAction(self.menuCopy_To_Conflict_Log.menuAction())
        self.menuCopy_To_Conflict_Log.addAction(self.acCopyLog)
        self.menuCopy_To_Conflict_Log.addAction(self.acCopyLogA)
        self.menuCopy_To_Conflict_Log.addAction(self.acCopyLogB)

        self.retranslateUi(MainWindow)

        QMetaObject.connectSlotsByName(MainWindow)
    # setupUi

    def retranslateUi(self, MainWindow):
        """Set all translatable texts, shortcuts, and tooltips."""
        MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"QGitc", None))
        self.acQuit.setText(QCoreApplication.translate("MainWindow", u"Close &Window", None))
        self.acAbout.setText(QCoreApplication.translate("MainWindow", u"&About QGitc", None))
        self.acPreferences.setText(QCoreApplication.translate("MainWindow", u"&Preferences...", None))
        self.actionIgnore_whitespace_changes.setText(QCoreApplication.translate("MainWindow", u"Ignore whitespace changes", None))
        self.acVisualizeWhitespace.setText(QCoreApplication.translate("MainWindow", u"&Visualize whitespace", None))
        self.acIgnoreEOL.setText(QCoreApplication.translate("MainWindow", u"At &end of line", None))
        self.acIgnoreAll.setText(QCoreApplication.translate("MainWindow", u"&All", None))
        self.acIgnoreNone.setText(QCoreApplication.translate("MainWindow", u"&None", None))
        self.acCopy.setText(QCoreApplication.translate("MainWindow", u"&Copy", None))
#if QT_CONFIG(shortcut)
        self.acCopy.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+C", None))
#endif // QT_CONFIG(shortcut)
        self.acSelectAll.setText(QCoreApplication.translate("MainWindow", u"Select &All", None))
#if QT_CONFIG(shortcut)
        self.acSelectAll.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+A", None))
#endif // QT_CONFIG(shortcut)
        self.acFind.setText(QCoreApplication.translate("MainWindow", u"&Find", None))
#if QT_CONFIG(shortcut)
        self.acFind.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+F", None))
#endif // QT_CONFIG(shortcut)
        self.acCompare.setText(QCoreApplication.translate("MainWindow", u"&Compare Mode", None))
        self.acShowGraph.setText(QCoreApplication.translate("MainWindow", u"Show &graph", None))
        self.acAboutQt.setText(QCoreApplication.translate("MainWindow", u"About &Qt", None))
        self.acCopyLog.setText(QCoreApplication.translate("MainWindow", u"From Current &View", None))
#if QT_CONFIG(shortcut)
        self.acCopyLog.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+D", None))
#endif // QT_CONFIG(shortcut)
        self.acCopyLogA.setText(QCoreApplication.translate("MainWindow", u"From &A", None))
#if QT_CONFIG(shortcut)
        self.acCopyLogA.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+1", None))
#endif // QT_CONFIG(shortcut)
        self.acCopyLogB.setText(QCoreApplication.translate("MainWindow", u"From &B", None))
#if QT_CONFIG(shortcut)
        self.acCopyLogB.setShortcut(QCoreApplication.translate("MainWindow", u"Ctrl+2", None))
#endif // QT_CONFIG(shortcut)
#if QT_CONFIG(tooltip)
        self.leOpts.setToolTip(QCoreApplication.translate("MainWindow", u"See the GIT-LOG options for more information.", None))
#endif // QT_CONFIG(tooltip)
        self.leOpts.setPlaceholderText(QCoreApplication.translate("MainWindow", u"Type the log options here and press Enter to filter", None))
        self.label_2.setText(QCoreApplication.translate("MainWindow", u"Filter:", None))
        self.btnRepoBrowse.setText(QCoreApplication.translate("MainWindow", u"&Browse...", None))
        self.label.setText(QCoreApplication.translate("MainWindow", u"Repository:", None))
        self.lbSubmodule.setText(QCoreApplication.translate("MainWindow", u"Submodule:", None))
        self.menuFile.setTitle(QCoreApplication.translate("MainWindow", u"&File", None))
        self.menu_Help.setTitle(QCoreApplication.translate("MainWindow", u"&Help", None))
        self.menu_Settings.setTitle(QCoreApplication.translate("MainWindow", u"&Settings", None))
        self.menu_View.setTitle(QCoreApplication.translate("MainWindow", u"&View", None))
        self.menuIgnoreWhitespace.setTitle(QCoreApplication.translate("MainWindow", u"&Ignore whitespace", None))
        self.menu_Edit.setTitle(QCoreApplication.translate("MainWindow", u"&Edit", None))
        self.menu_Merge.setTitle(QCoreApplication.translate("MainWindow", u"&Merge", None))
        self.menuCopy_To_Conflict_Log.setTitle(QCoreApplication.translate("MainWindow", u"Copy To Conflict &Log", None))
    # retranslateUi
| {
"content_hash": "3689a4caffdb16b9be6da3616ed705ce",
"timestamp": "",
"source": "github",
"line_count": 281,
"max_line_length": 142,
"avg_line_length": 49.39857651245551,
"alnum_prop": 0.6941862978171601,
"repo_name": "timxx/gitc",
"id": "12eae09bb42e2959be0b2200784c5c852d25465a",
"size": "14267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qgitc/ui_mainwindow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1108"
},
{
"name": "HTML",
"bytes": "11101"
},
{
"name": "Python",
"bytes": "258731"
},
{
"name": "QMake",
"bytes": "429"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from sentry.utils.rust import merge_rust_info_frames, starts_with, strip_symbol
# Sample Rust panic backtrace as it appears embedded in exception messages;
# consumed by get_event()/get_exc_info() and the merge tests below.
# NOTE(review): the frame lines' leading indentation may have been lost in
# transit -- confirm against the original fixture if formatting matters to
# the parser.
STACKTRACE = """
stacktrace: stack backtrace:
0: 0x111e51cf4 - backtrace::backtrace::trace::h38e3b1de9f341e04
at /.cargo/registry/src/github.com-1ecc6299db9ec823/backtrace-0.3.9/src/backtrace/mod.rs:42
1: 0x111e4a3be - failure::backtrace::Backtrace::new::h2abf3908d09948f1
at /.cargo/registry/src/github.com-1ecc6299db9ec823/failure-0.1.3/src/backtrace/mod.rs:111
2: 0x11163e27c - <failure::error::Error as core::convert::From<F>>::from::h5ae4b38f39150cb2
at /.cargo/registry/src/github.com-1ecc6299db9ec823/failure-0.1.3/src/error/mod.rs:36
- <T as core::convert::Into<U>>::into::h58e05f056150874e
at libcore/convert.rs:456
3: 0x11163a9b7 - symbolic::debuginfo::symbolic_normalize_debug_id::{{closure}}::he767b4111eb41a33
at /symbolic/cabi/src/debuginfo.rs:160
4: 0x111e7f5de - ___rust_maybe_catch_panic
at /rustc/da5f414c2c0bfe5198934493f04c676e2b23ff2e/src/libpanic_unwind/lib.rs:103
5: 0x111618fcb - std::panic::catch_unwind::h66eea40447da0e66
at /symbolic/cabi/libstd/panic.rs:392
6: 0x11160b9c1 - symbolic::utils::landingpad::h3cd528225184a301
at /symbolic/cabi/src/utils.rs:55
7: 0x111632f43 - _symbolic_normalize_debug_id
at /symbolic/cabi/src/utils.rs:74
8: 0x7fff69609f6b - _ffi_call_unix64
9: 0x7fff6960a786 - _ffi_call
10: 0x10fab19d6 - _cdata_call
11: 0x10efc014f - _PyObject_Call
12: 0x10f069f43 - _Py_Main
"""
def get_event(stacktrace):
    """Build a minimal Sentry event payload with *stacktrace* embedded in
    both the log entry and the exception value, mirroring how symbolic
    reports Rust-backed errors."""
    return {
        "event_id": "fe628bfa48064c9b97ce7e75a19e6197",
        "level": "error",
        "platform": "python",
        "logentry": {"formatted": "invalid debug identifier\n\n%s" % stacktrace},
        "exception": {
            "values": [
                {
                    "type": "ParseDebugIdError",
                    "value": "invalid debug identifier\n\n%s" % stacktrace,
                    # A single Python frame; the Rust frames are expected to
                    # be merged in by merge_rust_info_frames().
                    "stacktrace": {
                        "frames": [
                            {
                                "abs_path": "/symbolic/py/symbolic/utils.py",
                                "filename": "symbolic/utils.py",
                                "function": "rustcall",
                                "in_app": True,
                                "lineno": 93,
                                "module": "symbolic.utils",
                            }
                        ]
                    },
                }
            ]
        },
    }
def get_exc_info(rust_info):
    """Return a ``(type, value, traceback)`` triple mimicking sys.exc_info().

    The ``rust_info`` payload is attached to the exception only when it is
    not None.
    """
    error = ValueError("hello world")
    if rust_info is not None:
        error.rust_info = rust_info
    return type(error), error, None
def test_merge_rust_info():
    """Rust frames from the backtrace are merged into the event and the
    backtrace text is stripped from message and exception value."""
    event = get_event(STACKTRACE)
    merge_rust_info_frames(event, {"exc_info": get_exc_info(STACKTRACE)})

    assert event["platform"] == "native"
    assert event["logentry"]["formatted"] == "invalid debug identifier"

    exc = event["exception"]["values"][0]
    assert exc["value"] == "invalid debug identifier"

    frames = exc["stacktrace"]["frames"]
    assert len(frames) == 8
    assert frames[0]["platform"] == "python"

    # Top frame
    top = frames[7]
    assert top["instruction_addr"] == "0x11163e27c"
    assert top["function"] == "<failure::error::Error as core::convert::From<F>>::from"
    assert top["package"] == "failure"
    assert top["in_app"] is False
    assert top["filename"] == "mod.rs"
    assert top["lineno"] == 36

    # Inlined frame, same address as the top frame
    assert top["instruction_addr"] == "0x11163e27c"
    inlined = frames[6]
    assert inlined["function"] == "<T as core::convert::Into<U>>::into"
    assert inlined["package"] == "core"
    assert inlined["in_app"] is False
    assert inlined["filename"] == "convert.rs"
    assert inlined["lineno"] == 456
def test_without_exc_info():
    """Events lacking an exc_info hint are left untouched."""
    evt = get_event(STACKTRACE)
    merge_rust_info_frames(evt, {})
    assert evt["platform"] == "python"
def test_without_rust_info():
    """Exceptions without a rust_info attribute are not converted."""
    evt = get_event(STACKTRACE)
    merge_rust_info_frames(evt, {"exc_info": get_exc_info(None)})
    assert evt["platform"] == "python"
def test_without_stacktrace():
    """An empty backtrace still strips the message but adds no frames."""
    trace = "stacktrace: stack backtrace:\n\n"
    evt = get_event(trace)
    merge_rust_info_frames(evt, {"exc_info": get_exc_info(trace)})

    assert evt["platform"] == "native"
    assert evt["logentry"]["formatted"] == "invalid debug identifier"

    exc = evt["exception"]["values"][0]
    assert exc["value"] == "invalid debug identifier"
    assert len(exc["stacktrace"]["frames"]) == 1
def test_without_exception():
    """Events with no exception section are not converted."""
    evt = get_event(STACKTRACE)
    del evt["exception"]
    merge_rust_info_frames(evt, {"exc_info": get_exc_info(STACKTRACE)})
    assert evt["platform"] == "python"
def test_starts_with():
    """starts_with handles plain, generic, trait-impl and blanket symbols."""
    cases = [
        # Basic functions
        ("__rust_maybe_catch_panic", "__rust", True),
        ("futures::task_impl::std::set", "futures::", True),
        ("futures::task_impl::std::set", "tokio::", False),
        # Generics (mangled and demangled forms)
        ("_<futures..task_impl..Spawn<T>>::enter::_{{closure}}", "futures::", True),
        ("_<futures..task_impl..Spawn<T>>::enter::_{{closure}}", "tokio::", False),
        ("<futures::task_impl::Spawn<T>>::enter::{{closure}}", "futures::", True),
        ("<futures::task_impl::Spawn<T>>::enter::{{closure}}", "tokio::", False),
        # Trait implementations
        ("<failure::error::Error as core::convert::From<F>>::from", "failure::", True),
        ("_<failure::error::Error as core::convert::From<F>>::from", "failure::", True),
        # Blanket implementations
        ("<T as core::convert::Into<U>>::into", "core::", True),
    ]
    for symbol, prefix, expected in cases:
        assert bool(starts_with(symbol, prefix)) == expected
def test_strip_symbol():
    """strip_symbol drops trailing hash suffixes but leaves plain names."""
    cases = [
        ("", ""),
        ("_ffi_call_unix64", "_ffi_call_unix64"),
        ("backtrace::backtrace::trace::h1c213d29ba950696",
         "backtrace::backtrace::trace"),
        ("<T as core::convert::Into<U>>::into::h58e05f056150874e",
         "<T as core::convert::Into<U>>::into"),
        ("symbolic_symcache_from_object", "symbolic_symcache_from_object"),
    ]
    for symbol, expected in cases:
        assert strip_symbol(symbol) == expected
| {
"content_hash": "3dd8893a6a1884cbf99f3a7cdc0ad986",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 115,
"avg_line_length": 38.53448275862069,
"alnum_prop": 0.5873228933631618,
"repo_name": "mvaled/sentry",
"id": "028072f50917bd9874ed4fba3706dd812719bd58",
"size": "6705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/utils/test_rust.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "226439"
},
{
"name": "Dockerfile",
"bytes": "6431"
},
{
"name": "HTML",
"bytes": "173429"
},
{
"name": "JavaScript",
"bytes": "9314175"
},
{
"name": "Lua",
"bytes": "65885"
},
{
"name": "Makefile",
"bytes": "9225"
},
{
"name": "Python",
"bytes": "50385401"
},
{
"name": "Ruby",
"bytes": "168"
},
{
"name": "Shell",
"bytes": "5685"
},
{
"name": "TypeScript",
"bytes": "773664"
}
],
"symlink_target": ""
} |
from .codec import PolylineCodec
__version__ = '1.4.0'
def decode(expression, precision=5, geojson=False):
    """Decode a polyline string into a list of coordinate tuples.

    :param expression: Polyline string, e.g. 'u{~vFvyys@fS]'.
    :param precision: Precision of the encoded coordinates (Google Maps
        uses 5, OpenStreetMap uses 6). Defaults to 5.
    :param geojson: If True, emit (lon, lat) tuples as described in
        https://tools.ietf.org/html/rfc7946#section-3.1.1
    :return: List of coordinate tuples, in (lat, lon) order unless
        *geojson* is True.
    """
    codec = PolylineCodec()
    return codec.decode(expression, precision, geojson)
def encode(coordinates, precision=5, geojson=False):
    """Encode a sequence of coordinates as a polyline string.

    :param coordinates: List of coordinate tuples, e.g. [(0, 0), (1, 0)].
        Expected in (lat, lon) order unless *geojson* is True.
    :param precision: Precision of the coordinates to encode (Google Maps
        uses 5, OpenStreetMap uses 6). Defaults to 5.
    :param geojson: Set to True when the input tuples are (lon, lat).
    :return: The encoded polyline string.
    """
    codec = PolylineCodec()
    return codec.encode(coordinates, precision, geojson)
__all__ = ['decode', 'encode']
| {
"content_hash": "6abea67695c76911605085ecdc41c639",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 114,
"avg_line_length": 38.666666666666664,
"alnum_prop": 0.6857366771159875,
"repo_name": "frederickjansen/polyline",
"id": "99bd7b692532a1da1931d61104919686d42ca9a9",
"size": "1276",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "polyline/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "322"
},
{
"name": "Python",
"bytes": "9251"
}
],
"symlink_target": ""
} |
from limbo.breadcrumbs.breadcrumbs import Breadcrumbs,BreadcrumbsNotSet
from django.contrib.flatpages.models import FlatPage
from django.http import Http404
def breadcrumbs_for_flatpages(request, flatpage):
    """Populate ``request.breadcrumbs`` with one entry per path level of
    *flatpage*'s URL, ending with the page itself.

    :param request: current request; must carry a ``Breadcrumbs`` instance
        on ``request.breadcrumbs``.
    :param flatpage: the ``FlatPage`` being rendered.
    :raises BreadcrumbsNotSet: if the breadcrumbs machinery is missing.
    :raises TypeError: if *flatpage* is not a ``FlatPage`` with an id.
    :raises Http404: if an intermediate path level has no FlatPage.
    """
    if not hasattr(request, 'breadcrumbs') or \
            not isinstance(request.breadcrumbs, Breadcrumbs):
        # BUG FIX: the imported exception class is ``BreadcrumbsNotSet``;
        # the original raised the undefined name ``BreadcrumbNotSet``,
        # which would itself crash with a NameError on this path.
        raise BreadcrumbsNotSet(u"You need to setup breadcrumbs to use this " + \
            "function.")

    if not isinstance(flatpage, FlatPage) or \
            not hasattr(flatpage, 'id'):
        raise TypeError(u"flatpage argument isn't a FlatPage instance or " + \
            "not have id.")

    paths = []
    for part in request.path_info.split(u"/"):
        # Splitting yields empty strings for leading/trailing slashes.
        if len(part) == 0:
            continue
        # Re-wrap the segment in slashes.
        if not part.startswith(u"/"):
            part = u"/" + part
        if not part.endswith(u"/"):
            part = part + u"/"
        # URL of this level: accumulated ancestor segments (each stored
        # without its trailing slash) plus the current part (which carries
        # both slashes).
        if len(paths) > 0:
            url = u"".join(paths + [part])
        else:
            url = part
        if url == flatpage.url:
            # Same page as the one being rendered -- skip the extra query.
            request.breadcrumbs(flatpage.title, flatpage.url)
        else:
            # An intermediate level must exist as its own FlatPage; a
            # missing one is a URL-design problem, so 404 is appropriate.
            try:
                f = FlatPage.objects.get(url=url)
            except FlatPage.DoesNotExist:
                raise Http404
            else:
                request.breadcrumbs(f.title, f.url)
        # Remember this segment with a leading slash and no trailing slash.
        paths.append(u"/" + url[1:-1].rpartition(u"/")[-1])
| {
"content_hash": "c73c6888c48134fe7c57bf6c64b7af78",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 80,
"avg_line_length": 39.24,
"alnum_prop": 0.5978593272171254,
"repo_name": "gdoermann/django-limbo",
"id": "f5cdb7186736eb29aaaaefaa8e28142f1c1b80e5",
"size": "1986",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "limbo/breadcrumbs/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "409084"
},
{
"name": "Python",
"bytes": "327119"
}
],
"symlink_target": ""
} |
import time
class Log:
    """Server log writer; prefixes each message with seconds since startup."""

    def __init__(self, world):
        # Remember when the server came up so messages show relative time.
        self.__world = world
        self.__time_started = int(time.time())

    def __elapsed(self):
        # Whole seconds elapsed since this Log was created.
        return int(time.time()) - self.__time_started

    def write(self, message):
        """Print *message* if logging is enabled in the world config."""
        if not self.__world.config["log"]["enable"]:
            return
        print("[{0}] {1}".format(self.__elapsed(), message))

    def debug(self, message):
        """Print *message* only when both logging and debugging are enabled."""
        log_cfg = self.__world.config["log"]
        if log_cfg["enable"] and log_cfg["debug"]:
            print("{{{0}}} {1}".format(self.__elapsed(), message))
| {
"content_hash": "fbed20c4954c063c62805d6685a83c1a",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 82,
"avg_line_length": 31.125,
"alnum_prop": 0.6224899598393574,
"repo_name": "pariahsoft/DennisX",
"id": "896a6f7064a7d6be9e0e6585600370eb25b22fce",
"size": "1864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "inc/log.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30964"
}
],
"symlink_target": ""
} |
from oscar.core.loading import get_model
Notification = get_model('communication', 'Notification')
def notifications(request):
    """Context processor exposing the count of unread notifications.

    Adds ``num_unread_notifications`` to the template context for
    authenticated users; returns an empty dict otherwise.
    """
    context = {}
    user = getattr(request, 'user', None)
    if user and user.is_authenticated:
        unread = Notification.objects.filter(
            recipient=user, date_read=None).count()
        context['num_unread_notifications'] = unread
    return context
| {
"content_hash": "c828cdadc998f15091a16dda0fe36290",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 72,
"avg_line_length": 32.833333333333336,
"alnum_prop": 0.6903553299492385,
"repo_name": "django-oscar/django-oscar",
"id": "db885c799fe21fd1041ec37700e6d82ac1ac9ff7",
"size": "394",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/oscar/apps/communication/notifications/context_processors.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "529"
},
{
"name": "HTML",
"bytes": "565297"
},
{
"name": "JavaScript",
"bytes": "41944"
},
{
"name": "Makefile",
"bytes": "4234"
},
{
"name": "Python",
"bytes": "2261460"
},
{
"name": "SCSS",
"bytes": "21815"
},
{
"name": "Shell",
"bytes": "308"
}
],
"symlink_target": ""
} |
"""
This module contains Pipeline class that represents a central control
mechanism over a sequential image processing pipeline. It controls all the
available image processing categories, handles processing results and works
as an mediator between the algorithms and UI.
"""
from nefi2.model.categories._category import Category
from nefi2.model.algorithms import _utility
import demjson
import networkx.readwrite as nx
import os
import re
import shutil
import sys
import copy
import zope.event.classhandler
import cv2
__authors__ = {"Pavel Shkadzko": "p.shkadzko@gmail.com",
"Dennis Groß": "gdennis91@googlemail.com",
"Philipp Reichert": "prei@me.com"}
def filter_images(file_list):
    """
    Filter out all non-image files.

    Args:
        *file_list* (list): file names to filter

    Returns:
        list of file names whose extension is a supported image type;
        the check is case-insensitive, so e.g. ``photo.JPG`` is kept too

    <This function is used to protect the pipeline from attempting to process
    any non-image files existing in the input directory.>
    """
    valid_ext = {'.jpg', '.jpeg', '.png', '.tif', '.tiff'}
    # lower-case the extension so uppercase variants are not dropped
    return [f for f in file_list
            if os.path.splitext(f)[-1].lower() in valid_ext]
def read_image_file(fpath, prev_cat, start_from):
    """
    Read and return an image file as a numpy ndarray.
    If the name of the previous Category is Segmentation, read grayscaled img.
    Args:
        | *fpath* (str): file path
        | *prev_cat* (str): name of the previous Category
        | *start_from* (int): starting Category position
    """
    # Segmentation output is single-channel, so continue in grayscale
    # unless we are (re)starting the pipeline from the very beginning.
    grayscale = prev_cat == 'Segmentation' and start_from != 0
    flag = cv2.IMREAD_GRAYSCALE if grayscale else cv2.IMREAD_COLOR
    try:
        img = cv2.imread(fpath, flag)
    except (IOError, cv2.error) as ex:
        print(ex)
        print('ERROR in read_image_file() Cannot read the image file, make sure it is readable')
        sys.exit(1)
    return img
class Pipeline:
    """Central controller of the sequential image-processing pipeline.

    Keeps the ordered list of executed Categories, runs them over the
    selected input images, stores intermediate results in the ``_cache_``
    directory and notifies UI subscribers through zope events.
    """
    def __init__(self, categories):
        """
        Args:
            | *categories* : OrderedDict of category names and their instances
        public Attributes:
            | *available_cats* (dict): dict of {Category name: Category}
            | *executed_cats* (list): a list of Categories in the pipeline
            | *pipeline_path* (str): a path to a saved pipeline
            | *out_dir* (str): a path where processing results are saved
            | *input_files* (list): a list of image files in the input dir
            | *cache* (list): a list of tuples where (Category name, img url)
        """
        self.cache = []
        self.available_cats = categories
        self.executed_cats = []
        self.pipeline_path = os.path.join('assets', 'json') # default dir
        self.out_dir = os.path.join(os.getcwd(), 'output') # default out dir
        if not os.path.exists(self.out_dir):
            os.mkdir(self.out_dir)
        self.input_files = None
        self.original_img = None # original image file as read first time
        # remember the results of each algorithm in the pipeline
        self.pipeline_memory = {}
    def subscribe_cache_event(self, function):
        """
        Subscribe to the cache event which tells the maincontroller about
        new images in the cache folder
        Args:
            function: the subscriber
        """
        # NOTE(review): self.cache_event is never assigned anywhere in this
        # class — confirm who sets it before this helper is used.
        self.cache_event.onChange += function
    def subscribe_progress_event(self, function):
        """
        Subscribe to the progress event which tells the maincontroller about
        the progress of the pipeline
        Args:
            function: the subscriber
        """
        # NOTE(review): self.progress_event is never assigned anywhere in
        # this class — confirm who sets it before this helper is used.
        self.progress_event.onChange += function
    def new_category(self, position, cat_name=None, alg_name=None):
        """
        This method is used by the json parser to create a category.
        The parser knows already the cat type and alg type as well
        as the position. So it doesn't make sense to create a blank and
        change it.
        Args:
            | *cat_name* (str): name of the category also indicating its cat type
            | *alg_name* (str): name of the active algorithm indicating its alg type
            | *position* (int): position in the executed_cats
        """
        # creating new blank Category
        if cat_name is None:
            blank_cat = Category("blank")
            blank_cat_copy = copy.deepcopy(blank_cat)
            self.executed_cats.insert(position, blank_cat_copy)
            return self.executed_cats[position]
        # inserting named Category
        for v in list(self.available_cats.values()):
            if v.name == cat_name:
                cat_copy = copy.deepcopy(v)
                self.executed_cats.insert(position, cat_copy)
        # setting active Algorithm
        for v in list(self.executed_cats[position].available_algs.values())[0]:
            if alg_name == v.name:
                v.set_modified()
                self.executed_cats[position].set_active_algorithm(alg_name)
                break
    def move_category(self, origin_pos, destination_pos):
        """
        Move Category instance within the pipeline using indices.
        Args:
            | *origin_pos* (int): Category index number
            | *destination_pos* (int): new position for Category
        """
        # simple swap; no re-validation of the resulting order happens here
        buf = self.executed_cats[origin_pos]
        self.executed_cats[origin_pos] = self.executed_cats[destination_pos]
        self.executed_cats[destination_pos] = buf
    def delete_category(self, category):
        """
        Remove Category from the pipeline.
        Args:
            *category* (int|str): Category position index or Category name
        """
        if type(category) == int:
            del self.executed_cats[category]
        elif type(category) == str:
            for i, cat in enumerate(self.executed_cats):
                if category == cat.name:
                    del self.executed_cats[i]
    def get_index(self, cat):
        """
        Gets the index of a given Category entry from the pipeline.
        Args:
            | *category* (cat): Category object
        Returns:
            | *index* (int): index of Category object in the pipeline
        """
        return self.executed_cats.index(cat)
    def process(self):
        """
        Process input image selected in UI, save intermediate results in
        _cache_ and enable pipeline recalculation from the category that was
        first changed.
        Keep all intermediate results.
        <This function will be obviously slower than the console variant due
        to IO operations on the _cache_ directory.>
        """
        # reset cache list
        self.cache = []
        # create and set output dir name
        img_fpath = self.input_files[0]
        orig_fname = os.path.splitext(os.path.basename(img_fpath))[0]
        pip_name = os.path.splitext(os.path.basename(self.pipeline_path))[0]
        out_path = os.path.join(self.out_dir,
                                '_'.join([pip_name, orig_fname]))
        # check if any algorithm has changed
        for idx, cat in enumerate(self.executed_cats):
            if cat.active_algorithm.modified:
                prev_cat_idx = 0 if idx - 1 < 0 else idx - 1
                if idx - 1 < 0:
                    start_idx = 0
                    prev_cat_name = self.executed_cats[0].name
                else:
                    start_idx = idx
                    prev_cat_name = self.executed_cats[prev_cat_idx].name
                break
        # NOTE(review): the three assignments below run unconditionally and
        # overwrite whatever the search loop above just found, so processing
        # always restarts from category 0.  This looks like a dropped
        # for/else — confirm against the original upstream source.
        prev_cat_idx = 0
        start_idx = 0
        prev_cat_name = self.executed_cats[prev_cat_idx].name
        # decide which category to continue from if any, act accordingly
        if prev_cat_idx == 0 and start_idx == 0:
            # new pipeline, read original img
            orig_arr = read_image_file(img_fpath, '', start_idx)
            self.pipeline_memory[prev_cat_idx] = orig_arr
            data = [self.pipeline_memory[prev_cat_idx], None]
            self.original_img = data[0]
        else:
            # get the results of the previous (unmodified) algorithm
            data = self.pipeline_memory.get(prev_cat_idx)
            # remember the prev path
            prev_path = self.pipeline_memory[prev_cat_idx][0]
            # we need to read grayscale if previous category was Segmentation
            data[0] = read_image_file(prev_path, prev_cat_name, start_idx)
        # release memory
        if start_idx != 0:
            released = [prev_path, data[1] or None, prev_cat_name]
            self.pipeline_memory[prev_cat_idx] = released
        # main pipeline loop, execute the pipeline from the modified category
        for num, cat in enumerate(self.executed_cats[start_idx:], start_idx):
            progress = (num / len(self.executed_cats)) * 100
            report = cat.name + " - " + cat.active_algorithm.name
            zope.event.notify(ProgressEvent(progress, report))
            cat.process(data)
            # reassign results of the prev alg for the next one;
            # sort so that data == [img, graph]
            data = list(cat.active_algorithm.result.items())
            data.sort(key=lambda x: ['img', 'graph'].index(x[0]))
            data = [i[1] for i in data]
            # check if we have graph
            if data[1]:
                # draw the graph into the original image
                data[0] = _utility.draw_graph(self.original_img, data[1])
            # save the results
            save_fname = self.get_results_fname(img_fpath, cat)
            save_path = os.path.join(out_path, save_fname)
            self.save_results(save_path, save_fname, data)
            # update the cache
            self.update_cache(cat, save_path)
            cache_path = os.path.join(os.getcwd(), '_cache_', save_fname)
            self.pipeline_memory[num] = [cache_path, data[1], cat.name]
            # release memory
            cat.active_algorithm.result['img'] = ''
    def process_batch(self):
        """
        Process a given image or a directory of images using predefined
        pipeline.
        """
        for fpath in self.input_files:
            # create and set output dir name
            orig_fname = os.path.splitext(os.path.basename(fpath))[0]
            pip_name = os.path.splitext(os.path.basename(self.pipeline_path))[0]
            dir_name = os.path.join(self.out_dir, '_'.join([pip_name,
                                                            orig_fname]))
            data = [read_image_file(fpath, '', None), None]
            self.original_img = data[0]
            # process given image with the pipeline
            last_cat = None
            for cat in self.executed_cats:
                cat.process(data)
                # reassign results of the prev alg for the next one
                data = list(cat.active_algorithm.result.items())
                data.sort(key=lambda x: ['img', 'graph'].index(x[0]))
                data = [i[1] for i in data]
                last_cat = cat
            if data[1]:
                # draw the graph into the original image
                data[0] = _utility.draw_graph(self.original_img, data[1])
            # save the results and update the cache if store_image is True
            save_fname = self.get_results_fname(fpath, last_cat)
            save_path = os.path.join(dir_name, save_fname)
            self.save_results(save_path, save_fname, data)
    def save_results(self, save_path, image_name, results):
        """
        Create a directory of the following format: current pipeline + fname.
        Save and put the results of algorithm processing in the directory.
        Args:
            | *save_path* (str): image save path
            | *image_name* (str): image name
            | *results* (list): a list of arguments to save
        """
        # check if the save directory exists
        dir_to_save = os.path.dirname(save_path)
        if not os.path.exists(dir_to_save):
            os.mkdir(dir_to_save)
        # saving the processed image
        try:
            saved = cv2.imwrite(save_path, results[0])
            if not saved:
                print('ERROR in save_results(), ' +
                      'cv2.imwrite could not save the results!')
                sys.exit(1)
        except (IOError, cv2.error) as ex:
            print(ex)
            print('ERROR in save_results() ' +
                  'Cannot write an image file, make sure there is ' +
                  'enough free space on disk')
            sys.exit(1)
        # exporting graph object
        if results[1]:
            image_name = os.path.splitext(image_name)[0] + '.txt'
            nx.write_multiline_adjlist(results[1], os.path.join(dir_to_save,
                                                                image_name),
                                       delimiter='|')
        print('Success!', image_name, 'saved in', dir_to_save)
    def sanity_check(self):
        """
        The order of the categories is important in the pipeline.
        You can not execute graph filtering before graph detection or
        segmentation after graph filtering (graph filtering requires
        graph object which only graph detection produces).
        Therefore we check if the pipeline is in an illegal state before we
        execute it.
        Returns:
            ("", None) if the pipeline is NOT in an illegal state,
            (*message*, cat) an error message with the offending Category
            otherwise.
        """
        if len(self.executed_cats) == 0:
            return ("Nothing to do."), 0
        pipeline_cats = self.executed_cats
        is_graph = False
        is_segmented = False
        for i in range(0, len(pipeline_cats)):
            cat = pipeline_cats[i].get_name()
            if (cat == "Segmentation" or cat == "Preprocessing") and is_graph:
                return (("You cannot process '{0}' after 'Graph Detection'.".format(cat)), pipeline_cats[i])
            if (cat == "Graph Detection") and is_graph:
                return (("You cannot process '{0}' more than once.".format(cat)), pipeline_cats[i])
            if (cat == "Graph Filtering") and not is_graph:
                return (("You need to process 'Graph Detection' before '{0}'.".format(cat)), pipeline_cats[i])
            if (cat == "Graph Detection") and not is_segmented:
                return (("You need to process 'Segmentation' before '{0}'.".format(cat)), pipeline_cats[i])
            if cat == "blank":
                return (("Specify step {0} in the pipeline first.".format(i)), pipeline_cats[i])
            if cat == "Graph Detection":
                is_graph = True
            if cat == "Segmentation":
                is_segmented = True
        return "", None
    def report_available_cats(self, selected_cat=None):
        """
        The order of the categories is important in the pipeline.
        You can not execute graph filtering before graph detection or
        segmentation after graph filtering (graph filtering requires
        graph object which only graph detection produces).
        When a user selects a category from a drop-down menu we provide only
        currently allowed categories.
        Args:
            *selected_cat* (str): Category selected by the user
        Returns:
            a list of currently allowed category names
        <Deprecated function>
        """
        available_cats = [cat.name for cat in self.get_available_cats()]
        if selected_cat is None:
            return available_cats
        elif selected_cat not in available_cats:
            return available_cats
        elif selected_cat == 'Graph Detection':
            return available_cats[available_cats.index(selected_cat) + 1:]
        else:
            return available_cats[available_cats.index(selected_cat):]
    def allow_cat_swap(self, pos1, pos2):
        """
        Check the order after potential category swapping and return a bool if
        it should be allowed or not.
        Args:
            |*pos1* (int): position to be swapped
            |*pos2* (int): position to be swapped
        Returns:
            True if allowed and False otherwise
        """
        # swapping is only allowed between positions of the same category type
        current_list = self.get_available_cat_names()
        return current_list[pos1] == current_list[pos2]
    def change_category(self, cat_name, position):
        """
        Change the type of the category at position in the executed_cats.
        This is needed for the ui since the categories in the executed_cats
        need to be changed because of the dropdown menus.
        Args:
            | *cat_name*: the name of the category as it should be
            | *position*: the position in the executed_cats
        """
        for v in list(self.available_cats.values()):
            if v.name == cat_name:
                self.executed_cats[position] = copy.deepcopy(v)
    def change_algorithm(self, alg_name, position):
        """
        Set the algorithm of the category in position to modified = *True*.
        Also change the selected algorithm of the category in position.
        Args:
            | *position*: list index of the category in the pipeline
            | *alg_name*: algorithm name
        """
        for v in list(self.executed_cats[position].available_algs.values())[0]:
            if alg_name == v.name:
                v.set_modified()
                self.executed_cats[position].set_active_algorithm(alg_name)
    def get_executed_cats(self):
        """
        Create and return a list of currently executed categories.
        *No cats are actually harmed during execution of this method >_<*
        Returns:
            *executed_cat_names*: list of Category names
        """
        executed_cat_names = [cat.get_name() for cat in self.executed_cats]
        return executed_cat_names
    def get_category(self, key):
        """
        Keys are the names of the categories.
        Returns:
            *category*: Category
        """
        return self.available_cats.get(key)
    def get_available_cat_names(self):
        """
        Create and return a list of currently loaded categories as strings.
        Names are used as keys in ``executed_cats`` list.
        Returns:
            a list of current Category names in the pipeline
        """
        return [cat.get_name() for cat in self.executed_cats]
    def get_available_cats(self):
        """
        Create and return a list of currently available categories as list of
        categorie objects.
        *<Get your cat for free ^-_-^>*
        Returns:
            *available_cats*: list of Category classes
        """
        available_cats = list(self.available_cats.values())
        return available_cats
    def get_algorithm_list(self, position):
        """
        Get names of all available algorithms for the category in position
        available in the pipeline.
        Sort the list and return.
        Args:
            *position* (int): Category index number
        Returns:
            *alg_names* (list): a sorted list of algorithm names
        """
        # NOTE: sorts the Category's own list in place as a side effect
        alg_names = self.executed_cats[position].alg_names
        alg_names.sort()
        return alg_names
    def get_all_algorithm_list(self, category):
        """
        Get names of all available algorithms for a given category.
        Sort the list and return.
        Args:
            *category*: Category
        Returns:
            *alg_names* (list): a sorted list of algorithm names
        """
        alg_names = category.alg_names
        alg_names.sort()
        return alg_names
    def get_results_fname(self, img_fpath, cat):
        """
        Create a file name for algorithm results.
        Args:
            | *img_fpath* (str): img file path
            | *cat* (Category): category instance
        Returns:
            *img_name* (str): image file name to save
        """
        alg_name = re.sub(' ', '_', cat.active_algorithm.name.lower())
        basename = os.path.basename(img_fpath)
        img_name = '_'.join([cat.get_name(), alg_name,
                             basename])
        return img_name
    def set_input(self, input_source):
        """
        Set the directory where original images are located or set a file path.
        Args:
            *input_source* (str): directory path with original images or a
             single file path
        """
        if os.path.isdir(input_source):
            files = filter_images(os.listdir(input_source))
            self.input_files = [os.path.join(input_source, f) for f in files]
        elif os.path.isfile(input_source):
            self.input_files = [input_source]
        if not os.path.exists('_cache_'):
            self.set_cache()
        zope.event.notify(CacheInputEvent(os.path.basename(input_source), input_source))
        shutil.copy(self.input_files[0], '_cache_')
    def set_output_dir(self, dir_path):
        """
        Create and set the directory where to save the results of processing.
        <Used in console mode>.
        Args:
            *dir_path* (str): directory path for processing results
        """
        if not os.path.exists(dir_path):
            os.mkdir(dir_path)
        self.out_dir = dir_path
    def load_pipeline_json(self, url):
        """
        Loads the Pipeline from the url location and parses all data to
        create the corresponding executed_cats
        Args:
            | *url*: location identifier for the pipeline.json
        """
        try:
            json = demjson.decode_file(url, "UTF-8")
        except demjson.JSONDecodeError as e:
            # NOTE(review): "e" is rebound to the exception *class* here and
            # then concatenated to a str, which would itself raise TypeError —
            # confirm the intended error reporting.
            e = sys.exc_info()[0]
            print("Unable to parse " + url + " trace: " + e)
        for position, alg in enumerate(json):
            alg_name = alg[0]
            alg_attributes = alg[1]
            cat_name = alg_attributes["type"]
            self.new_category(position, cat_name, alg_name)
            active_alg = self.executed_cats[position].active_algorithm
            active_alg.store_image = alg_attributes["store_image"]
            # every remaining attribute maps onto a UI element of the alg
            for name in alg_attributes.keys():
                if name == "type" or name == "store_image":
                    continue
                value = alg_attributes[name]
                alg_ui_elem = active_alg.find_ui_element(name)
                if alg_ui_elem:
                    alg_ui_elem.set_value(value)
        self.pipeline_path = url
        # reset current cache
        self.set_cache()
    def save_pipeline_json(self, name, url):
        """
        Goes through the list of executed_cats and calls for every
        selected_algorithm its report_pip method. With the returned
        dictionaries, it builds the pipeline.json file and stores it
        at the given url location on the file system.
        Args:
            | *name*: pipeline name (currently unused by this method)
            | *url*: location identifier for the pipeline.json
        """
        alg_reports = []
        for cat in self.executed_cats:
            alg = cat.get_active_algorithm()
            cat_name, alg_dic = alg.report_pip()
            alg_reports.append([cat_name, alg_dic])
        with open(url + ".json", "wb+") as outfile:
            # ord_alg_reps = OrderedDict(alg_reports)
            outfile.write(bytes(demjson.encode(alg_reports), "UTF-8"))
    def set_cache(self):
        """
        Create cache dir in order to save in it the intermediate results of
        processing and an original image.
        Recreate dir if exists or before running image processing.
        <This is done to make thumbnails in the left pane available in UI.>
        """
        if os.path.exists('_cache_'):
            try:
                shutil.rmtree('_cache_')
            except (IOError, OSError):
                print('ERROR in set_cache() ' +
                      'Cannot remove _cache_ directory, make sure it ' +
                      'is not open or locked by some other process.')
                sys.exit(1)
        os.mkdir('_cache_')
        self.cache = []
    def update_cache(self, cat, img_path):
        """
        Copy an img to cache dir and update the cache list.
        Args:
            | *category*: Category
            | *img_path* (str): image path
        """
        try:
            shutil.copy(img_path, '_cache_')
        except (IOError, OSError) as ex:
            print(ex)
            print('ERROR in update_cache() ' +
                  'Cannot copy to _cache_ directory, make sure there ' +
                  'is enough space on disk')
            sys.exit(1)
        cache_img_path = os.path.join(os.getcwd(), '_cache_',
                                      os.path.basename(img_path))
        zope.event.notify(CacheAddEvent(cat, cache_img_path))
        self.cache.append((cat, cache_img_path))
class ProgressEvent(object):
    """
    Reports pipeline progress back to the maincontroller:
    a completion percentage plus a textual description of the current step.
    """
    def __init__(self, value, report):
        # *value*: percentage done, *report*: "Category - Algorithm" label
        self.value = value
        self.report = report
class CacheAddEvent(object):
    """
    Tells the maincontroller that a new image was added to the cache,
    carrying the producing Category and the cached file path.
    """
    def __init__(self, cat, path):
        self.cat = cat
        self.path = path
class CacheRemoveEvent(object):
    """
    Counterpart of CacheAddEvent: tells the maincontroller that a cached
    image is gone, carrying the Category and the cached file path.
    """
    def __init__(self, cat, path):
        self.cat = cat
        self.path = path
class CacheInputEvent(object):
    """
    Tells the maincontroller about the input image copied into the cache
    (see Pipeline.set_input): its file name and its source path.
    """
    def __init__(self, image_name, path):
        self.image_name = image_name
        self.path = path
if __name__ == '__main__':
    # This module is a library; there is nothing to run directly.
    pass
| {
"content_hash": "3ba79983a4b592c3d20487777a6d84a2",
"timestamp": "",
"source": "github",
"line_count": 699,
"max_line_length": 110,
"avg_line_length": 37.659513590844064,
"alnum_prop": 0.5566403282175961,
"repo_name": "LumPenPacK/NetworkExtractionFromImages",
"id": "d9b07a2ec0a7b142b6d994086a788da0b8ed3a8e",
"size": "26374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nefi2/model/pipeline.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1577"
},
{
"name": "C",
"bytes": "3035840"
},
{
"name": "C++",
"bytes": "147394619"
},
{
"name": "CMake",
"bytes": "603"
},
{
"name": "CSS",
"bytes": "4298"
},
{
"name": "FORTRAN",
"bytes": "14321"
},
{
"name": "HTML",
"bytes": "41126"
},
{
"name": "Lex",
"bytes": "20920"
},
{
"name": "Makefile",
"bytes": "350419"
},
{
"name": "Python",
"bytes": "25507066"
},
{
"name": "QMake",
"bytes": "22941"
},
{
"name": "Shell",
"bytes": "19080"
},
{
"name": "Yacc",
"bytes": "248826"
}
],
"symlink_target": ""
} |
from nanpy.arduinoboard import ArduinoObject
from nanpy.arduinoboard import (arduinoobjectmethod, returns)
class DHT(ArduinoObject):
    """Proxy for a DHT temperature/humidity sensor attached via nanpy.

    The method bodies are intentionally empty: the ``arduinoobjectmethod``
    decorator supplies the real behavior (presumably a remote call to the
    firmware object created in ``__init__``), and ``returns`` converts the
    reply to the annotated type.
    """
    # Sensor type codes forwarded to the firmware-side constructor.
    DHT11 = 11
    DHT22 = 22
    DHT21 = 21
    AM2301 = 21  # same code as DHT21
    def __init__(self, pin, _type, count=6, connection=None):
        # Create the firmware-side DHT object on *pin* with sensor *_type*;
        # the returned id identifies it in subsequent remote calls.
        ArduinoObject.__init__(self, connection=connection)
        self.id = self.call('new', pin, _type, count)
    @returns(int)
    @arduinoobjectmethod
    def begin(self):
        # Remote call: initialize the sensor.
        pass
    @returns(float)
    @arduinoobjectmethod
    def readHumidity(self):
        # Remote call: relative humidity reading.
        pass
    @returns(float)
    @arduinoobjectmethod
    def readTemperature(self, value=False):
        # Remote call: temperature reading; *value* is forwarded to the
        # firmware (unit toggle presumably — TODO confirm semantics).
        pass
| {
"content_hash": "45bf359bfd0e26fce501be25c392e5be",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 61,
"avg_line_length": 22.892857142857142,
"alnum_prop": 0.6505460218408736,
"repo_name": "ryanvade/nanpy",
"id": "f0b8302c9a68c27f79a300498d2119f1aa4ed84a",
"size": "641",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nanpy/dht.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "105323"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function, with_statement
import utils
from tornado.web import addslash, authenticated
from tornado.gen import coroutine
from . import BaseHandler, route, check_level
@route(r"/web/")
class IndexHandler(BaseHandler):
@authenticated
@addslash
def get(self):
self.render("web/index.html")
@route(r"/web/login")
class LoginHandler(BaseHandler):
def get(self):
if self.current_user:
self.redirect(self.reverse_url("web_index"))
return
self.render("web/login.html")
@coroutine
def post(self):
_login = self.get_body_argument("login")
_password = self.get_body_argument("password")
user = yield self.thread_pool.submit(
self.db.fetch, "SELECT * FROM users WHERE login = ? AND is_enabled = TRUE", _login)
if not user or utils.hash_password(_password, user["password"]) != user["password"]:
self.set_flash("The details you entered are incorrect.", BaseHandler.FLASH_ERROR)
self.redirect(self.reverse_url("web_login") + "?login=" + _login)
return
session = yield self.thread_pool.submit(self.create_session, user["login"])
self.set_secure_cookie("session", session)
self.redirect(self.reverse_url("web_index"))
@route(r"/web/logout")
class LogoutHandler(BaseHandler):
@authenticated
@coroutine
def get(self):
yield self.thread_pool.submit(self.clear_session, self.current_user["login"], self.current_user["session"])
self.clear_cookie("session")
self.redirect(self.reverse_url("web_login"))
@route(r"/web/feed")
class FeedHandler(BaseHandler):
@authenticated
def get(self):
self.render("web/feed.html")
@route(r"/web/users")
class UsersHandler(BaseHandler):
@authenticated
@check_level(
BaseHandler.LEVEL_SUPER,
BaseHandler.LEVEL_ADMIN)
def get(self):
self.render("web/users.html")
| {
"content_hash": "ec93825942a56a8eb8ca160a4519702d",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 115,
"avg_line_length": 29.529411764705884,
"alnum_prop": 0.6523904382470119,
"repo_name": "lokhman/sharedown",
"id": "de0f100fd96738e2f09ea807f931a87bdd32adbf",
"size": "3154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sharedown/handlers/web.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15041"
},
{
"name": "HTML",
"bytes": "32069"
},
{
"name": "JavaScript",
"bytes": "31591"
},
{
"name": "Nginx",
"bytes": "1657"
},
{
"name": "Python",
"bytes": "56945"
}
],
"symlink_target": ""
} |
import random
import string
from datetime import datetime
from urllib.error import HTTPError
from ..viaggiatreno import viaggiatreno, format
def _gen_ran_string(string_len: int = 16):
return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(string_len))
def process_inline_query(bot, iq, u):
    """Answer a Telegram inline query.

    Dispatch on the query text:
      * empty            -> usage help (default_answer)
      * numeric          -> train lookup by number
      * contains ``-``   -> itinerary "departure - arrival" at the current time
      * anything else    -> station search by name

    Args:
        | *bot*: bot instance (its username is embedded in the help texts)
        | *iq*: the inline query to answer
        | *u*: user object, used to bump usage-statistics counters
    """
    def minifyStation(__str):
        # Drop the first character of the station id (ids look like
        # "S01700" — TODO confirm) and then any leading zeros,
        # e.g. "S01700" -> "1700".
        __str = __str[1:]
        n = 0
        for i in __str:
            if i != "0":
                __str = __str[n:]
                break
            n += 1
        return __str
    def default_answer():
        # Single help article explaining inline usage; shown for empty queries.
        iq.answer(
            results=[
                {
                    "type": "article",
                    "id": _gen_ran_string(),
                    "title": "❇️ Orario Treni in tutte le chat!",
                    "description": "👉 Clicca qui per scoprire come usare Orario Treni in qualsiasi chat!",
                    "input_message_content": {
                        "message_text": (
                            "❇️ <b>Usa Orario Treni in tutte le chat!</b>"
                            "\n⏺ <i>Cerca treni, stazioni e itinerari in qualsiasi chat</i>"
                            "\nPer usare questa funzione basta che scrivi <code>@{username} query</code> in qualsiasi chat: "
                            "si aprirà un pop-up da dove potrai selezionare il risultato desiderato."
                            "\n➖➖️ <b>Cerca treni e stazioni</b>"
                            "\nScrivi il <b>numero del treno</b> o il <b>nome della stazione</b>, "
                            "per esempio <code>@{username} 9650</code> o <code>@{username} Roma Termini</code>"
                            "\n➖➖️ <b>Cerca itinerari</b>"
                            "\nScrivi la <b>stazione di partenza</b>, un <b>trattino -</b> e la <b>stazione di arrivo</b>: "
                            "per esempio <code>@{username} Milano Centrale - Roma Termini</code>"
                            "\n<i>Gli itinerari cercati inline sono basati sull'orario attuale</i>"
                            .format(username=bot.itself.username)
                        ),
                        "parse_mode": "HTML",
                        "disable_web_page_preview": True,
                    },
                    "reply_markup": {
                        "inline_keyboard": [
                            [{"text": "➡️ Orario Treni", "url": "https://t.me/OrarioTreniBot"}]
                        ]
                    },
                    "thumb_url": "http://i.imgur.com/hp9QUXx.png",
                }
            ]
        )
    def not_found_answer():
        # Same help body as default_answer but with a "not found" title.
        iq.answer(
            results=[
                {
                    "type": "article",
                    "id": _gen_ran_string(),
                    "title": "❌ Non trovato",
                    "description": "👉 Clicca qui per scoprire come usare Orario Treni in qualsiasi chat!",
                    "input_message_content": {
                        "message_text": (
                            "❇️ <b>Usa Orario Treni in tutte le chat!</b>"
                            "\n⏺ <i>Cerca treni, stazioni e itinerari in qualsiasi chat</i>"
                            "\nPer usare questa funzione basta che scrivi <code>@{username} query</code> in qualsiasi chat: "
                            "si aprirà un pop-up da dove potrai selezionare il risultato desiderato."
                            "\n➖➖️ <b>Cerca treni e stazioni</b>"
                            "\nScrivi il <b>numero del treno</b> o il <b>nome della stazione</b>, "
                            "per esempio <code>@{username} 9650</code> o <code>@{username} Roma Termini</code>"
                            "\n➖➖️ <b>Cerca itinerari</b>"
                            "\nScrivi la <b>stazione di partenza</b>, un <b>trattino -</b> e la <b>stazione di arrivo</b>: "
                            "per esempio <code>@{username} Milano Centrale - Roma Termini</code>"
                            "\n<i>Gli itinerari cercati inline sono basati sull'orario attuale</i>"
                            .format(username=bot.itself.username)
                        ),
                        "parse_mode": "HTML",
                        "disable_web_page_preview": True,
                    },
                    "reply_markup": {
                        "inline_keyboard": [
                            [{"text": "➡️ Orario Treni", "url": "https://t.me/OrarioTreniBot"}]
                        ]
                    },
                    "thumb_url": "http://i.imgur.com/hp9QUXx.png",
                }
            ]
        )
    if not iq.query:
        return default_answer()
    api = viaggiatreno.API()
    if iq.query.isnumeric(): # Search train
        try:
            results = api.call('cercaNumeroTrenoTrenoAutocomplete', iq.query)
        except HTTPError:
            results = []
        if len(results) == 0:
            return not_found_answer()
        u.increaseStat("stats_inline_queries")
        u.increaseStat("stats_trains_bynum")
        inline_results = []
        for result in results:
            # result[1] is the departure-station id needed by andamentoTreno
            raw = api.call('andamentoTreno', result[1], iq.query)
            text = format.formatTrain(raw)
            inline_results.append(
                {
                    "type": "article",
                    "id": _gen_ran_string(),
                    "title": "🚅 Treno {train}".format(train=raw['compNumeroTreno']),
                    "description": "👉 Informazioni del treno {train} da {o}".format(
                        train=raw['compNumeroTreno'],
                        o=raw['origine']
                    ),
                    "input_message_content": {
                        "message_text": text,
                        "parse_mode": "HTML",
                        "disable_web_page_preview": True,
                    },
                    "reply_markup": {
                        "inline_keyboard": [
                            [{"text": "🔄 Aggiorna le informazioni", "callback_data": "train@{d}_{n}@update"
                                .format(d=result[1],
                                        n=iq.query)}],
                            [{"text": "🚉 Fermate", "callback_data": "train@{d}_{n}@stops"
                                .format(d=result[1],
                                        n=iq.query)}]
                        ]},
                    "thumb_url": "http://i.imgur.com/hp9QUXx.png"
                }
            )
        iq.answer(results=inline_results)
    else:
        if "-" in iq.query: # Search itinerary
            try:
                # first match for each side of the dash becomes the endpoint
                station_a = minifyStation(api.call('cercaStazione', iq.query.split("-")[0].strip())[0]['id'])
                station_b = minifyStation(api.call('cercaStazione', iq.query.split("-")[1].strip())[0]['id'])
                date = datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
                raw = api.call('soluzioniViaggioNew', station_a, station_b, date)
            except (KeyError, IndexError, HTTPError):
                return not_found_answer()
            u.increaseStat("stats_inline_queries")
            u.increaseStat("stats_trains_byiti")
            text = format.formatItinerary(raw)
            iq.answer(
                results=[
                    {
                        "type": "article",
                        "id": _gen_ran_string(),
                        "title": "🛤 Itinerari da {a} a {b}".format(
                            a=iq.query.split("-")[0].upper(),
                            b=iq.query.split("-")[1].upper()),
                        "description": "{x} soluzioni trovate".format(
                            x=len(raw['soluzioni']) if len(raw['soluzioni']) < 5 else 5),
                        "input_message_content": {
                            "message_text": text,
                            "parse_mode": "HTML",
                            "disable_web_page_preview": True
                        },
                        "thumb_url": "http://i.imgur.com/hp9QUXx.png",
                    }
                ]
            )
        else: # Search station
            results = api.call('cercaStazione', iq.query)
            if len(results) == 0:
                return not_found_answer()
            elif len(results) > 0:
                u.increaseStat("stats_inline_queries")
                u.increaseStat("stats_stations")
                inline_results = []
                x = 0
                for station in results:
                    # stop at 50 results (Telegram's inline answer limit)
                    if x > 49:
                        break
                    inline_results.append(
                        {
                            "type": "article",
                            "id": _gen_ran_string(),
                            "title": "🚉 Stazione di {station}".format(station=station['nomeLungo']),
                            "description": "👉 Informazioni sulla stazione di {station}".format(station=station['nomeLungo']),
                            "input_message_content": {
                                "message_text": format.formatStation(station['nomeLungo'], station['id']),
                                "parse_mode": "HTML",
                                "disable_web_page_preview": True
                            },
                            "reply_markup": {
                                "inline_keyboard": [
                                    [{"text": "🔘 Mostra le informazioni da Wikipedia", "callback_data":
                                        "station@" + station["id"] + "@wiki"}],
                                    [{"text": "🚦 Arrivi",
                                      "callback_data": "station@" + station["id"] + "@arrivals"},
                                     {"text": "🚦 Partenze",
                                      "callback_data": "station@" + station["id"] + "@departures"}],
                                ]},
                            "thumb_url": "http://i.imgur.com/hp9QUXx.png",
                        }
                    )
                    x += 1
                iq.answer(results=inline_results)
| {
"content_hash": "0124bc2a13f14bb9f2b34f811c56ecec",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 125,
"avg_line_length": 45.80542986425339,
"alnum_prop": 0.4168724686357799,
"repo_name": "MarcoBuster/OrarioTreniBot",
"id": "a090d6902dc1604d37bfb9926304c935b2648f2a",
"size": "11336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/updates/inline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "157"
},
{
"name": "Python",
"bytes": "123926"
}
],
"symlink_target": ""
} |
"""Module containing the core callback.
"""
from soc.tasks.updates import project_conversion
from soc.tasks.updates import proposal_conversion
from soc.tasks.updates import role_conversion
class Callback(object):
  """Callback object that handles interaction between the core.
  """

  API_VERSION = 1

  def __init__(self, core):
    """Initializes a new Callback object for the specified core.

    Args:
      core: the core instance this callback registers its views with.
    """
    self.core = core
    self.views = []

  def registerViews(self):
    """Instantiates all view objects.
    """
    from soc.views import host
    from soc.views import legacy
    from soc.tasks import mailer
    from soc.views import oauth
    from soc.views import site
    from soc.views import user

    # Collect one instance of each view this module exposes, in the
    # order they should be registered.
    self.views.extend([
        host.HostProfilePage(),
        legacy.Legacy(),
        mailer.MailerTask(),
        oauth.PopupOAuthRedirectPage(),
        oauth.PopupOAuthVerified(),
        oauth.MakeRequest(),
        site.EditSitePage(),
        site.SiteHomepage(),
        user.CreateUserPage(),
        user.EditUserPage(),
        ])

  def registerWithSitemap(self):
    """Called by the server when sitemap entries should be registered.
    """
    self.core.requireUniqueService('registerWithSitemap')

    # Redesigned view registration
    for view in self.views:
      self.core.registerSitemapEntry(view.djangoURLPatterns())

    # The conversion tasks expose their URL patterns directly rather
    # than through view objects.
    for module in (role_conversion, proposal_conversion,
                   project_conversion):
      self.core.registerSitemapEntry(module.getDjangoURLPatterns())
| {
"content_hash": "f782862d306dbf1a02dd61a144ba0b78",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 78,
"avg_line_length": 30.303571428571427,
"alnum_prop": 0.7324690630524455,
"repo_name": "adviti/melange",
"id": "47aab2cb9a128a20b5f1b6c40e052beed59c04d2",
"size": "2282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/soc/modules/soc_core/callback.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from .dataUser import DataUser
__all__ = ["Property"]
class Property(DataUser):
    """A value attached to a DataObject.

    A Property behaves like a callable attribute of its `owner`: calling
    it with no arguments reads the stored value(s); calling it with
    arguments stores them. A call like::

        a.P()

    for a property ``P`` returns values appropriate to that property for
    ``a``, the `owner` of the property.

    Parameters
    ----------
    owner : yarom.dataObject.DataObject
        The owner of this property
    name : string
        The name of this property. Can be accessed as an attribute like::

            owner.name
    """

    # True when more than one value may be associated with the Property
    multiple = False
    link = None
    linkName = None

    def __init__(self, owner=False, **kwargs):
        super(Property, self).__init__(**kwargs)
        self.owner = owner

    def get(self, *args):
        """Return the things which are on the other side of this property.

        The return value must be iterable. For a ``get`` that produces a
        single value, wrap the value in a tuple like ``(value,)``.

        Derived classes must override.
        """
        raise NotImplementedError()

    def set(self, *args, **kwargs):
        """Set the value of this property.

        Derived classes must override.
        """
        raise NotImplementedError()

    def one(self):
        """Return a single value whether or not the Property is multivalued.

        Returns None when no value is available.
        """
        try:
            return next(iter(self.get()))
        except StopIteration:
            return None

    def hasValue(self):
        """Return True if the Property has any values set on it.

        May be defined differently for each property; the base
        implementation reports no values.
        """
        return False

    @property
    def values(self):
        raise NotImplementedError()

    def __call__(self, *args, **kwargs):
        """Read or write the property.

        When arguments are passed, ``set`` is called with them. Otherwise
        ``get`` is called: if ``multiple`` is True a Python set of the
        associated values is returned; otherwise a single bare value (or
        None when there is none).
        """
        if args or kwargs:
            return self.set(*args, **kwargs)
        found = self.get(*args, **kwargs)
        if self.multiple:
            return set(found)
        try:
            return next(iter(found))
        except StopIteration:
            return None
| {
"content_hash": "45ab49ff45e4d63bcf4be009d26afa5b",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 78,
"avg_line_length": 26.019607843137255,
"alnum_prop": 0.5591559909570459,
"repo_name": "mwatts15/YAROM",
"id": "754922f6a550f347f6d75e1bb0b405ae631d2af5",
"size": "2654",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "yarom/yProperty.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "206919"
},
{
"name": "Shell",
"bytes": "4696"
}
],
"symlink_target": ""
} |
import copy
from datetime import timedelta
from textwrap import dedent
from typing import Dict, no_type_check
import warnings
import numpy as np
from pandas._libs import lib
from pandas._libs.tslibs import NaT, Timestamp
from pandas._libs.tslibs.frequencies import is_subperiod, is_superperiod
from pandas._libs.tslibs.period import IncompatibleFrequency
from pandas.compat.numpy import function as nv
from pandas.errors import AbstractMethodError
from pandas.util._decorators import Appender, Substitution
from pandas.core.dtypes.generic import ABCDataFrame, ABCSeries
import pandas as pd
import pandas.core.algorithms as algos
from pandas.core.generic import _shared_docs
from pandas.core.groupby.base import GroupByMixin
from pandas.core.groupby.generic import SeriesGroupBy
from pandas.core.groupby.groupby import (
GroupBy, _GroupBy, _pipe_template, groupby)
from pandas.core.groupby.grouper import Grouper
from pandas.core.groupby.ops import BinGrouper
from pandas.core.indexes.datetimes import DatetimeIndex, date_range
from pandas.core.indexes.period import PeriodIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex, timedelta_range
from pandas.tseries.frequencies import to_offset
from pandas.tseries.offsets import DateOffset, Day, Nano, Tick
_shared_docs_kwargs = dict() # type: Dict[str, str]
class Resampler(_GroupBy):
    """
    Class for resampling datetimelike data, a groupby-like operation.
    See aggregate, transform, and apply functions on this object.
    It's easiest to use obj.resample(...) to use Resampler.
    Parameters
    ----------
    obj : pandas object
    groupby : a TimeGrouper object
    axis : int, default 0
    kind : str or None
        'period', 'timestamp' to override default index treatment
    Returns
    -------
    a Resampler of the appropriate type
    Notes
    -----
    After resampling, see aggregate, apply, and transform functions.
    """
    # to the groupby descriptor
    _attributes = ['freq', 'axis', 'closed', 'label', 'convention',
                   'loffset', 'base', 'kind']
    def __init__(self, obj, groupby=None, axis=0, kind=None, **kwargs):
        # binner/grouper start as None and are built lazily by _set_binner()
        self.groupby = groupby
        self.keys = None
        self.sort = True
        self.axis = axis
        self.kind = kind
        self.squeeze = False
        self.group_keys = True
        self.as_index = True
        self.exclusions = set()
        self.binner = None
        self.grouper = None
        if self.groupby is not None:
            self.groupby._set_grouper(self._convert_obj(obj), sort=True)
    def __str__(self):
        """
        Provide a nice str repr of our rolling object.
        """
        attrs = ("{k}={v}".format(k=k, v=getattr(self.groupby, k))
                 for k in self._attributes if
                 getattr(self.groupby, k, None) is not None)
        return "{klass} [{attrs}]".format(klass=self.__class__.__name__,
                                          attrs=', '.join(attrs))
    def __getattr__(self, attr):
        # lookup order: internal names, then resampler attributes (delegated
        # to the groupby), then column selection on the wrapped object
        if attr in self._internal_names_set:
            return object.__getattribute__(self, attr)
        if attr in self._attributes:
            return getattr(self.groupby, attr)
        if attr in self.obj:
            return self[attr]
        return object.__getattribute__(self, attr)
    def __iter__(self):
        """
        Resampler iterator.
        Returns
        -------
        Generator yielding sequence of (name, subsetted object)
        for each group.
        See Also
        --------
        GroupBy.__iter__
        """
        self._set_binner()
        return super().__iter__()
    @property
    def obj(self):
        # the object being resampled, held by the TimeGrouper
        return self.groupby.obj
    @property
    def ax(self):
        # the axis (index) being resampled along
        return self.groupby.ax
    @property
    def _typ(self):
        """
        Masquerade for compat as a Series or a DataFrame.
        """
        if isinstance(self._selected_obj, pd.Series):
            return 'series'
        return 'dataframe'
    @property
    def _from_selection(self):
        """
        Is the resampling from a DataFrame column or MultiIndex level.
        """
        # upsampling and PeriodIndex resampling do not work
        # with selection, this state used to catch and raise an error
        return (self.groupby is not None and
                (self.groupby.key is not None or
                 self.groupby.level is not None))
    def _convert_obj(self, obj):
        """
        Provide any conversions for the object in order to correctly handle.
        Parameters
        ----------
        obj : the object to be resampled
        Returns
        -------
        obj : converted object
        """
        obj = obj._consolidate()
        return obj
    def _get_binner_for_time(self):
        raise AbstractMethodError(self)
    def _set_binner(self):
        """
        Setup our binners.
        Cache these as we are an immutable object
        """
        if self.binner is None:
            self.binner, self.grouper = self._get_binner()
    def _get_binner(self):
        """
        Create the BinGrouper, assume that self.set_grouper(obj)
        has already been called.
        """
        binner, bins, binlabels = self._get_binner_for_time()
        bin_grouper = BinGrouper(bins, binlabels, indexer=self.groupby.indexer)
        return binner, bin_grouper
    def _assure_grouper(self):
        """
        Make sure that we are creating our binner & grouper.
        """
        self._set_binner()
    @Substitution(klass='Resampler',
                  versionadded='.. versionadded:: 0.23.0',
                  examples="""
    >>> df = pd.DataFrame({'A': [1, 2, 3, 4]},
    ...                   index=pd.date_range('2012-08-02', periods=4))
    >>> df
                A
    2012-08-02  1
    2012-08-03  2
    2012-08-04  3
    2012-08-05  4
    To get the difference between each 2-day period's maximum and minimum
    value in one pass, you can do
    >>> df.resample('2D').pipe(lambda x: x.max() - x.min())
                A
    2012-08-02  1
    2012-08-04  1""")
    @Appender(_pipe_template)
    def pipe(self, func, *args, **kwargs):
        return super().pipe(func, *args, **kwargs)
    _agg_see_also_doc = dedent("""
    See Also
    --------
    DataFrame.groupby.aggregate
    DataFrame.resample.transform
    DataFrame.aggregate
    """)
    _agg_examples_doc = dedent("""
    Examples
    --------
    >>> s = pd.Series([1,2,3,4,5],
                      index=pd.date_range('20130101', periods=5,freq='s'))
    2013-01-01 00:00:00    1
    2013-01-01 00:00:01    2
    2013-01-01 00:00:02    3
    2013-01-01 00:00:03    4
    2013-01-01 00:00:04    5
    Freq: S, dtype: int64
    >>> r = s.resample('2s')
    DatetimeIndexResampler [freq=<2 * Seconds>, axis=0, closed=left,
                            label=left, convention=start, base=0]
    >>> r.agg(np.sum)
    2013-01-01 00:00:00    3
    2013-01-01 00:00:02    7
    2013-01-01 00:00:04    5
    Freq: 2S, dtype: int64
    >>> r.agg(['sum','mean','max'])
                         sum  mean  max
    2013-01-01 00:00:00    3   1.5    2
    2013-01-01 00:00:02    7   3.5    4
    2013-01-01 00:00:04    5   5.0    5
    >>> r.agg({'result' : lambda x: x.mean() / x.std(),
               'total' : np.sum})
                         total    result
    2013-01-01 00:00:00      3  2.121320
    2013-01-01 00:00:02      7  4.949747
    2013-01-01 00:00:04      5       NaN
    """)
    @Substitution(see_also=_agg_see_also_doc,
                  examples=_agg_examples_doc,
                  versionadded='',
                  klass='DataFrame',
                  axis='')
    @Appender(_shared_docs['aggregate'])
    def aggregate(self, func, *args, **kwargs):
        self._set_binner()
        result, how = self._aggregate(func, *args, **kwargs)
        if result is None:
            # _aggregate could not handle func directly; fall back to a
            # groupby-and-aggregate over the bins
            how = func
            grouper = None
            result = self._groupby_and_aggregate(how,
                                                 grouper,
                                                 *args,
                                                 **kwargs)
        result = self._apply_loffset(result)
        return result
    # aliases for aggregate
    agg = aggregate
    apply = aggregate
    def transform(self, arg, *args, **kwargs):
        """
        Call function producing a like-indexed Series on each group and return
        a Series with the transformed values.
        Parameters
        ----------
        arg : function
            To apply to each group. Should return a Series with the same index.
        Returns
        -------
        transformed : Series
        Examples
        --------
        >>> resampled.transform(lambda x: (x - x.mean()) / x.std())
        """
        return self._selected_obj.groupby(self.groupby).transform(
            arg, *args, **kwargs)
    def _downsample(self, f):
        raise AbstractMethodError(self)
    def _upsample(self, f, limit=None, fill_value=None):
        raise AbstractMethodError(self)
    def _gotitem(self, key, ndim, subset=None):
        """
        Sub-classes to define. Return a sliced object.
        Parameters
        ----------
        key : string / list of selections
        ndim : 1,2
            requested ndim of result
        subset : object, default None
            subset to act on
        """
        self._set_binner()
        grouper = self.grouper
        if subset is None:
            subset = self.obj
        grouped = groupby(subset, by=None, grouper=grouper, axis=self.axis)
        # try the key selection
        try:
            return grouped[key]
        except KeyError:
            # no such column/selection: hand back the whole grouped object
            return grouped
    def _groupby_and_aggregate(self, how, grouper=None, *args, **kwargs):
        """
        Re-evaluate the obj with a groupby aggregation.
        """
        if grouper is None:
            self._set_binner()
            grouper = self.grouper
        obj = self._selected_obj
        grouped = groupby(obj, by=None, grouper=grouper, axis=self.axis)
        try:
            if isinstance(obj, ABCDataFrame) and callable(how):
                # Check if the function is reducing or not.
                result = grouped._aggregate_item_by_item(how, *args, **kwargs)
            else:
                result = grouped.aggregate(how, *args, **kwargs)
        except Exception:
            # we have a non-reducing function
            # try to evaluate
            result = grouped.apply(how, *args, **kwargs)
        result = self._apply_loffset(result)
        return self._wrap_result(result)
    def _apply_loffset(self, result):
        """
        If loffset is set, offset the result index.
        This is NOT an idempotent routine, it will be applied
        exactly once to the result.
        Parameters
        ----------
        result : Series or DataFrame
            the result of resample
        """
        needs_offset = (
            isinstance(self.loffset, (DateOffset, timedelta,
                                      np.timedelta64)) and
            isinstance(result.index, DatetimeIndex) and
            len(result.index) > 0
        )
        if needs_offset:
            result.index = result.index + self.loffset
        # clear loffset so a second call is a no-op (see docstring)
        self.loffset = None
        return result
    def _get_resampler_for_grouping(self, groupby, **kwargs):
        """
        Return the correct class for resampling with groupby.
        """
        return self._resampler_for_grouping(self, groupby=groupby, **kwargs)
    def _wrap_result(self, result):
        """
        Potentially wrap any results.
        """
        if isinstance(result, ABCSeries) and self._selection is not None:
            result.name = self._selection
        if isinstance(result, ABCSeries) and result.empty:
            # an empty result still carries an index at the resample
            # frequency and the original object's name
            obj = self.obj
            if isinstance(obj.index, PeriodIndex):
                result.index = obj.index.asfreq(self.freq)
            else:
                result.index = obj.index._shallow_copy(freq=self.freq)
            result.name = getattr(obj, 'name', None)
        return result
    def pad(self, limit=None):
        """
        Forward fill the values.
        Parameters
        ----------
        limit : integer, optional
            limit of how many values to fill
        Returns
        -------
        An upsampled Series.
        See Also
        --------
        Series.fillna
        DataFrame.fillna
        """
        return self._upsample('pad', limit=limit)
    ffill = pad
    def nearest(self, limit=None):
        """
        Resample by using the nearest value.
        When resampling data, missing values may appear (e.g., when the
        resampling frequency is higher than the original frequency).
        The `nearest` method will replace ``NaN`` values that appeared in
        the resampled data with the value from the nearest member of the
        sequence, based on the index value.
        Missing values that existed in the original data will not be modified.
        If `limit` is given, fill only this many values in each direction for
        each of the original values.
        Parameters
        ----------
        limit : int, optional
            Limit of how many values to fill.
            .. versionadded:: 0.21.0
        Returns
        -------
        Series or DataFrame
            An upsampled Series or DataFrame with ``NaN`` values filled with
            their nearest value.
        See Also
        --------
        backfill : Backward fill the new missing values in the resampled data.
        pad : Forward fill ``NaN`` values.
        Examples
        --------
        >>> s = pd.Series([1, 2],
        ...               index=pd.date_range('20180101',
        ...                                   periods=2,
        ...                                   freq='1h'))
        >>> s
        2018-01-01 00:00:00    1
        2018-01-01 01:00:00    2
        Freq: H, dtype: int64
        >>> s.resample('15min').nearest()
        2018-01-01 00:00:00    1
        2018-01-01 00:15:00    1
        2018-01-01 00:30:00    2
        2018-01-01 00:45:00    2
        2018-01-01 01:00:00    2
        Freq: 15T, dtype: int64
        Limit the number of upsampled values imputed by the nearest:
        >>> s.resample('15min').nearest(limit=1)
        2018-01-01 00:00:00    1.0
        2018-01-01 00:15:00    1.0
        2018-01-01 00:30:00    NaN
        2018-01-01 00:45:00    2.0
        2018-01-01 01:00:00    2.0
        Freq: 15T, dtype: float64
        """
        return self._upsample('nearest', limit=limit)
    def backfill(self, limit=None):
        """
        Backward fill the new missing values in the resampled data.
        In statistics, imputation is the process of replacing missing data with
        substituted values [1]_. When resampling data, missing values may
        appear (e.g., when the resampling frequency is higher than the original
        frequency). The backward fill will replace NaN values that appeared in
        the resampled data with the next value in the original sequence.
        Missing values that existed in the original data will not be modified.
        Parameters
        ----------
        limit : integer, optional
            Limit of how many values to fill.
        Returns
        -------
        Series, DataFrame
            An upsampled Series or DataFrame with backward filled NaN values.
        See Also
        --------
        bfill : Alias of backfill.
        fillna : Fill NaN values using the specified method, which can be
            'backfill'.
        nearest : Fill NaN values with nearest neighbor starting from center.
        pad : Forward fill NaN values.
        Series.fillna : Fill NaN values in the Series using the
            specified method, which can be 'backfill'.
        DataFrame.fillna : Fill NaN values in the DataFrame using the
            specified method, which can be 'backfill'.
        References
        ----------
        .. [1] https://en.wikipedia.org/wiki/Imputation_(statistics)
        Examples
        --------
        Resampling a Series:
        >>> s = pd.Series([1, 2, 3],
        ...               index=pd.date_range('20180101', periods=3, freq='h'))
        >>> s
        2018-01-01 00:00:00    1
        2018-01-01 01:00:00    2
        2018-01-01 02:00:00    3
        Freq: H, dtype: int64
        >>> s.resample('30min').backfill()
        2018-01-01 00:00:00    1
        2018-01-01 00:30:00    2
        2018-01-01 01:00:00    2
        2018-01-01 01:30:00    3
        2018-01-01 02:00:00    3
        Freq: 30T, dtype: int64
        >>> s.resample('15min').backfill(limit=2)
        2018-01-01 00:00:00    1.0
        2018-01-01 00:15:00    NaN
        2018-01-01 00:30:00    2.0
        2018-01-01 00:45:00    2.0
        2018-01-01 01:00:00    2.0
        2018-01-01 01:15:00    NaN
        2018-01-01 01:30:00    3.0
        2018-01-01 01:45:00    3.0
        2018-01-01 02:00:00    3.0
        Freq: 15T, dtype: float64
        Resampling a DataFrame that has missing values:
        >>> df = pd.DataFrame({'a': [2, np.nan, 6], 'b': [1, 3, 5]},
        ...                   index=pd.date_range('20180101', periods=3,
        ...                                       freq='h'))
        >>> df
                               a  b
        2018-01-01 00:00:00  2.0  1
        2018-01-01 01:00:00  NaN  3
        2018-01-01 02:00:00  6.0  5
        >>> df.resample('30min').backfill()
                               a  b
        2018-01-01 00:00:00  2.0  1
        2018-01-01 00:30:00  NaN  3
        2018-01-01 01:00:00  NaN  3
        2018-01-01 01:30:00  6.0  5
        2018-01-01 02:00:00  6.0  5
        >>> df.resample('15min').backfill(limit=2)
                               a    b
        2018-01-01 00:00:00  2.0  1.0
        2018-01-01 00:15:00  NaN  NaN
        2018-01-01 00:30:00  NaN  3.0
        2018-01-01 00:45:00  NaN  3.0
        2018-01-01 01:00:00  NaN  3.0
        2018-01-01 01:15:00  NaN  NaN
        2018-01-01 01:30:00  6.0  5.0
        2018-01-01 01:45:00  6.0  5.0
        2018-01-01 02:00:00  6.0  5.0
        """
        return self._upsample('backfill', limit=limit)
    bfill = backfill
    def fillna(self, method, limit=None):
        """
        Fill missing values introduced by upsampling.
        In statistics, imputation is the process of replacing missing data with
        substituted values [1]_. When resampling data, missing values may
        appear (e.g., when the resampling frequency is higher than the original
        frequency).
        Missing values that existed in the original data will
        not be modified.
        Parameters
        ----------
        method : {'pad', 'backfill', 'ffill', 'bfill', 'nearest'}
            Method to use for filling holes in resampled data
            * 'pad' or 'ffill': use previous valid observation to fill gap
              (forward fill).
            * 'backfill' or 'bfill': use next valid observation to fill gap.
            * 'nearest': use nearest valid observation to fill gap.
        limit : integer, optional
            Limit of how many consecutive missing values to fill.
        Returns
        -------
        Series or DataFrame
            An upsampled Series or DataFrame with missing values filled.
        See Also
        --------
        backfill : Backward fill NaN values in the resampled data.
        pad : Forward fill NaN values in the resampled data.
        nearest : Fill NaN values in the resampled data
            with nearest neighbor starting from center.
        interpolate : Fill NaN values using interpolation.
        Series.fillna : Fill NaN values in the Series using the
            specified method, which can be 'bfill' and 'ffill'.
        DataFrame.fillna : Fill NaN values in the DataFrame using the
            specified method, which can be 'bfill' and 'ffill'.
        References
        ----------
        .. [1] https://en.wikipedia.org/wiki/Imputation_(statistics)
        Examples
        --------
        Resampling a Series:
        >>> s = pd.Series([1, 2, 3],
        ...               index=pd.date_range('20180101', periods=3, freq='h'))
        >>> s
        2018-01-01 00:00:00    1
        2018-01-01 01:00:00    2
        2018-01-01 02:00:00    3
        Freq: H, dtype: int64
        Without filling the missing values you get:
        >>> s.resample("30min").asfreq()
        2018-01-01 00:00:00    1.0
        2018-01-01 00:30:00    NaN
        2018-01-01 01:00:00    2.0
        2018-01-01 01:30:00    NaN
        2018-01-01 02:00:00    3.0
        Freq: 30T, dtype: float64
        >>> s.resample('30min').fillna("backfill")
        2018-01-01 00:00:00    1
        2018-01-01 00:30:00    2
        2018-01-01 01:00:00    2
        2018-01-01 01:30:00    3
        2018-01-01 02:00:00    3
        Freq: 30T, dtype: int64
        >>> s.resample('15min').fillna("backfill", limit=2)
        2018-01-01 00:00:00    1.0
        2018-01-01 00:15:00    NaN
        2018-01-01 00:30:00    2.0
        2018-01-01 00:45:00    2.0
        2018-01-01 01:00:00    2.0
        2018-01-01 01:15:00    NaN
        2018-01-01 01:30:00    3.0
        2018-01-01 01:45:00    3.0
        2018-01-01 02:00:00    3.0
        Freq: 15T, dtype: float64
        >>> s.resample('30min').fillna("pad")
        2018-01-01 00:00:00    1
        2018-01-01 00:30:00    1
        2018-01-01 01:00:00    2
        2018-01-01 01:30:00    2
        2018-01-01 02:00:00    3
        Freq: 30T, dtype: int64
        >>> s.resample('30min').fillna("nearest")
        2018-01-01 00:00:00    1
        2018-01-01 00:30:00    2
        2018-01-01 01:00:00    2
        2018-01-01 01:30:00    3
        2018-01-01 02:00:00    3
        Freq: 30T, dtype: int64
        Missing values present before the upsampling are not affected.
        >>> sm = pd.Series([1, None, 3],
        ...               index=pd.date_range('20180101', periods=3, freq='h'))
        >>> sm
        2018-01-01 00:00:00    1.0
        2018-01-01 01:00:00    NaN
        2018-01-01 02:00:00    3.0
        Freq: H, dtype: float64
        >>> sm.resample('30min').fillna('backfill')
        2018-01-01 00:00:00    1.0
        2018-01-01 00:30:00    NaN
        2018-01-01 01:00:00    NaN
        2018-01-01 01:30:00    3.0
        2018-01-01 02:00:00    3.0
        Freq: 30T, dtype: float64
        >>> sm.resample('30min').fillna('pad')
        2018-01-01 00:00:00    1.0
        2018-01-01 00:30:00    1.0
        2018-01-01 01:00:00    NaN
        2018-01-01 01:30:00    NaN
        2018-01-01 02:00:00    3.0
        Freq: 30T, dtype: float64
        >>> sm.resample('30min').fillna('nearest')
        2018-01-01 00:00:00    1.0
        2018-01-01 00:30:00    NaN
        2018-01-01 01:00:00    NaN
        2018-01-01 01:30:00    3.0
        2018-01-01 02:00:00    3.0
        Freq: 30T, dtype: float64
        DataFrame resampling is done column-wise. All the same options are
        available.
        >>> df = pd.DataFrame({'a': [2, np.nan, 6], 'b': [1, 3, 5]},
        ...                   index=pd.date_range('20180101', periods=3,
        ...                                       freq='h'))
        >>> df
                               a  b
        2018-01-01 00:00:00  2.0  1
        2018-01-01 01:00:00  NaN  3
        2018-01-01 02:00:00  6.0  5
        >>> df.resample('30min').fillna("bfill")
                               a  b
        2018-01-01 00:00:00  2.0  1
        2018-01-01 00:30:00  NaN  3
        2018-01-01 01:00:00  NaN  3
        2018-01-01 01:30:00  6.0  5
        2018-01-01 02:00:00  6.0  5
        """
        return self._upsample(method, limit=limit)
    @Appender(_shared_docs['interpolate'] % _shared_docs_kwargs)
    def interpolate(self, method='linear', axis=0, limit=None, inplace=False,
                    limit_direction='forward', limit_area=None,
                    downcast=None, **kwargs):
        """
        Interpolate values according to different methods.
        .. versionadded:: 0.18.1
        """
        # upsample with no fill first, then interpolate the gaps
        result = self._upsample(None)
        return result.interpolate(method=method, axis=axis, limit=limit,
                                  inplace=inplace,
                                  limit_direction=limit_direction,
                                  limit_area=limit_area,
                                  downcast=downcast, **kwargs)
    def asfreq(self, fill_value=None):
        """
        Return the values at the new freq, essentially a reindex.
        Parameters
        ----------
        fill_value : scalar, optional
            Value to use for missing values, applied during upsampling (note
            this does not fill NaNs that already were present).
            .. versionadded:: 0.20.0
        Returns
        -------
        DataFrame or Series
            Values at the specified freq.
        See Also
        --------
        Series.asfreq
        DataFrame.asfreq
        """
        return self._upsample('asfreq', fill_value=fill_value)
    def std(self, ddof=1, *args, **kwargs):
        """
        Compute standard deviation of groups, excluding missing values.
        Parameters
        ----------
        ddof : integer, default 1
            Degrees of freedom.
        Returns
        -------
        DataFrame or Series
            Standard deviation of values within each group.
        """
        nv.validate_resampler_func('std', args, kwargs)
        return self._downsample('std', ddof=ddof)
    def var(self, ddof=1, *args, **kwargs):
        """
        Compute variance of groups, excluding missing values.
        Parameters
        ----------
        ddof : integer, default 1
            degrees of freedom
        Returns
        -------
        DataFrame or Series
            Variance of values within each group.
        """
        nv.validate_resampler_func('var', args, kwargs)
        return self._downsample('var', ddof=ddof)
    @Appender(GroupBy.size.__doc__)
    def size(self):
        # It's a special case as higher level does return
        # a copy of 0-len objects. GH14962
        result = self._downsample('size')
        if not len(self.ax) and isinstance(self._selected_obj, ABCDataFrame):
            result = pd.Series([], index=result.index, dtype='int64')
        return result
    def quantile(self, q=0.5, **kwargs):
        """
        Return value at the given quantile.
        .. versionadded:: 0.24.0
        Parameters
        ----------
        q : float or array-like, default 0.5 (50% quantile)
        Returns
        -------
        DataFrame or Series
            Quantile of values within each group.
        See Also
        --------
        Series.quantile
        DataFrame.quantile
        DataFrameGroupBy.quantile
        """
        return self._downsample('quantile', q=q, **kwargs)
# downsample methods
# NOTE: ``_method=method`` binds the loop variable at definition time
# (late-binding closure workaround) so each generated function keeps its
# own method name.
for method in ['sum', 'prod']:
    def f(self, _method=method, min_count=0, *args, **kwargs):
        nv.validate_resampler_func(_method, args, kwargs)
        return self._downsample(_method, min_count=min_count)
    f.__doc__ = getattr(GroupBy, method).__doc__
    setattr(Resampler, method, f)
# downsample methods
for method in ['min', 'max', 'first', 'last', 'mean', 'sem',
               'median', 'ohlc']:
    def g(self, _method=method, *args, **kwargs):
        nv.validate_resampler_func(_method, args, kwargs)
        return self._downsample(_method)
    g.__doc__ = getattr(GroupBy, method).__doc__
    setattr(Resampler, method, g)
# groupby & aggregate methods
for method in ['count']:
    def h(self, _method=method):
        return self._downsample(_method)
    h.__doc__ = getattr(GroupBy, method).__doc__
    setattr(Resampler, method, h)
# series only methods
for method in ['nunique']:
    def h(self, _method=method):
        return self._downsample(_method)
    h.__doc__ = getattr(SeriesGroupBy, method).__doc__
    setattr(Resampler, method, h)
def _maybe_process_deprecations(r, how=None, fill_method=None, limit=None):
"""
Potentially we might have a deprecation warning, show it
but call the appropriate methods anyhow.
"""
if how is not None:
# .resample(..., how='sum')
if isinstance(how, str):
method = "{0}()".format(how)
# .resample(..., how=lambda x: ....)
else:
method = ".apply(<func>)"
# if we have both a how and fill_method, then show
# the following warning
if fill_method is None:
warnings.warn("how in .resample() is deprecated\n"
"the new syntax is "
".resample(...).{method}".format(
method=method),
FutureWarning, stacklevel=3)
r = r.aggregate(how)
if fill_method is not None:
# show the prior function call
method = '.' + method if how is not None else ''
args = "limit={0}".format(limit) if limit is not None else ""
warnings.warn("fill_method is deprecated to .resample()\n"
"the new syntax is .resample(...){method}"
".{fill_method}({args})".format(
method=method,
fill_method=fill_method,
args=args),
FutureWarning, stacklevel=3)
if how is not None:
r = getattr(r, fill_method)(limit=limit)
else:
r = r.aggregate(fill_method, limit=limit)
return r
class _GroupByMixin(GroupByMixin):
    """
    Provide the groupby facilities.
    """
    def __init__(self, obj, *args, **kwargs):
        # ``parent`` (defaulting to ``obj``) is the resampler whose
        # attributes are copied onto this object; ``groupby`` is the groupby
        # object whose groups every resample operation is dispatched over
        parent = kwargs.pop('parent', None)
        groupby = kwargs.pop('groupby', None)
        if parent is None:
            parent = obj
        # initialize our GroupByMixin object with
        # the resampler attributes
        for attr in self._attributes:
            setattr(self, attr, kwargs.get(attr, getattr(parent, attr)))
        super().__init__(None)
        self._groupby = groupby
        self._groupby.mutated = True
        self._groupby.grouper.mutated = True
        self.groupby = copy.copy(parent.groupby)
    @no_type_check
    def _apply(self, f, grouper=None, *args, **kwargs):
        """
        Dispatch to _upsample; we are stripping all of the _upsample kwargs and
        performing the original function call on the grouped object.
        """
        def func(x):
            # re-wrap each group as a resampler, then run ``f`` on it
            x = self._shallow_copy(x, groupby=self.groupby)
            if isinstance(f, str):
                return getattr(x, f)(**kwargs)
            return x.apply(f, *args, **kwargs)
        result = self._groupby.apply(func)
        return self._wrap_result(result)
    # every resample operation on a grouped object routes through _apply
    _upsample = _apply
    _downsample = _apply
    _groupby_and_aggregate = _apply
class DatetimeIndexResampler(Resampler):
    # Resampler implementation for a DatetimeIndex-backed object.
    @property
    def _resampler_for_grouping(self):
        # class used when resampling within a groupby
        return DatetimeIndexResamplerGroupby
    def _get_binner_for_time(self):
        # this is how we are actually creating the bins
        if self.kind == 'period':
            return self.groupby._get_time_period_bins(self.ax)
        return self.groupby._get_time_bins(self.ax)
    def _downsample(self, how, **kwargs):
        """
        Downsample the cython defined function.
        Parameters
        ----------
        how : string / cython mapped function
        **kwargs : kw args passed to how function
        """
        self._set_binner()
        how = self._is_cython_func(how) or how
        ax = self.ax
        obj = self._selected_obj
        if not len(ax):
            # reset to the new freq
            obj = obj.copy()
            obj.index.freq = self.freq
            return obj
        # do we have a regular frequency
        if ax.freq is not None or ax.inferred_freq is not None:
            # more bins than data points and no reducer: this is really
            # an upsample, so reindex via asfreq
            if len(self.grouper.binlabels) > len(ax) and how is None:
                # let's do an asfreq
                return self.asfreq()
        # we are downsampling
        # we want to call the actual grouper method here
        result = obj.groupby(
            self.grouper, axis=self.axis).aggregate(how, **kwargs)
        result = self._apply_loffset(result)
        return self._wrap_result(result)
    def _adjust_binner_for_upsample(self, binner):
        """
        Adjust our binner when upsampling.
        The range of a new index should not be outside specified range
        """
        if self.closed == 'right':
            binner = binner[1:]
        else:
            binner = binner[:-1]
        return binner
    def _upsample(self, method, limit=None, fill_value=None):
        """
        Parameters
        ----------
        method : string {'backfill', 'bfill', 'pad',
            'ffill', 'asfreq'} method for upsampling
        limit : int, default None
            Maximum size gap to fill when reindexing
        fill_value : scalar, default None
            Value to use for missing values
        See Also
        --------
        .fillna
        """
        self._set_binner()
        if self.axis:
            raise AssertionError('axis must be 0')
        if self._from_selection:
            raise ValueError("Upsampling from level= or on= selection"
                             " is not supported, use .set_index(...)"
                             " to explicitly set index to"
                             " datetime-like")
        ax = self.ax
        obj = self._selected_obj
        binner = self.binner
        res_index = self._adjust_binner_for_upsample(binner)
        # if we have the same frequency as our axis, then we are equal sampling
        if limit is None and to_offset(ax.inferred_freq) == self.freq:
            result = obj.copy()
            result.index = res_index
        else:
            result = obj.reindex(res_index, method=method,
                                 limit=limit, fill_value=fill_value)
        result = self._apply_loffset(result)
        return self._wrap_result(result)
    def _wrap_result(self, result):
        result = super()._wrap_result(result)
        # we may have a different kind that we were asked originally
        # convert if needed
        if self.kind == 'period' and not isinstance(result.index, PeriodIndex):
            result.index = result.index.to_period(self.freq)
        return result
class DatetimeIndexResamplerGroupby(_GroupByMixin, DatetimeIndexResampler):
    """
    Provides a resample of a groupby implementation
    .. versionadded:: 0.18.1
    """
    # _GroupByMixin comes first in the MRO so its _apply-based dispatch
    # overrides the plain resampler's _upsample/_downsample
    @property
    def _constructor(self):
        return DatetimeIndexResampler
class PeriodIndexResampler(DatetimeIndexResampler):
    # Resampler implementation for a PeriodIndex-backed object; falls back
    # to the DatetimeIndex behavior when kind == 'timestamp'.
    @property
    def _resampler_for_grouping(self):
        # class used when resampling within a groupby
        return PeriodIndexResamplerGroupby
    def _get_binner_for_time(self):
        if self.kind == 'timestamp':
            return super()._get_binner_for_time()
        return self.groupby._get_period_bins(self.ax)
    def _convert_obj(self, obj):
        obj = super()._convert_obj(obj)
        if self._from_selection:
            # see GH 14008, GH 12871
            msg = ("Resampling from level= or on= selection"
                   " with a PeriodIndex is not currently supported,"
                   " use .set_index(...) to explicitly set index")
            raise NotImplementedError(msg)
        if self.loffset is not None:
            # Cannot apply loffset/timedelta to PeriodIndex -> convert to
            # timestamps
            self.kind = 'timestamp'
        # convert to timestamp
        if self.kind == 'timestamp':
            obj = obj.to_timestamp(how=self.convention)
        return obj
    def _downsample(self, how, **kwargs):
        """
        Downsample the cython defined function.
        Parameters
        ----------
        how : string / cython mapped function
        **kwargs : kw args passed to how function
        """
        # we may need to actually resample as if we are timestamps
        if self.kind == 'timestamp':
            return super()._downsample(how, **kwargs)
        how = self._is_cython_func(how) or how
        ax = self.ax
        if is_subperiod(ax.freq, self.freq):
            # Downsampling
            return self._groupby_and_aggregate(how, grouper=self.grouper,
                                               **kwargs)
        elif is_superperiod(ax.freq, self.freq):
            if how == 'ohlc':
                # GH #13083
                # upsampling to subperiods is handled as an asfreq, which works
                # for pure aggregating/reducing methods
                # OHLC reduces along the time dimension, but creates multiple
                # values for each period -> handle by _groupby_and_aggregate()
                return self._groupby_and_aggregate(how, grouper=self.grouper)
            return self.asfreq()
        elif ax.freq == self.freq:
            return self.asfreq()
        raise IncompatibleFrequency(
            'Frequency {} cannot be resampled to {}, as they are not '
            'sub or super periods'.format(ax.freq, self.freq))
    def _upsample(self, method, limit=None, fill_value=None):
        """
        Parameters
        ----------
        method : string {'backfill', 'bfill', 'pad', 'ffill'}
            method for upsampling
        limit : int, default None
            Maximum size gap to fill when reindexing
        fill_value : scalar, default None
            Value to use for missing values
        See Also
        --------
        .fillna
        """
        # we may need to actually resample as if we are timestamps
        if self.kind == 'timestamp':
            return super()._upsample(method, limit=limit,
                                     fill_value=fill_value)
        self._set_binner()
        ax = self.ax
        obj = self.obj
        new_index = self.binner
        # Start vs. end of period
        memb = ax.asfreq(self.freq, how=self.convention)
        # Get the fill indexer
        indexer = memb.get_indexer(new_index, method=method, limit=limit)
        return self._wrap_result(_take_new_index(
            obj, indexer, new_index, axis=self.axis))
class PeriodIndexResamplerGroupby(_GroupByMixin, PeriodIndexResampler):
    """
    Provides a resample of a groupby implementation.
    .. versionadded:: 0.18.1
    """
    @property
    def _constructor(self):
        # the plain (non-groupby) resampler class used to rebuild results
        return PeriodIndexResampler
class TimedeltaIndexResampler(DatetimeIndexResampler):
    # Resampler for a TimedeltaIndex axis.
    @property
    def _resampler_for_grouping(self):
        # groupby-flavoured counterpart of this resampler
        return TimedeltaIndexResamplerGroupby
    def _get_binner_for_time(self):
        # timedelta bins come straight from the TimeGrouper
        return self.groupby._get_time_delta_bins(self.ax)
    def _adjust_binner_for_upsample(self, binner):
        """
        Adjust our binner when upsampling.
        The range of a new index is allowed to be greater than original range
        so we don't need to change the length of a binner, GH 13022
        """
        return binner
class TimedeltaIndexResamplerGroupby(_GroupByMixin, TimedeltaIndexResampler):
    """
    Provides a resample of a groupby implementation.
    .. versionadded:: 0.18.1
    """
    @property
    def _constructor(self):
        # the plain (non-groupby) resampler class used to rebuild results
        return TimedeltaIndexResampler
def resample(obj, kind=None, **kwds):
    """
    Build a TimeGrouper from the given keywords and hand back the
    resampler it creates for ``obj``.
    """
    return TimeGrouper(**kwds)._get_resampler(obj, kind=kind)
resample.__doc__ = Resampler.__doc__
def get_resampler_for_grouping(groupby, rule, how=None, fill_method=None,
                               limit=None, kind=None, **kwargs):
    """
    Return the appropriate resampler for resampling within a groupby.
    """
    # .resample uses 'on' similar to how .groupby uses 'key'
    kwargs['key'] = kwargs.pop('on', None)
    grouper = TimeGrouper(freq=rule, **kwargs)
    base_resampler = grouper._get_resampler(groupby.obj, kind=kind)
    grouped_resampler = base_resampler._get_resampler_for_grouping(
        groupby=groupby)
    return _maybe_process_deprecations(
        grouped_resampler, how=how, fill_method=fill_method, limit=limit)
class TimeGrouper(Grouper):
    """
    Custom groupby class for time-interval grouping.
    Parameters
    ----------
    freq : pandas date offset or offset alias for identifying bin edges
    closed : closed end of interval; 'left' or 'right'
    label : interval boundary to use for labeling; 'left' or 'right'
    convention : {'start', 'end', 'e', 's'}
        If axis is PeriodIndex
    """
    _attributes = Grouper._attributes + ('closed', 'label', 'how',
                                         'loffset', 'kind', 'convention',
                                         'base')
    def __init__(self, freq='Min', closed=None, label=None, how='mean',
                 axis=0, fill_method=None, limit=None, loffset=None,
                 kind=None, convention=None, base=0, **kwargs):
        # Check for correctness of the keyword arguments which would
        # otherwise silently use the default if misspelled
        if label not in {None, 'left', 'right'}:
            raise ValueError('Unsupported value {} for `label`'.format(label))
        if closed not in {None, 'left', 'right'}:
            raise ValueError('Unsupported value {} for `closed`'.format(
                closed))
        if convention not in {None, 'start', 'end', 'e', 's'}:
            raise ValueError('Unsupported value {} for `convention`'
                             .format(convention))
        freq = to_offset(freq)
        # period-end style frequencies default both closed and label to
        # 'right'; all other frequencies default to 'left'
        end_types = {'M', 'A', 'Q', 'BM', 'BA', 'BQ', 'W'}
        rule = freq.rule_code
        if (rule in end_types or
                ('-' in rule and rule[:rule.find('-')] in end_types)):
            if closed is None:
                closed = 'right'
            if label is None:
                label = 'right'
        else:
            if closed is None:
                closed = 'left'
            if label is None:
                label = 'left'
        self.closed = closed
        self.label = label
        self.kind = kind
        self.convention = convention or 'E'
        self.convention = self.convention.lower()
        if isinstance(loffset, str):
            loffset = to_offset(loffset)
        self.loffset = loffset
        self.how = how
        self.fill_method = fill_method
        self.limit = limit
        self.base = base
        # always sort time groupers
        kwargs['sort'] = True
        super().__init__(freq=freq, axis=axis, **kwargs)
    def _get_resampler(self, obj, kind=None):
        """
        Return my resampler or raise if we have an invalid axis.
        Parameters
        ----------
        obj : input object
        kind : string, optional
            'period','timestamp','timedelta' are valid
        Returns
        -------
        a Resampler
        Raises
        ------
        TypeError if incompatible axis
        """
        self._set_grouper(obj)
        ax = self.ax
        if isinstance(ax, DatetimeIndex):
            return DatetimeIndexResampler(obj,
                                          groupby=self,
                                          kind=kind,
                                          axis=self.axis)
        elif isinstance(ax, PeriodIndex) or kind == 'period':
            return PeriodIndexResampler(obj,
                                        groupby=self,
                                        kind=kind,
                                        axis=self.axis)
        elif isinstance(ax, TimedeltaIndex):
            return TimedeltaIndexResampler(obj,
                                           groupby=self,
                                           axis=self.axis)
        raise TypeError("Only valid with DatetimeIndex, "
                        "TimedeltaIndex or PeriodIndex, "
                        "but got an instance of %r" % type(ax).__name__)
    def _get_grouper(self, obj, validate=True):
        """Build the resampler for ``obj`` and return (binner, grouper, obj)."""
        # create the resampler and return our binner
        r = self._get_resampler(obj)
        r._set_binner()
        return r.binner, r.grouper, r.obj
    def _get_time_bins(self, ax):
        """Return (binner, bins, labels) for a DatetimeIndex ``ax``."""
        if not isinstance(ax, DatetimeIndex):
            raise TypeError('axis must be a DatetimeIndex, but got '
                            'an instance of %r' % type(ax).__name__)
        if len(ax) == 0:
            binner = labels = DatetimeIndex(
                data=[], freq=self.freq, name=ax.name)
            return binner, [], labels
        first, last = _get_timestamp_range_edges(ax.min(), ax.max(),
                                                 self.freq,
                                                 closed=self.closed,
                                                 base=self.base)
        # GH #12037
        # use first/last directly instead of call replace() on them
        # because replace() will swallow the nanosecond part
        # thus last bin maybe slightly before the end if the end contains
        # nanosecond part and lead to `Values falls after last bin` error
        binner = labels = date_range(freq=self.freq,
                                     start=first,
                                     end=last,
                                     tz=ax.tz,
                                     name=ax.name,
                                     ambiguous='infer',
                                     nonexistent='shift_forward')
        ax_values = ax.asi8
        binner, bin_edges = self._adjust_bin_edges(binner, ax_values)
        # general version, knowing nothing about relative frequencies
        bins = lib.generate_bins_dt64(
            ax_values, bin_edges, self.closed, hasnans=ax.hasnans)
        if self.closed == 'right':
            labels = binner
            if self.label == 'right':
                labels = labels[1:]
        elif self.label == 'right':
            labels = labels[1:]
        if ax.hasnans:
            # NaT values go into an extra leading bin
            binner = binner.insert(0, NaT)
            labels = labels.insert(0, NaT)
        # if we end up with more labels than bins
        # adjust the labels
        # GH4076
        if len(bins) < len(labels):
            labels = labels[:len(bins)]
        return binner, bins, labels
    def _adjust_bin_edges(self, binner, ax_values):
        """Return (binner, bin_edges) with end-of-day adjustments applied."""
        # Some hacks for > daily data, see #1471, #1458, #1483
        if self.freq != 'D' and is_superperiod(self.freq, 'D'):
            if self.closed == 'right':
                # GH 21459, GH 9119: Adjust the bins relative to the wall time
                bin_edges = binner.tz_localize(None)
                bin_edges = bin_edges + timedelta(1) - Nano(1)
                bin_edges = bin_edges.tz_localize(binner.tz).asi8
            else:
                bin_edges = binner.asi8
            # intraday values on last day
            if bin_edges[-2] > ax_values.max():
                bin_edges = bin_edges[:-1]
                binner = binner[:-1]
        else:
            bin_edges = binner.asi8
        return binner, bin_edges
    def _get_time_delta_bins(self, ax):
        """Return (binner, bins, labels) for a TimedeltaIndex ``ax``."""
        if not isinstance(ax, TimedeltaIndex):
            raise TypeError('axis must be a TimedeltaIndex, but got '
                            'an instance of %r' % type(ax).__name__)
        if not len(ax):
            binner = labels = TimedeltaIndex(
                data=[], freq=self.freq, name=ax.name)
            return binner, [], labels
        start, end = ax.min(), ax.max()
        labels = binner = timedelta_range(start=start,
                                          end=end,
                                          freq=self.freq,
                                          name=ax.name)
        end_stamps = labels + self.freq
        bins = ax.searchsorted(end_stamps, side='left')
        # Addresses GH #10530
        if self.base > 0:
            labels += type(self.freq)(self.base)
        return binner, bins, labels
    def _get_time_period_bins(self, ax):
        """Return (binner, bins, labels) binning a DatetimeIndex into periods."""
        if not isinstance(ax, DatetimeIndex):
            raise TypeError('axis must be a DatetimeIndex, but got '
                            'an instance of %r' % type(ax).__name__)
        freq = self.freq
        if not len(ax):
            binner = labels = PeriodIndex(data=[], freq=freq, name=ax.name)
            return binner, [], labels
        labels = binner = pd.period_range(start=ax[0],
                                          end=ax[-1],
                                          freq=freq,
                                          name=ax.name)
        # end-of-period timestamps delimit the bins
        end_stamps = (labels + freq).asfreq(freq, 's').to_timestamp()
        if ax.tzinfo:
            end_stamps = end_stamps.tz_localize(ax.tzinfo)
        bins = ax.searchsorted(end_stamps, side='left')
        return binner, bins, labels
    def _get_period_bins(self, ax):
        """Return (binner, bins, labels) for a PeriodIndex ``ax``."""
        if not isinstance(ax, PeriodIndex):
            raise TypeError('axis must be a PeriodIndex, but got '
                            'an instance of %r' % type(ax).__name__)
        memb = ax.asfreq(self.freq, how=self.convention)
        # NaT handling as in pandas._lib.lib.generate_bins_dt64()
        nat_count = 0
        if memb.hasnans:
            nat_count = np.sum(memb._isnan)
            memb = memb[~memb._isnan]
        # if index contains no valid (non-NaT) values, return empty index
        if not len(memb):
            binner = labels = PeriodIndex(
                data=[], freq=self.freq, name=ax.name)
            return binner, [], labels
        freq_mult = self.freq.n
        start = ax.min().asfreq(self.freq, how=self.convention)
        end = ax.max().asfreq(self.freq, how='end')
        bin_shift = 0
        # GH 23882
        if self.base:
            # get base adjusted bin edge labels
            p_start, end = _get_period_range_edges(start,
                                                   end,
                                                   self.freq,
                                                   closed=self.closed,
                                                   base=self.base)
            # Get offset for bin edge (not label edge) adjustment
            start_offset = (pd.Period(start, self.freq)
                            - pd.Period(p_start, self.freq))
            bin_shift = start_offset.n % freq_mult
            start = p_start
        labels = binner = pd.period_range(start=start, end=end,
                                          freq=self.freq, name=ax.name)
        i8 = memb.asi8
        # when upsampling to subperiods, we need to generate enough bins
        expected_bins_count = len(binner) * freq_mult
        i8_extend = expected_bins_count - (i8[-1] - i8[0])
        rng = np.arange(i8[0], i8[-1] + i8_extend, freq_mult)
        rng += freq_mult
        # adjust bin edge indexes to account for base
        rng -= bin_shift
        bins = memb.searchsorted(rng, side='left')
        if nat_count > 0:
            # NaT handling as in pandas._lib.lib.generate_bins_dt64()
            # shift bins by the number of NaT
            bins += nat_count
            bins = np.insert(bins, 0, nat_count)
            binner = binner.insert(0, NaT)
            labels = labels.insert(0, NaT)
        return binner, bins, labels
def _take_new_index(obj, indexer, new_index, axis=0):
    """
    Rebuild ``obj`` on ``new_index`` by taking rows via a precomputed
    integer ``indexer`` (as produced by ``Index.get_indexer``).
    """
    from pandas.core.api import Series, DataFrame
    if isinstance(obj, Series):
        taken = algos.take_1d(obj.values, indexer)
        return Series(taken, index=new_index, name=obj.name)
    if isinstance(obj, DataFrame):
        if axis == 1:
            raise NotImplementedError("axis 1 is not supported")
        new_mgr = obj._data.reindex_indexer(new_axis=new_index,
                                            indexer=indexer, axis=1)
        return DataFrame(new_mgr)
    raise ValueError("'obj' should be either a Series or a DataFrame")
def _get_timestamp_range_edges(first, last, offset, closed='left', base=0):
    """
    Adjust the `first` Timestamp to the preceding Timestamp that resides on
    the provided offset. Adjust the `last` Timestamp to the following
    Timestamp that resides on the provided offset. Input Timestamps that
    already reside on the offset will be adjusted depending on the type of
    offset and the `closed` parameter.
    Parameters
    ----------
    first : pd.Timestamp
        The beginning Timestamp of the range to be adjusted.
    last : pd.Timestamp
        The ending Timestamp of the range to be adjusted.
    offset : pd.DateOffset
        The dateoffset to which the Timestamps will be adjusted.
    closed : {'right', 'left'}, default None
        Which side of bin interval is closed.
    base : int, default 0
        The "origin" of the adjusted Timestamps.
    Returns
    -------
    A tuple of length 2, containing the adjusted pd.Timestamp objects.
    """
    if isinstance(offset, Tick):
        # fixed-size offsets are anchored via _adjust_dates_anchored
        if isinstance(offset, Day):
            # _adjust_dates_anchored assumes 'D' means 24H, but first/last
            # might contain a DST transition (23H, 24H, or 25H).
            # So "pretend" the dates are naive when adjusting the endpoints
            tz = first.tz
            first = first.tz_localize(None)
            last = last.tz_localize(None)
        first, last = _adjust_dates_anchored(first, last, offset,
                                             closed=closed, base=base)
        if isinstance(offset, Day):
            # restore the timezone stripped above
            first = first.tz_localize(tz)
            last = last.tz_localize(tz)
        return first, last
    else:
        # non-fixed offsets: snap both endpoints to midnight first
        first = first.normalize()
        last = last.normalize()
    if closed == 'left':
        first = Timestamp(offset.rollback(first))
    else:
        first = Timestamp(first - offset)
    last = Timestamp(last + offset)
    return first, last
def _get_period_range_edges(first, last, offset, closed='left', base=0):
    """
    Adjust the provided `first` and `last` Periods to the respective Period of
    the given offset that encompasses them.
    Parameters
    ----------
    first : pd.Period
        The beginning Period of the range to be adjusted.
    last : pd.Period
        The ending Period of the range to be adjusted.
    offset : pd.DateOffset
        The dateoffset to which the Periods will be adjusted.
    closed : {'right', 'left'}, default None
        Which side of bin interval is closed.
    base : int, default 0
        The "origin" of the adjusted Periods.
    Returns
    -------
    A tuple of length 2, containing the adjusted pd.Period objects.
    """
    if not all(isinstance(obj, pd.Period) for obj in [first, last]):
        raise TypeError("'first' and 'last' must be instances of type Period")
    # GH 23882
    # work in timestamp space, then convert back to periods below
    first = first.to_timestamp()
    last = last.to_timestamp()
    # flags compensating for the over/under-shoot that the timestamp-edge
    # adjustment introduces when an endpoint already sits on the offset
    adjust_first = not offset.onOffset(first)
    adjust_last = offset.onOffset(last)
    first, last = _get_timestamp_range_edges(first, last, offset,
                                             closed=closed, base=base)
    first = (first + adjust_first * offset).to_period(offset)
    last = (last - adjust_last * offset).to_period(offset)
    return first, last
def _adjust_dates_anchored(first, last, offset, closed='right', base=0):
    """
    Snap `first` and `last` onto the grid of `offset` anchored at the start
    of `first`'s day (shifted by `base`), returning the adjusted Timestamps.
    """
    # First and last offsets should be calculated from the start day to fix an
    # error cause by resampling across multiple days when a one day period is
    # not a multiple of the frequency.
    #
    # See https://github.com/pandas-dev/pandas/issues/8683
    # GH 10117 & GH 19375. If first and last contain timezone information,
    # Perform the calculation in UTC in order to avoid localizing on an
    # Ambiguous or Nonexistent time.
    first_tzinfo = first.tzinfo
    last_tzinfo = last.tzinfo
    start_day_nanos = first.normalize().value
    if first_tzinfo is not None:
        first = first.tz_convert('UTC')
    if last_tzinfo is not None:
        last = last.tz_convert('UTC')
    # shift the anchor by the requested base, in nanoseconds
    base_nanos = (base % offset.n) * offset.nanos // offset.n
    start_day_nanos += base_nanos
    # distance of each endpoint from the nearest anchored grid point below it
    foffset = (first.value - start_day_nanos) % offset.nanos
    loffset = (last.value - start_day_nanos) % offset.nanos
    if closed == 'right':
        if foffset > 0:
            # roll back
            fresult = first.value - foffset
        else:
            fresult = first.value - offset.nanos
        if loffset > 0:
            # roll forward
            lresult = last.value + (offset.nanos - loffset)
        else:
            # already the end of the road
            lresult = last.value
    else:  # closed == 'left'
        if foffset > 0:
            fresult = first.value - foffset
        else:
            # start of the road
            fresult = first.value
        if loffset > 0:
            # roll forward
            lresult = last.value + (offset.nanos - loffset)
        else:
            lresult = last.value + offset.nanos
    fresult = Timestamp(fresult)
    lresult = Timestamp(lresult)
    if first_tzinfo is not None:
        fresult = fresult.tz_localize('UTC').tz_convert(first_tzinfo)
    if last_tzinfo is not None:
        lresult = lresult.tz_localize('UTC').tz_convert(last_tzinfo)
    return fresult, lresult
def asfreq(obj, freq, method=None, how=None, normalize=False, fill_value=None):
    """
    Utility frequency conversion method for Series/DataFrame.

    Converts ``obj`` to the given ``freq``: PeriodIndex objects are
    converted in place via ``PeriodIndex.asfreq``; datetime-like indexes
    are reindexed onto a freshly generated ``date_range``.
    """
    if isinstance(obj.index, PeriodIndex):
        if method is not None:
            raise NotImplementedError("'method' argument is not supported")
        new_obj = obj.copy()
        # 'E' (end) is the default anchoring convention
        new_obj.index = obj.index.asfreq(freq, how='E' if how is None else how)
    elif len(obj.index) == 0:
        # nothing to reindex against; just retag the (empty) index's freq
        new_obj = obj.copy()
        new_obj.index = obj.index._shallow_copy(freq=to_offset(freq))
    else:
        new_index = date_range(obj.index[0], obj.index[-1], freq=freq)
        new_index.name = obj.index.name
        new_obj = obj.reindex(new_index, method=method, fill_value=fill_value)
    if normalize:
        new_obj.index = new_obj.index.normalize()
    return new_obj
| {
"content_hash": "a0260599246287a0ecb69b7a32e83af5",
"timestamp": "",
"source": "github",
"line_count": 1780,
"max_line_length": 79,
"avg_line_length": 32.613483146067416,
"alnum_prop": 0.5455109212430235,
"repo_name": "cbertinato/pandas",
"id": "632b5a9c5e0024135b873c0c307af85842400425",
"size": "58052",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas/core/resample.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "541"
},
{
"name": "C",
"bytes": "394466"
},
{
"name": "C++",
"bytes": "17248"
},
{
"name": "HTML",
"bytes": "606963"
},
{
"name": "Makefile",
"bytes": "529"
},
{
"name": "Python",
"bytes": "15010333"
},
{
"name": "Shell",
"bytes": "27209"
},
{
"name": "Smarty",
"bytes": "2040"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
try:
import cPickle as pickle
except ImportError:
import pickle
from src.utils.utils import to_str, to_unicode
#from src.typeclasses.models import PackedDBobject,PackedDict,PackedList
from src.players.models import PlayerAttribute
from django.contrib.contenttypes.models import ContentType
# Module-level shortcuts used throughout this migration:
CTYPEGET = ContentType.objects.get  # look up a ContentType entry
GA = object.__getattribute__  # raw attribute read, bypassing overloads
SA = object.__setattr__  # raw attribute write, bypassing overloads
DA = object.__delattr__  # raw attribute delete, bypassing overloads
class PackedDBobject(object):
    """
    Attribute helper class.
    A lightweight stand-in for a database object, recording just enough
    information (id, model name and key) to identify and re-fetch it,
    since db_objects cannot be stored in the database directly.
    """
    def __init__(self, ID, db_model, db_key):
        self.id, self.db_model, self.key = ID, db_model, db_key
    def __str__(self):
        return "%s(#%s)" % (self.key, self.id)
    def __unicode__(self):
        return u"%s(#%s)" % (self.key, self.id)
class PackedDict(dict):
    """
    Attribute helper class.
    A variant of dict that stores itself to the database when
    updating one of its keys. This is called and handled by
    Attribute.validate_data().
    """
    def __init__(self, db_obj, *args, **kwargs):
        """
        Sets up the packing dict.
        db_obj - the Attribute object storing this dict.
        The 'parent' property is set to 'init' at creation;
        this stops the system from saving itself over and over
        when first assigning the dict. Once initialization
        is over, the Attribute from_attr() method will assign
        the parent (or None, if at the root).
        """
        self.db_obj = db_obj
        self.parent = 'init'
        super(PackedDict, self).__init__(*args, **kwargs)
    def __str__(self):
        return "{%s}" % ", ".join("%s:%s" % (key, str(val)) for key, val in self.items())
    def save(self):
        "Relay save operation upwards in tree until we hit the root."
        if self.parent == 'init':
            pass
        elif self.parent:
            self.parent.save()
        else:
            self.db_obj.value = self
    def __setitem__(self, *args, **kwargs):
        "assign item to this dict, then persist"
        super(PackedDict, self).__setitem__(*args, **kwargs)
        self.save()
    def __delitem__(self, *args, **kwargs):
        "delete with del self[key], then persist"
        super(PackedDict, self).__delitem__(*args, **kwargs)
        self.save()
    def clear(self, *args, **kwargs):
        "Custom clear"
        super(PackedDict, self).clear(*args, **kwargs)
        self.save()
    def pop(self, *args, **kwargs):
        "Custom pop; returns the popped value"
        ret = super(PackedDict, self).pop(*args, **kwargs)
        self.save()
        return ret
    def popitem(self, *args, **kwargs):
        "Custom popitem; returns the popped (key, value) pair"
        ret = super(PackedDict, self).popitem(*args, **kwargs)
        self.save()
        return ret
    def setdefault(self, *args, **kwargs):
        "Custom setdefault; returns the value like dict.setdefault"
        # BUGFIX: the previous version dropped the return value and always
        # returned None, violating the dict.setdefault contract.
        ret = super(PackedDict, self).setdefault(*args, **kwargs)
        self.save()
        return ret
    def update(self, *args, **kwargs):
        "Custom update"
        super(PackedDict, self).update(*args, **kwargs)
        self.save()
class PackedList(list):
    """
    Attribute helper class.
    A list subclass that re-saves itself to the database whenever it is
    mutated in place. This is called and handled by
    Attribute.validate_data().
    """
    def __init__(self, db_obj, *args, **kwargs):
        """
        db_obj - the Attribute object storing this list.
        'parent' starts out as 'init', which disables saving while the
        structure is first being built; once loading is done, the
        Attribute from_attr() method assigns the real parent (or None
        when this list is the root of the structure).
        """
        self.db_obj = db_obj
        self.parent = 'init'
        list.__init__(self, *args, **kwargs)
    def __str__(self):
        return "[%s]" % ", ".join(str(val) for val in self)
    def save(self):
        "Relay the save operation up the tree until the root is reached."
        if self.parent == 'init':
            return
        if self.parent:
            self.parent.save()
            return
        self.db_obj.value = self
    def __setitem__(self, *args, **kwargs):
        "item assignment, then persist"
        list.__setitem__(self, *args, **kwargs)
        self.save()
    def __delitem__(self, *args, **kwargs):
        "del self[index], then persist"
        list.__delitem__(self, *args, **kwargs)
        self.save()
    def append(self, *args, **kwargs):
        "append, then persist"
        list.append(self, *args, **kwargs)
        self.save()
    def extend(self, *args, **kwargs):
        "extend, then persist"
        list.extend(self, *args, **kwargs)
        self.save()
    def insert(self, *args, **kwargs):
        "insert, then persist"
        list.insert(self, *args, **kwargs)
        self.save()
    def remove(self, *args, **kwargs):
        "remove, then persist"
        list.remove(self, *args, **kwargs)
        self.save()
    def pop(self, *args, **kwargs):
        "pop, persist, and return the popped element"
        popped = list.pop(self, *args, **kwargs)
        self.save()
        return popped
    def reverse(self, *args, **kwargs):
        "reverse in place, then persist"
        list.reverse(self, *args, **kwargs)
        self.save()
    def sort(self, *args, **kwargs):
        "sort in place, then persist"
        list.sort(self, *args, **kwargs)
        self.save()
class PackedSet(set):
    """
    A variant of set that stores new updates to the database.
    """
    def __init__(self, db_obj, *args, **kwargs):
        """
        Sets up the packing set.
        db_obj - the Attribute object storing this set.
        The 'parent' property is set to 'init' at creation;
        this stops the system from saving itself over and over
        when first assigning the set. Once initialization
        is over, the Attribute from_attr() method will assign
        the parent (or None, if at the root).
        """
        self.db_obj = db_obj
        self.parent = 'init'
        super(PackedSet, self).__init__(*args, **kwargs)
    def __str__(self):
        return "{%s}" % ", ".join(str(val) for val in self)
    def save(self):
        "Relay save operation upwards in tree until we hit the root."
        if self.parent == 'init':
            pass
        elif self.parent:
            self.parent.save()
        else:
            self.db_obj.value = self
    def add(self, *args, **kwargs):
        "Add an element to the set"
        super(PackedSet, self).add(*args, **kwargs)
        self.save()
    def clear(self, *args, **kwargs):
        "Remove all elements from this set"
        super(PackedSet, self).clear(*args, **kwargs)
        self.save()
    def difference_update(self, *args, **kwargs):
        "Remove all elements of another set from this set."
        super(PackedSet, self).difference_update(*args, **kwargs)
        self.save()
    def discard(self, *args, **kwargs):
        "Remove an element from a set if it is a member.\nIf not a member, do nothing."
        super(PackedSet, self).discard(*args, **kwargs)
        self.save()
    def intersection_update(self, *args, **kwargs):
        "Update a set with the intersection of itself and another."
        super(PackedSet, self).intersection_update(*args, **kwargs)
        self.save()
    def pop(self, *args, **kwargs):
        "Remove and return an arbitrary set element.\nRaises KeyError if the set is empty."
        # BUGFIX: the previous version discarded the popped element and
        # returned None, violating the set.pop contract.
        ret = super(PackedSet, self).pop(*args, **kwargs)
        self.save()
        return ret
    def remove(self, *args, **kwargs):
        "Remove an element from a set; it must be a member.\nIf the element is not a member, raise a KeyError."
        super(PackedSet, self).remove(*args, **kwargs)
        self.save()
    def symmetric_difference_update(self, *args, **kwargs):
        "Update a set with the symmetric difference of itself and another."
        super(PackedSet, self).symmetric_difference_update(*args, **kwargs)
        self.save()
    def update(self, *args, **kwargs):
        "Update a set with the union of itself and others."
        super(PackedSet, self).update(*args, **kwargs)
        self.save()
def to_attr(data):
    """
    Convert data to proper attr data format before saving
    We have to make sure to not store database objects raw, since
    this will crash the system. Instead we must store their IDs
    and make sure to convert back when the attribute is read back
    later.
    Due to this it's criticial that we check all iterables
    recursively, converting all found database objects to a form
    the database can handle. We handle lists, tuples and dicts
    (and any nested combination of them) this way, all other
    iterables are stored and returned as lists.
    data storage format:
       (simple|dbobj|iter, <data>)
    where
       simple - a single non-db object, like a string or number
       dbobj - a single dbobj
       iter - any iterable object - will be looped over recursively
              to convert dbobj->id.
    """
    # NOTE: this module is Python 2 only (uses `basestring`).
    def iter_db2id(item):
        """
        recursively looping through stored iterables, replacing objects with ids.
        (Python only builds nested functions once, so there is no overhead for nesting)
        """
        dtype = type(item)
        if dtype in (basestring, int, float): # check the most common types first, for speed
            return item
        elif hasattr(item, "id") and hasattr(item, "db_model_name") and hasattr(item, "db_key"):
            # a database object - store only its identifying triple
            db_model_name = item.db_model_name
            if db_model_name == "typeclass":
                db_model_name = GA(item.dbobj, "db_model_name")
            return PackedDBobject(item.id, db_model_name, item.db_key)
        elif dtype == tuple:
            return tuple(iter_db2id(val) for val in item)
        elif dtype in (dict, PackedDict):
            return dict((key, iter_db2id(val)) for key, val in item.items())
        elif hasattr(item, '__iter__'):
            # any other iterable is flattened to a plain list
            return list(iter_db2id(val) for val in item)
        else:
            return item
    dtype = type(data)
    if dtype in (basestring, int, float):
        return ("simple",data)
    elif hasattr(data, "id") and hasattr(data, "db_model_name") and hasattr(data, 'db_key'):
        # all django models (objectdb,scriptdb,playerdb,channel,msg,typeclass)
        # have the protected property db_model_name hardcoded on themselves for speed.
        db_model_name = data.db_model_name
        if db_model_name == "typeclass":
            # typeclass cannot help us, we want the actual child object model name
            db_model_name = GA(data.dbobj, "db_model_name")
        return ("dbobj", PackedDBobject(data.id, db_model_name, data.db_key))
    elif hasattr(data, "__iter__"):
        return ("iter", iter_db2id(data))
    else:
        return ("simple", data)
def from_attr(attr, datatuple):
    """
    Retrieve data from a previously stored attribute. This
    is always a dict with keys type and data.
    datatuple comes from the database storage and has
    the following format:
       (simple|dbobj|iter, <data>)
    where
       simple - a single non-db object, like a string. is returned as-is.
       dbobj - a single dbobj-id. This id is retrieved back from the database.
       iter - an iterable. This is traversed iteratively, converting all found
              dbobj-ids back to objects. Also, all lists and dictionaries are
              returned as their PackedList/PackedDict counterparts in order to
              allow in-place assignment such as obj.db.mylist[3] = val. Mylist
              is then a PackedList that saves the data on the fly.
    """
    # nested functions
    def id2db(data):
        """
        Convert db-stored dbref back to object
        """
        mclass = CTYPEGET(model=data.db_model).model_class()
        try:
            return mclass.objects.dbref_search(data.id)
        except AttributeError:
            try:
                return mclass.objects.get(id=data.id)
            except mclass.DoesNotExist: # could happen if object was deleted in the interim.
                return None
    def iter_id2db(item):
        """
        Recursively looping through stored iterables, replacing ids with actual objects.
        We return PackedDict and PackedLists instead of normal lists; this is needed in order for
        the user to do dynamic saving of nested in-place, such as obj.db.attrlist[2]=3. What is
        stored in the database are however always normal python primitives.
        """
        dtype = type(item)
        if dtype in (basestring, int, float): # check the most common types first, for speed
            return item
        elif dtype == PackedDBobject:
            return id2db(item)
        elif dtype == tuple:
            return tuple([iter_id2db(val) for val in item])
        elif dtype in (dict, PackedDict):
            return PackedDict(attr, dict(zip([key for key in item.keys()],
                                             [iter_id2db(val) for val in item.values()])))
        elif hasattr(item, '__iter__'):
            return PackedList(attr, list(iter_id2db(val) for val in item))
        else:
            return item
    typ, data = datatuple
    if typ == 'simple':
        # single non-db objects
        return data
    elif typ == 'dbobj':
        # a single stored dbobj
        return id2db(data)
    elif typ == 'iter':
        # all types of iterables
        return iter_id2db(data)
    # NOTE(review): an unrecognized typ falls through and returns None
    # implicitly - confirm this is intended for corrupt/legacy data.
class Migration(DataMigration):
    """Data migration: re-serialize PlayerAttribute values into the new
    tagged (typ, data) storage format.

    Each stored pickle is loaded, wrapped in a type tag ("iter", "dbobj"
    or "simple") and re-saved after a round trip through the
    from_attr/to_attr converters defined above.
    """

    def forwards(self, orm):
        "Repack every PlayerAttribute into the new tagged format."
        for attr in orm.PlayerAttribute.objects.all():
            try:
                # repack attr into new format, and reimport
                val = pickle.loads(to_str(attr.db_value))
                if hasattr(val, '__iter__'):
                    val = ("iter", val)
                elif type(val) == PackedDBobject:
                    val = ("dbobj", val)
                else:
                    val = ("simple", val)
                attr.db_value = to_unicode(pickle.dumps(to_str(to_attr(from_attr(attr, val)))))
                attr.save()
            # BUGFIX: was `except TypeError, RuntimeError:`, which only
            # caught TypeError and bound the exception instance to the
            # name RuntimeError (shadowing the builtin). A tuple is needed
            # to actually catch both exception types.
            except (TypeError, RuntimeError):
                # Best-effort migration: skip attributes that cannot be
                # repacked instead of aborting the whole migration.
                pass

    def backwards(self, orm):
        "This migration is one-way and cannot be reversed."
        raise RuntimeError

    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'objects.objectdb': {
            'Meta': {'object_name': 'ObjectDB'},
            'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'db_destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destinations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
            'db_home': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'homes_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
            'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'db_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations_set'", 'null': 'True', 'to': "orm['objects.ObjectDB']"}),
            'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'db_player': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']", 'null': 'True', 'blank': 'True'}),
            'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'players.playerattribute': {
            'Meta': {'object_name': 'PlayerAttribute'},
            'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']"}),
            'db_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'players.playerdb': {
            'Meta': {'object_name': 'PlayerDB'},
            'db_cmdset_storage': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'db_date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'db_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'db_lock_storage': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['objects.ObjectDB']", 'null': 'True', 'blank': 'True'}),
            'db_permissions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'db_typeclass_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'players.playernick': {
            'Meta': {'unique_together': "(('db_nick', 'db_type', 'db_obj'),)", 'object_name': 'PlayerNick'},
            'db_nick': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'db_obj': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['players.PlayerDB']"}),
            'db_real': ('django.db.models.fields.TextField', [], {}),
            'db_type': ('django.db.models.fields.CharField', [], {'default': "'inputline'", 'max_length': '16', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    complete_apps = ['players']
| {
"content_hash": "27d6d0618947f6c8dad4c3f4e363bdf9",
"timestamp": "",
"source": "github",
"line_count": 474,
"max_line_length": 187,
"avg_line_length": 46.37552742616034,
"alnum_prop": 0.5802019834409972,
"repo_name": "TaliesinSkye/evennia",
"id": "d7d91294fd0af44929ab9117d1285c81be2537d0",
"size": "22000",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/players/migrations/0008_converting_attributes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "59698"
},
{
"name": "D",
"bytes": "9343933"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "JavaScript",
"bytes": "91190"
},
{
"name": "Python",
"bytes": "2840755"
},
{
"name": "Shell",
"bytes": "4577"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Build the small organic-food resource container template object.

    `kernel` is not used in this template but is part of the loader's
    factory signature (presumably a service locator -- verify against
    the swgpy template loader).
    """
    result = ResourceContainer()
    result.template = "object/resource_container/shared_resource_container_organic_food.iff"
    result.attribute_template_id = -1
    result.stfName("resource_container_n","organic_food_small")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
return result | {
"content_hash": "25a64047bc1f47fbe9529e6118c4eb58",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 89,
"avg_line_length": 26.76923076923077,
"alnum_prop": 0.7241379310344828,
"repo_name": "obi-two/Rebelion",
"id": "143a4a08124ee9c26333ccb8872d09bd4d965afa",
"size": "493",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/resource_container/shared_resource_container_organic_food.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
import pygame
from pytemo import colors, fonts
from pytemo.widgets.base import BaseWidget
class BarChartWidget(BaseWidget):
    """Widget rendering a vertical bar chart with animated transitions.

    When new data arrives via update_data(), a per-bar offset (in percent
    of the tallest bar) between old and new heights is computed; _draw()
    then shrinks each offset by one percent per frame so the bars glide
    towards their new heights.
    """

    _data = None     # current BarChartData (labels + parallel values)
    _offsets = None  # per-bar percent offsets still to animate away

    def __init__(self, size):
        BaseWidget.__init__(self, size)

    def update_data(self, data):
        """Install new chart data and compute animation offsets.

        Offsets are only computed when data was already present; the
        first data set is drawn at its final height immediately.
        """
        if self._data is not None:
            old_max = max(self._data.values)
            new_max = max(data.values)
            self._offsets = ()
            for pos, label_text in enumerate(data.labels):
                new_value = data.values[pos]
                old_value = self._data.values[pos]
                old_percent = (old_value * 100) / old_max
                new_percent = (new_value * 100) / new_max
                self._offsets += ((new_percent - old_percent),)
        self._data = data
        self.updating = True

    def _draw_background(self):
        # draw box with shadow
        pygame.draw.rect(self.surface, colors.BOX_SHADOW_COLOR, (0, 0) + self.surface.get_size())
        pygame.draw.rect(self.surface, colors.BOX_COLOR,
                         (0, 0) + (self.surface.get_width() - 2, self.surface.get_height() - 2 ))

    def _draw(self):
        """Render labels and bars, stepping each animation offset towards 0."""
        section_width = self.surface.get_width() / len(self._data.labels)
        label_y = self.surface.get_height() - 20 - fonts.MEDIUM_FONT.get_height()
        bar_start_y = 20
        bar_end_y = label_y - 20
        max_value = max(self._data.values)
        new_offsets = ()
        for pos, label_text in enumerate(self._data.labels):
            section_center = (pos * section_width) + (section_width / 2)
            section_start = (pos * section_width)
            # Centered label under each bar.
            label = fonts.MEDIUM_FONT.render(label_text, True, colors.TEXT_COLOR)
            label_rect = label.get_rect()
            label_rect.topleft = (section_center - (label.get_width() / 2), label_y)
            self.surface.blit(label, label_rect)
            bar_width = section_width - section_width / 2
            value = self._data.values[pos]
            percent = int((value * 100) / max_value)
            if self._offsets is not None:
                # Subtract the remaining offset, then move it one step
                # closer to zero for the next frame.
                offset = self._offsets[pos]
                percent = percent - offset
                if offset > 0:
                    new_offsets += ((offset - 1),)
                elif offset < 0:
                    new_offsets += ((offset + 1),)
                else:
                    new_offsets += ((0),)
            bar_height = percent * (bar_end_y - bar_start_y) / 100
            bar_x = (section_width / 4) + section_start
            color = colors.CHART_COLOR_1
            pygame.draw.rect(self.surface, color, (bar_x, bar_end_y - bar_height, bar_width, bar_height))
        if self._offsets is not None:
            # BUGFIX: removed leftover debug statement `print self._offsets`.
            self._offsets = new_offsets
            self.updating = True
class BarChartData:
    # Simple value object pairing bar labels with their numeric values.
    labels = None  # sequence of bar label strings
    values = None  # sequence of numbers, parallel to `labels`
    def __init__(self, labels, values):
        self.labels = labels
self.values = values | {
"content_hash": "d859a4416a9a84dfe3dd504f08bbe52a",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 105,
"avg_line_length": 33.61363636363637,
"alnum_prop": 0.5405679513184585,
"repo_name": "jochil/pytemo",
"id": "13302179374f8fb83baa5c97551e75a04ef5b68a",
"size": "2958",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytemo/widgets/bar_chart.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31684"
}
],
"symlink_target": ""
} |
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
class DistributedGardenAI(DistributedObjectAI):
    """Server-side (AI) stub for a distributed garden object.

    No state or behavior yet; only the notify channel and a placeholder
    network handler are declared.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("DistributedGardenAI")
    def sendNewProp(self, todo0, todo1, todo2, todo3):
        # Stub handler: parameters are unnamed placeholders (todo0..todo3)
        # and the message is currently ignored.
        pass
| {
"content_hash": "549e38a3a968c2cb000430d15a347b60",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 79,
"avg_line_length": 35.666666666666664,
"alnum_prop": 0.8161993769470405,
"repo_name": "ToonTownInfiniteRepo/ToontownInfinite",
"id": "34378ce2ebf35f1edc16aae5093ab11a8f92e1ae",
"size": "321",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "toontown/estate/DistributedGardenAI.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1703277"
},
{
"name": "C#",
"bytes": "9892"
},
{
"name": "C++",
"bytes": "5468044"
},
{
"name": "Emacs Lisp",
"bytes": "210083"
},
{
"name": "F#",
"bytes": "4611"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Objective-C",
"bytes": "23212"
},
{
"name": "Puppet",
"bytes": "5245"
},
{
"name": "Python",
"bytes": "34010215"
},
{
"name": "Shell",
"bytes": "11192"
},
{
"name": "Tcl",
"bytes": "1981257"
}
],
"symlink_target": ""
} |
from dataclasses import dataclass
from pathlib import PurePath
from pants.backend.go.target_types import GoBinaryMainPackageField, GoBinaryTarget, GoPackageTarget
from pants.backend.go.util_rules.binary import GoBinaryMainPackage, GoBinaryMainPackageRequest
from pants.backend.go.util_rules.build_pkg import BuiltGoPackage
from pants.backend.go.util_rules.build_pkg_target import BuildGoPackageTargetRequest
from pants.backend.go.util_rules.first_party_pkg import (
FallibleFirstPartyPkgAnalysis,
FirstPartyPkgAnalysisRequest,
)
from pants.backend.go.util_rules.import_analysis import ImportConfig, ImportConfigRequest
from pants.backend.go.util_rules.link import LinkedGoBinary, LinkGoBinaryRequest
from pants.core.goals.package import (
BuiltPackage,
BuiltPackageArtifact,
OutputPathField,
PackageFieldSet,
)
from pants.core.goals.run import RunFieldSet
from pants.engine.fs import AddPrefix, Digest, MergeDigests
from pants.engine.internals.selectors import Get
from pants.engine.rules import collect_rules, rule
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
@dataclass(frozen=True)
class GoBinaryFieldSet(PackageFieldSet, RunFieldSet):
    """Field set consumed by the `package` and `run` goals for `go_binary` targets."""

    # A target matches this field set iff it has the main-package field.
    required_fields = (GoBinaryMainPackageField,)

    main: GoBinaryMainPackageField
    output_path: OutputPathField
@rule(desc="Package Go binary", level=LogLevel.DEBUG)
async def package_go_binary(field_set: GoBinaryFieldSet) -> BuiltPackage:
    """Compile and link a `go_binary` target's main package into a binary artifact.

    Raises:
        ValueError: if the main package cannot be analyzed, or if it does not
            declare package name `main` (a Go requirement for executables).
    """
    main_pkg = await Get(GoBinaryMainPackage, GoBinaryMainPackageRequest(field_set.main))
    main_pkg_analysis = await Get(
        FallibleFirstPartyPkgAnalysis, FirstPartyPkgAnalysisRequest(main_pkg.address)
    )
    analysis = main_pkg_analysis.analysis
    if not analysis:
        raise ValueError(
            f"Unable to analyze main package `{main_pkg.address}` for go_binary target {field_set.address}: {main_pkg_analysis.stderr}"
        )
    if analysis.name != "main":
        # BUGFIX: this previously interpolated `GoBinaryTarget.address`, but
        # target *classes* expose `alias` (cf. `GoPackageTarget.alias` on the
        # previous line); `address` only exists on target instances and would
        # raise AttributeError while building this error message.
        raise ValueError(
            f"{GoPackageTarget.alias} target `{main_pkg.address}` is used as the main package for "
            f"{GoBinaryTarget.alias} target `{field_set.address}` but uses package name `{analysis.name}` "
            "instead of `main`. Go requires that main packages actually use `main` as the package name."
        )

    # Build the main package (and transitively its dependencies).
    built_package = await Get(
        BuiltGoPackage, BuildGoPackageTargetRequest(main_pkg.address, is_main=True)
    )
    main_pkg_a_file_path = built_package.import_paths_to_pkg_a_files["main"]
    import_config = await Get(
        ImportConfig, ImportConfigRequest(built_package.import_paths_to_pkg_a_files)
    )
    input_digest = await Get(Digest, MergeDigests([built_package.digest, import_config.digest]))

    # Link into the user-configurable output path.
    output_filename = PurePath(field_set.output_path.value_or_default(file_ending=None))
    binary = await Get(
        LinkedGoBinary,
        LinkGoBinaryRequest(
            input_digest=input_digest,
            archives=(main_pkg_a_file_path,),
            import_config_path=import_config.CONFIG_PATH,
            output_filename=f"./{output_filename.name}",
            description=f"Link Go binary for {field_set.address}",
        ),
    )

    renamed_output_digest = await Get(Digest, AddPrefix(binary.digest, str(output_filename.parent)))
    artifact = BuiltPackageArtifact(relpath=str(output_filename))
    return BuiltPackage(renamed_output_digest, (artifact,))
def rules():
    """Register this module's rules and hook the field set into the `package` goal."""
    registrations = list(collect_rules())
    registrations.append(UnionRule(PackageFieldSet, GoBinaryFieldSet))
    return registrations
| {
"content_hash": "ad4fb49e67570f180b175b9e383201f6",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 135,
"avg_line_length": 42.901234567901234,
"alnum_prop": 0.7398561151079137,
"repo_name": "benjyw/pants",
"id": "f0f65292eccaf48c9d13e61f48d29d74475f92d7",
"size": "3607",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/python/pants/backend/go/goals/package_binary.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "688"
},
{
"name": "Go",
"bytes": "67315"
},
{
"name": "Java",
"bytes": "10690"
},
{
"name": "Kotlin",
"bytes": "6433"
},
{
"name": "Mustache",
"bytes": "3595"
},
{
"name": "Python",
"bytes": "7135320"
},
{
"name": "Rust",
"bytes": "1601736"
},
{
"name": "Scala",
"bytes": "21950"
},
{
"name": "Shell",
"bytes": "31723"
},
{
"name": "Starlark",
"bytes": "72809"
}
],
"symlink_target": ""
} |
import sys
import os
import pprint
def index_directory(path):
    """Recursively index every file under `path`.

    Returns a dict mapping '<size>,<basename>' keys to the set of full
    paths of all files sharing that size and name, so that identical
    files can be matched across directory trees.  A progress line is
    printed every 1000 files.
    """
    files = {}
    count = 0
    for dirpath, dirnames, filenames in os.walk(path):
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            size = os.stat(full_path).st_size
            key = '{},{}'.format(size, filename)
            # setdefault replaces the original get()/None check plus
            # `|= set([...])`, avoiding a double lookup and a throwaway
            # one-element set per file.
            files.setdefault(key, set()).add(full_path)
            if count % 1000 == 0:
                print('  Indexing {}...'.format(dirpath))
            count += 1
    return files
def compare_indexes(old_files, new_files):
    """Report files present in the old index but missing from the new one.

    Both arguments are dicts as produced by index_directory().  Only the
    old-but-not-new case is printed; the other two set differences are
    still computed so they are handy in a debugging session.
    """
    old_keys = set(old_files.keys())
    new_keys = set(new_files.keys())
    print('  Comparing contents...')
    missing_in_new = old_keys - new_keys
    both = old_keys & new_keys            # kept for interactive debugging
    missing_in_old = new_keys - old_keys  # kept for interactive debugging
    print('Missing in new:')
    for key in missing_in_new:
        pprint.pprint(old_files[key])
        print()
def run_check(old, new):
    """Index both directory trees, then report what is missing in the new one."""
    print('Indexing new directory...')
    new_index = index_directory(new)
    print('Indexing old directory...')
    old_index = index_directory(old)
    compare_indexes(old_index, new_index)
if __name__ == '__main__':
    # CLI entry point: check.py <old_dir> <new_dir>
    old = sys.argv[1]
    new = sys.argv[2]
    print('Comparing old location {} to new location {}'.format(old, new))
    run_check(old, new)
| {
"content_hash": "2a3d57dd3a36e2f1eec773df7f6bca09",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 74,
"avg_line_length": 27.387096774193548,
"alnum_prop": 0.5694935217903416,
"repo_name": "davidstrauss/file-reorg-check",
"id": "b64fe35a78a84ca36d4c9bbceda59caf2736867c",
"size": "1698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "check.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1698"
}
],
"symlink_target": ""
} |
""" Tests for the Torque/PBS Job """
import sure
from mock import Mock
from .fixtures import *
from metapipe.models import pbs_job
def test_qstat_queued():
    """A queued qstat response is reported as queued."""
    job = pbs_job.PBSJob('', None)
    pbs_job.call = Mock(return_value=pbs_job_qstat_queued)
    job.is_queued().should.equal(True)
def test_qstat_running():
    """A running qstat response is reported as running."""
    job = pbs_job.PBSJob('', None)
    pbs_job.call = Mock(return_value=pbs_job_qstat_running)
    job.is_running().should.equal(True)
def test_qstat_exception():
    """An empty/unparseable qstat response must not be treated as running."""
    job = pbs_job.PBSJob('', None)
    pbs_job.call = Mock(return_value=('', None))
    job.is_running().should.equal(False)
def test_submit():
    """Submitting parses the numeric job id out of the qsub output."""
    job = pbs_job.PBSJob('', None)
    pbs_job.call = Mock(return_value=pbs_job_qsub)
    job.make = Mock()
    job.submit()
    job.id.should.equal('9974279')
| {
"content_hash": "a7ceb92a9f0abea7fd64394e984bb467",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 59,
"avg_line_length": 20.44736842105263,
"alnum_prop": 0.6422136422136422,
"repo_name": "Sonictherocketman/metapipe",
"id": "20eb398bb3f3efa2c31b80284007c187f3b6055e",
"size": "777",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test_pbs_job.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3244"
},
{
"name": "Python",
"bytes": "110371"
},
{
"name": "Roff",
"bytes": "2"
},
{
"name": "Shell",
"bytes": "150"
}
],
"symlink_target": ""
} |
class TrikeActor(TObject):
    # NOTE(review): this class body does not parse -- `self.TypeID = #FIXME`
    # has no right-hand side, and the `self.` / `self.__dict__[...]`
    # statements below sit at class scope, where neither `self` nor the bare
    # names `create`, `rename`, ... are defined.  The block reads like an
    # early sketch that was meant to live inside __init__; left byte-for-byte
    # as found.
    self.TypeID = #FIXME
    #FIXME class method to register TypeIDs with TObject
    #Which function should TObject.processEvent() call for events
    self.handlers = {"832E5EC291334D2193A9D51D90ADAFEA": self.__dict__[create], #TEVentCreateActor
                     "8DBFB5BFDD0B485AA037FFBEABC65CC3": self.__dict__[rename], #TEventRenameActor
                     "DA1E0FF394494772835956FEB9EEB694": self.__dict__[setType], #TEventSetActorType
                     "8DA1D94500C94441815F71CF8F28CEFE": self.__dict__[unsetType], #TEventUnsetActorType
                     "6E5F1DCE9CA34B519649306973FC3EB6": self.__dict__[makeFavoredUser], #TEventMakeFavoredUser
                     "2D3DC63D80564C06AF99D767401984F8": self.__dict__[makeNotFavoredUser], #TEventMakeNotFavoredUser
                     "5710BEFD4F3C426C80DE44AA3E41A84B": self.__dict__[unsetFavoredUser], #TEventUnsetFavoredUser
                     "D54741C973764E7394596AC3053796CD": self.__dict__[delete]} #TEventDeleteActor
    self.addListener(TEventCreateDataObject, self.__dict__[handleNewDataObject])
    self.initialState = TState(prototype = True)
    self.initialState.setValue("name", "")
    self.initialState.setValue("type", None)
    self.initialState.setValue("isFavoredUser", None)
    #self.description = ""
    #self.isAuthenticated = None #self.isAttacker = None
    #self.usesSystem = None #self.usedBySystem = None
    #self.isShared = None #self.hasSharedResources = None
    self.Types = ["Component Process", "Execution Environment", "External Interactor"]
    def __init__(self, model, creationEvent):
        # NOTE(review): `super.__init__` calls the builtin `super` type's
        # initializer, not the parent class -- presumably `TObject.__init__`
        # or `super(TrikeActor, self).__init__` was intended; confirm.
        super.__init__(self, TOType.TrikeActor, creationEvent.getChangeID())
        self.receiveEvent(creationEvent)
        pass
    def create(self, params): #name, type, isFavoredUser
        # Populates the writable state from the creation-event payload and
        # emits a follow-up creation event.
        self._wState.setValue("name", params["name"])
        self._wState.setValue("type", params["type"])
        self._wState.setValue("isFavoredUser", params["isFavoredUser"])
        self._sendEvent(TEventActorCreated.TypeID, TrikeDataObject.TypeID, {"name": params["name"]})
    def rename(self, params):
        self._wState.setValue("name", params["name"])
        self._setKey()
        pass
    def _setKey(self):
        # NOTE(review): same `super.` issue as in __init__.
        super._setKey(self._wState.getValue("name"))
        pass
    def setType(self, params): #type
        pass
    def unsetType(self, params):
        pass
    def makeFavoredUser(self, params):
        pass
    def makeNotFavoredUser(self, params):
        pass
    def unsetFavoredUser(self, params):
        pass
    #FIXME deletion
    def delete(self, params):
        super._delink()
        pass
    #FIXME synthesize
    def synthesize(self, changeID):
        pass
class TrikeDataObject(TObject):
    # NOTE(review): `self.Types` at class scope will raise NameError at class
    # creation time (`self` undefined); the remainder of the class is a stub.
    # Left byte-for-byte as found.
    self.Types = ["Data", "Software", "Hardware Data Container", "Software Data Container"]
    def __init__(self, creationEvent):
        # NOTE(review): `super.__init__` targets the builtin `super` type,
        # not the parent class; confirm intent.
        super.__init__(self, TOType.TrikeDataObject, creationEvent.getChangeID())
        self.name = ""
        #self.description = ""
        self.isAsset = None
        #self.isShared = None #self.isTransient = None
        pass
    def rename(self, newName):
        pass
    def makeAsset(self):
        pass
    def makeNotAsset(self):
        pass
    def unsetAsset(self):
        pass
class TrikeActionSet(TObject):
    # NOTE(review): `self.ActionValues` at class scope will raise NameError at
    # class creation time; stub methods below are unimplemented.  Left
    # byte-for-byte as found.
    self.ActionValues = ["Unknown", "N/A", "Always", "Sometimes", "Never"]
    def __init__(self, creationEvent):
        # NOTE(review): `super.__init__` targets the builtin `super` type,
        # not the parent class; confirm intent.
        super.__init__(self, TOType.TrikeAction, creationEvent.getChangeID())
        self.Actor = None
        self.Asset = None
        self.isCreateAllowed = self.ActionValues[0]
        #self.isReadAllowed = self.ActionValues[0] #self.isUpdateAllowed = self.ActionValues[0]
        #self.isDeleteAllowed = self.ActionValues[0] #self.iseXecuteAllowed = self.ActionValues[0]
        #self.isconFigureAllowed = self.ActionValues[0]
        self.createRules = None
        #self.readRules = "" #self.updateRules = ""
        #self.deleteRules = "" #self.executeRules = ""
        #self.configureRules = ""
        pass
    def setCreateAllowed(self, value):
        pass
    def unsetCreateAllowed(self, value):
        pass
    def setCreateRules(self, rules):
        pass
    def unsetCreateRules(self, rules):
        pass
| {
"content_hash": "8be7def7d8fc2696bbc7898738a1cb4a",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 117,
"avg_line_length": 33.25954198473283,
"alnum_prop": 0.6336929079641955,
"repo_name": "Dymaxion00/octotrike",
"id": "3703f6d4ec6057392cd983209b2fceb7e329823b",
"size": "4533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TrikeActor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57873"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import unittest
import pipgh
import pipgh.pipgh
class TestMain(unittest.TestCase):
    """CLI-level tests for pipgh.main(): docstring shape, argument parsing
    failures, exit messages and dry-run dispatch."""

    def test_init_doc(self):
        """The package docstring must contain every docopt section."""
        doc = pipgh.__doc__
        self.assertTrue(doc != None)
        self.assertTrue('Usage:' in doc)
        self.assertTrue('Commands:' in doc)
        self.assertTrue('Options:' in doc)
        self.assertTrue('Examples:' in doc)

    def test_cli_fails(self):
        """Every invalid or help/version argv must raise SystemExit."""
        # pipgh [--auth] search (...)
        # pipgh [--auth] show (...)
        # pipgh install (...)
        # pipgh [-h | --help | --version]
        argvs = [
            [],
            ['unknown_command'],
            ['--auth'],
            ['--auth', 'searched'],
            ['--aut', 'search'],
            ['--aut', 'show'],
            ['--auth', 'install'],
            ['--auth', 'install', 'docopt/docopt'],
            ['--auth', 'show'],
            ['--auth', 'search'],
            ['-h'],
            ['--help'],
            ['-help'],
            ['--h'],
            ['-v'],
            ['--version'],
        ]
        for argv in argvs:
            try:
                self.assertRaises(SystemExit, pipgh.main, argv, dry_run=True)
            except AssertionError as e:
                # Append the failing argv to the assertion message for context.
                e.args = (e.args[0] + ' for ' + str(argv),)
                raise

    def test_cli_fails_output(self):
        """The SystemExit payload must carry the expected usage/version text."""
        # pipgh [--auth] search (...)
        # pipgh [--auth] show (...)
        # pipgh install (...)
        # pipgh [-h | --help | --version]
        argvs = [
            ([], pipgh.pipgh.USAGE_MESSAGE),
            ([], pipgh.__version__),
            (['-h'], pipgh.pipgh.USAGE_MESSAGE),
            (['--help'], pipgh.pipgh.USAGE_MESSAGE),
            (['-h'], pipgh.pipgh.HELP_MESSAGE.strip()),
            (['--help'], pipgh.pipgh.HELP_MESSAGE.strip()),
            (['--version'], pipgh.__version__),
        ]
        for idx, (argv, output) in enumerate(argvs):
            try:
                pipgh.main(argv, dry_run=True)
            except SystemExit as e:
                args = e.args
            else:
                _err = 'ArgvCase#%d %s failed to SystemExit' % (idx, str(argv))
                raise AssertionError(_err)
            try:
                self.assertTrue(output in args[0])
            except AssertionError as e:
                _err = 'ArgvCase#%d %s failed SystemExit\'s output'
                e.args = (_err % (idx, str(argv)),)
                raise

    def test_dry_run(self):
        """Valid argvs must dispatch to (command, auth_flag, argv) tuples."""
        # pipgh show <full_name>                 2
        # pipgh install <full_name>              2
        # pipgh search <query>...                2+
        # pipgh install <full_name> <ref>        3
        # pipgh install -r <requirements.txt>    3
        # pipgh --auth show <full_name>          3
        # pipgh --auth search <query>...         3+
        argvs = [
            (['show', 'requests'], ('show', False, ['show', 'requests'])),
            (['install', 'requests'], ('install', False, ['install', 'requests'])),
            (['search', 'requests'], ('search', False, ['search', 'requests'])),
            (['install', 'requests', 'v0.1'], ('install', False, ['install', 'requests', 'v0.1'])),
            (['install', '-r', 'reqs.txt'], ('install', False, ['install', '-r', 'reqs.txt'])),
            (['--auth', 'show', 'requests'], ('show', True, ['show', 'requests'])),
            (['--auth', 'search', 'requests'], ('search', True, ['search', 'requests'])),
            (['--auth', 'search', 'requests', 'http'], ('search', True, ['search', 'requests', 'http'])),
        ]
        for argv, ref in argvs:
            rst = pipgh.main(argv, dry_run=True)
            self.assertEqual(rst, ref)
| {
"content_hash": "cc5bc41e17a9a501f01b1ce5484daeff",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 108,
"avg_line_length": 38.4639175257732,
"alnum_prop": 0.44116858751005095,
"repo_name": "ffunenga/pipgh",
"id": "87f70234dee5221a82a04ea52734f0ccd6fe0650",
"size": "3731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/cli/test_main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30767"
}
],
"symlink_target": ""
} |
import re
def char_freq_table(filepath):
file = open(filepath)
chars = file.read().lower().replace(" ", "").replace("\n", "")
freqs = {key: 0 for key in chars}
for char in chars:
freqs[char] += 1
for word in freqs:
print "%s: %s" % (word, freqs[word])
char_freq_table('Q20.txt') | {
"content_hash": "0842da86b04e67582ee3ada95b3d1400",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 66,
"avg_line_length": 24.384615384615383,
"alnum_prop": 0.5678233438485805,
"repo_name": "coedfetr/STTP-python",
"id": "1c361523bf22c5b536f7afc258a32ce1e011b425",
"size": "317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyPractice/Q20.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16517"
}
],
"symlink_target": ""
} |
"""Tests for `mvn_tri.py`."""
from absl.testing import absltest
from absl.testing import parameterized
import chex
from distrax._src.distributions.mvn_tri import MultivariateNormalTri
from distrax._src.utils import equivalence
import jax.numpy as jnp
import numpy as np
def _get_scale_tril_from_scale_triu(scale_triu: np.ndarray) -> np.ndarray:
scale_triu = np.triu(scale_triu)
scale_triu_t = np.vectorize(np.transpose, signature='(k,k)->(k,k)')(
scale_triu)
cov = np.matmul(scale_triu, scale_triu_t)
return np.linalg.cholesky(cov)
class MultivariateNormalTriTest(equivalence.EquivalenceTest):
  def setUp(self):
    """Points the equivalence-test harness at the distribution under test."""
    super().setUp()
    self._init_distr_cls(MultivariateNormalTri)
  @parameterized.named_parameters(
      ('all inputs are None', {}),
      ('wrong dimension of loc', {
          'loc': np.array(0.),
      }),
      ('scale_tri is 0d', {
          'scale_tri': np.array(1.),
      }),
      ('scale_tri is 1d', {
          'scale_tri': np.ones((4,)),
      }),
      ('scale_tri is not square', {
          'scale_tri': np.ones((4, 3)),
      }),
      ('inconsistent loc and scale_tri', {
          'loc': np.zeros((4,)),
          'scale_tri': np.ones((5, 5)),
      }),
  )
  def test_raises_on_wrong_inputs(self, dist_kwargs):
    """Invalid constructor arguments must raise ValueError."""
    with self.assertRaises(ValueError):
      self.distrax_cls(**dist_kwargs)
  @parameterized.named_parameters(
      ('loc provided', {'loc': np.zeros((4,))}),
      ('scale_tri provided', {'scale_tri': np.eye(4)}),
  )
  def test_default_properties(self, dist_kwargs):
    """Omitted parameters default to zero loc / identity scale, lower-triangular."""
    dist = self.distrax_cls(**dist_kwargs)
    self.assertTrue(dist.is_lower)
    self.assertion_fn(rtol=1e-3)(dist.loc, jnp.zeros((4,)))
    self.assertion_fn(rtol=1e-3)(dist.scale_tri, jnp.eye(4))
  @parameterized.named_parameters(
      ('unbatched', (), (4,), (4, 4), True),
      ('batched loc', (7,), (7, 4), (4, 4), True),
      ('batched scale_tri lower', (7,), (4,), (7, 4, 4), True),
      ('batched scale_tri upper', (7,), (4,), (7, 4, 4), False),
  )
  def test_properties(self, batch_shape, loc_shape, scale_tri_shape, is_lower):
    """Properties broadcast loc/scale to the batch shape and keep only the
    requested (lower or upper) triangle of `scale_tri`."""
    rng = np.random.default_rng(2022)
    loc = rng.normal(size=loc_shape)
    scale_tri = rng.normal(size=scale_tri_shape)
    dist = self.distrax_cls(loc=loc, scale_tri=scale_tri, is_lower=is_lower)
    # The stored scale keeps only the relevant triangle of the raw input.
    tri_fn = jnp.tril if is_lower else jnp.triu
    self.assertEqual(dist.batch_shape, batch_shape)
    self.assertEqual(dist.is_lower, is_lower)
    self.assertion_fn(rtol=1e-3)(
        dist.loc, jnp.broadcast_to(loc, batch_shape + (4,)))
    self.assertion_fn(rtol=1e-3)(dist.scale_tri, jnp.broadcast_to(
        tri_fn(scale_tri), batch_shape + (4, 4)))
  @chex.all_variants
  @parameterized.named_parameters(
      ('unbatched, no shape', (), (4,), (4, 4)),
      ('batched loc, no shape', (), (7, 4), (4, 4)),
      ('batched scale_tri, no shape', (), (4,), (7, 4, 4)),
      ('unbatched, with shape', (3,), (4,), (4, 4)),
      ('batched loc, with shape', (3,), (7, 4), (4, 4)),
      ('batched scale_tri, with shape', (3,), (4,), (7, 4, 4)),
  )
  def test_sample_shape(self, sample_shape, loc_shape, scale_tri_shape):
    """Sample shapes must match the TFP reference distribution's."""
    rng = np.random.default_rng(2022)
    loc = rng.normal(size=loc_shape)
    scale_tri = rng.normal(size=scale_tri_shape)
    # TFP names the parameter `scale_tril`; distrax uses `scale_tri`.
    dist_kwargs = {'loc': loc, 'scale_tri': scale_tri}
    tfp_dist_kwargs = {'loc': loc, 'scale_tril': scale_tri}
    super()._test_sample_shape(
        dist_args=(), dist_kwargs=dist_kwargs, tfp_dist_kwargs=tfp_dist_kwargs,
        sample_shape=sample_shape)
  @chex.all_variants
  @parameterized.named_parameters(
      ('float32', jnp.float32),
      ('float64', jnp.float64))
  def test_sample_dtype(self, dtype):
    """Samples must inherit the dtype of the distribution parameters."""
    dist_params = {
        'loc': np.array([0., 0.], dtype),
        'scale_tri': np.array([[1., 0.], [0., 1.]], dtype)}
    dist = self.distrax_cls(**dist_params)
    samples = self.variant(dist.sample)(seed=self.key)
    self.assertEqual(samples.dtype, dist.dtype)
    chex.assert_type(samples, dtype)
  @chex.all_variants
  @parameterized.named_parameters(
      ('unbatched, unbatched value', (4,), (4,), (4, 4), True),
      ('unbatched, unbatched value, upper', (4,), (4,), (4, 4), False),
      ('batched loc, unbatched value', (4,), (7, 4), (4, 4), True),
      ('batched scale_tri, unbatched value', (4,), (4,), (7, 4, 4), True),
      ('unbatched, batched value', (3, 7, 4), (4,), (4, 4), True),
      ('batched loc, batched value', (3, 7, 4), (7, 4), (4, 4), True),
      ('batched scale_tri, batched value', (3, 7, 4), (4,), (7, 4, 4), True),
      ('batched scale_tri, batched value, upper',
       (3, 7, 4), (4,), (7, 4, 4), False),
  )
  def test_log_prob(self, value_shape, loc_shape, scale_tri_shape, is_lower):
    """log_prob must agree with the TFP reference distribution."""
    rng = np.random.default_rng(2022)
    loc = rng.normal(size=loc_shape)
    scale_tri = rng.normal(size=scale_tri_shape)
    value = rng.normal(size=value_shape)
    dist_kwargs = {'loc': loc, 'scale_tri': scale_tri, 'is_lower': is_lower}
    if is_lower:
      tfp_dist_kwargs = {'loc': loc, 'scale_tril': scale_tri}
    else:
      # TFP only takes a lower-triangular scale, so convert the upper
      # factor to the equivalent lower one (same covariance).
      scale_tril = _get_scale_tril_from_scale_triu(scale_tri)
      tfp_dist_kwargs = {'loc': loc, 'scale_tril': scale_tril}
    super()._test_attribute(
        attribute_string='log_prob',
        dist_kwargs=dist_kwargs,
        tfp_dist_kwargs=tfp_dist_kwargs,
        call_args=(value,),
        assertion_fn=self.assertion_fn(rtol=1e-3))
  @chex.all_variants(with_pmap=False)
  @parameterized.named_parameters(
      ('unbatched', (4,), (4, 4)),
      ('batched loc', (7, 4), (4, 4)),
      ('batched scale_tri', (4,), (7, 4, 4)),
  )
  def test_method(self, loc_shape, scale_tri_shape):
    """Compares statistics (entropy, mean, mode, moments) against TFP."""
    rng = np.random.default_rng(2022)
    loc = rng.normal(size=loc_shape)
    scale_tri = rng.normal(size=scale_tri_shape)
    for method in ['entropy', 'mean', 'stddev', 'variance',
                   'covariance', 'mode']:
      for is_lower in [True, False]:
        # Second-moment statistics get a looser tolerance, loosest for
        # the upper-triangular parameterization.
        if method in ['stddev', 'covariance', 'variance']:
          rtol = 2e-2 if is_lower else 5e-2
        else:
          rtol = 1e-3
        dist_kwargs = {'loc': loc, 'scale_tri': scale_tri, 'is_lower': is_lower}
        if is_lower:
          tfp_dist_kwargs = {'loc': loc, 'scale_tril': scale_tri}
        else:
          # TFP only accepts a lower-triangular scale; convert first.
          scale_tril = _get_scale_tril_from_scale_triu(scale_tri)
          tfp_dist_kwargs = {'loc': loc, 'scale_tril': scale_tril}
        with self.subTest(method=method, is_lower=is_lower):
          super()._test_attribute(
              method,
              dist_kwargs=dist_kwargs,
              tfp_dist_kwargs=tfp_dist_kwargs,
              assertion_fn=self.assertion_fn(rtol=rtol))
  @chex.all_variants(with_pmap=False)
  @parameterized.named_parameters(
      ('kl distrax_to_distrax', 'kl_divergence', 'distrax_to_distrax'),
      ('kl distrax_to_tfp', 'kl_divergence', 'distrax_to_tfp'),
      ('kl tfp_to_distrax', 'kl_divergence', 'tfp_to_distrax'),
      ('cross-ent distrax_to_distrax', 'cross_entropy', 'distrax_to_distrax'),
      ('cross-ent distrax_to_tfp', 'cross_entropy', 'distrax_to_tfp'),
      ('cross-ent tfp_to_distrax', 'cross_entropy', 'tfp_to_distrax'))
  def test_with_two_distributions(self, function_string, mode_string):
    """Checks KL divergence / cross-entropy between distribution pairs.

    The two distributions have broadcastable (not equal) batch shapes,
    and each distrax/TFP pairing direction is exercised.
    """
    rng = np.random.default_rng(2022)
    loc1 = rng.normal(size=(5, 1, 4))
    scale_tri1 = rng.normal(size=(3, 4, 4))
    loc2 = rng.normal(size=(3, 4))
    scale_tri2 = rng.normal(size=(4, 4))
    for is_lower in [True, False]:
      dist1_kwargs = {
          'loc': loc1, 'scale_tri': scale_tri1, 'is_lower': is_lower}
      dist2_kwargs = {
          'loc': loc2, 'scale_tri': scale_tri2, 'is_lower': is_lower}
      if is_lower:
        tfp_dist1_kwargs = {'loc': loc1, 'scale_tril': scale_tri1}
        tfp_dist2_kwargs = {'loc': loc2, 'scale_tril': scale_tri2}
      else:
        # TFP only accepts a lower-triangular scale; convert the
        # upper-triangular factors via the module helper.
        tfp_dist1_kwargs = {
            'loc': loc1,
            'scale_tril': _get_scale_tril_from_scale_triu(scale_tri1)
        }
        tfp_dist2_kwargs = {
            'loc': loc2,
            'scale_tril': _get_scale_tril_from_scale_triu(scale_tri2)
        }
      with self.subTest(is_lower=is_lower):
        super()._test_with_two_distributions(
            attribute_string=function_string,
            mode_string=mode_string,
            dist1_kwargs=dist1_kwargs,
            dist2_kwargs=dist2_kwargs,
            tfp_dist1_kwargs=tfp_dist1_kwargs,
            tfp_dist2_kwargs=tfp_dist2_kwargs,
            assertion_fn=self.assertion_fn(rtol=1e-3))
def test_jittable(self):
super()._test_jittable(
dist_kwargs={'loc': np.zeros((4,))},
assertion_fn=self.assertion_fn(rtol=1e-3))
  @parameterized.named_parameters(
      ('single element', 2),
      ('range', slice(-1)),
      ('range_2', (slice(None), slice(-1))),
  )
  def test_slice(self, slice_):
    """Indexing a distribution must slice its batch dimensions only."""
    rng = np.random.default_rng(2022)
    loc = rng.normal(size=(6, 5, 4))
    scale_tri = rng.normal(size=(4, 4))
    for is_lower in [True, False]:
      with self.subTest(is_lower=is_lower):
        dist_kwargs = {'loc': loc, 'scale_tri': scale_tri, 'is_lower': is_lower}
        dist = self.distrax_cls(**dist_kwargs)
        # Batch shape shrinks per the slice; event shape and the
        # triangular orientation flag are untouched.
        self.assertEqual(dist[slice_].batch_shape, loc[slice_].shape[:-1])
        self.assertEqual(dist[slice_].event_shape, dist.event_shape)
        self.assertEqual(dist[slice_].is_lower, dist.is_lower)
        # Mean of the sliced dist must match the sliced loc.
        self.assertion_fn(rtol=1e-3)(dist[slice_].mean(), loc[slice_])
  def test_slice_ellipsis(self):
    """Ellipsis indexing must slice the trailing batch dimension."""
    rng = np.random.default_rng(2022)
    loc = rng.normal(size=(6, 5, 4))
    scale_tri = rng.normal(size=(4, 4))
    for is_lower in [True, False]:
      with self.subTest(is_lower=is_lower):
        dist_kwargs = {'loc': loc, 'scale_tri': scale_tri, 'is_lower': is_lower}
        dist = self.distrax_cls(**dist_kwargs)
        # `[..., -1]` picks the last entry of the last *batch* dim (5,),
        # leaving batch shape (6,) and the event shape unchanged.
        self.assertEqual(dist[..., -1].batch_shape, (6,))
        self.assertEqual(dist[..., -1].event_shape, dist.event_shape)
        self.assertEqual(dist[..., -1].is_lower, dist.is_lower)
        self.assertion_fn(rtol=1e-3)(dist[..., -1].mean(), loc[:, -1, :])
# Run the abseil test runner when executed as a script.
if __name__ == '__main__':
  absltest.main()
| {
"content_hash": "7e80c7d9f7b7b4860211a43384411fdf",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 80,
"avg_line_length": 39.876494023904385,
"alnum_prop": 0.5885702867419322,
"repo_name": "deepmind/distrax",
"id": "a2c6a1486b19aff7bbc15eded66eb848ec39798e",
"size": "10705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "distrax/_src/distributions/mvn_tri_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1044417"
},
{
"name": "Shell",
"bytes": "3000"
}
],
"symlink_target": ""
} |
import unittest
import numpy as np
import paddle.fluid as fluid
import paddle
import paddle.fluid.layers as layers
import paddle.fluid.core as core
import gradient_checker
import paddle.nn.functional as F
from decorator_helper import prog_scope
class TestSigmoidTripleGradCheck(unittest.TestCase):
    """Triple-grad (third-order gradient) check for F.sigmoid (static graph)."""

    @prog_scope()
    def func(self, place):
        data_shape = [2, 3, 7, 9]
        dtype = np.float64
        x = layers.data('x', data_shape, False, dtype=dtype)
        x.persistable = True
        y = F.sigmoid(x)
        x_init = np.random.random(data_shape).astype(dtype)
        # Nudge near-zero entries away from 0 so the finite-difference
        # numeric gradient stays accurate.
        x_init[np.abs(x_init) < 0.005] = 0.002
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_init, place=place, eps=0.0005
        )

    def test_grad(self):
        paddle.enable_static()
        devices = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            devices += [fluid.CUDAPlace(0)]
        for device in devices:
            self.func(device)
class TestSigmoidDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for F.sigmoid."""

    def sigmoid_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return F.sigmoid(x[0])

    @prog_scope()
    def func(self, place):
        """Builds the static program and runs both grad checks on `place`."""
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True  # keep the var accessible to the checker (pattern used file-wide)
        y = F.sigmoid(x)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # The dygraph check needs intermediate grads retained; restore the
        # global flag afterwards so other tests are unaffected.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.sigmoid_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestTanhTripleGradCheck(unittest.TestCase):
    """Triple-grad (third-order gradient) check for paddle.tanh."""

    def tanh_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.tanh(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.tanh(x)
        x_arr = np.random.random(shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.tanh_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestTanhDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.tanh."""

    def tanh_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.tanh(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.tanh(x)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.tanh_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestAbsDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.abs.

    NOTE(review): a second class with this exact name is defined later in
    this module and shadows this one, so these tests never run as-is;
    consider renaming one of the two classes.
    """

    def abs_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.abs(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.abs(x)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Keep inputs away from abs's kink at 0, where the numeric
        # gradient is ill-defined.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.abs_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestReluDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for layers.relu."""

    @prog_scope()
    def func(self, place):
        data_shape = [2, 3, 7, 9]
        dtype = np.float64
        x = layers.data('x', data_shape, False, dtype)
        x.persistable = True
        y = layers.relu(x)
        x_init = np.random.uniform(-1, 1, data_shape).astype(dtype)
        # Push near-zero entries away from relu's kink at 0, where the
        # numeric gradient is ill-defined.
        x_init[np.abs(x_init) < 0.005] = 0.02
        gradient_checker.double_grad_check(
            [x], y, x_init=x_init, place=place, eps=0.005
        )

    def test_grad(self):
        paddle.enable_static()
        devices = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            devices += [fluid.CUDAPlace(0)]
        for device in devices:
            self.func(device)
class TestLeakyReluDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for leaky_relu."""

    def leaky_relu_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker; slope
        # matches the one used in func().
        return paddle.nn.functional.leaky_relu(x[0], negative_slope=0.2)

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.005
        alpha = 0.2
        dtype = np.float64
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = paddle.nn.functional.leaky_relu(x, alpha)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Push near-zero entries away from the kink at 0, where the
        # numeric gradient is ill-defined.
        x_arr[np.abs(x_arr) < 0.005] = 0.02
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        gradient_checker.double_grad_check_for_dygraph(
            self.leaky_relu_wrapper, [x], y, x_init=x_arr, place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            # NOTE(review): this *replaces* the CPU place instead of
            # appending like the sibling tests do, so only GPU is checked
            # when CUDA is available. Possibly intentional — verify.
            places = [fluid.CUDAPlace(0)]
        for p in places:
            self.func(p)
class TestELUDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for F.elu."""

    def elu_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker; alpha
        # matches the one used in func().
        return paddle.nn.functional.elu(x[0], alpha=0.2)

    @prog_scope()
    def func(self, place):
        """Builds the static program and runs both grad checks on `place`."""
        shape = [2, 4, 4, 4]
        eps = 1e-6
        alpha = 0.2
        dtype = np.float64
        SEED = 0
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = paddle.nn.functional.elu(x, alpha=alpha)
        # Bug fix: the original called np.random.RandomState(SEED) and
        # discarded the result, which seeds nothing. Draw from the seeded
        # generator so the check is deterministic as intended.
        rng = np.random.RandomState(SEED)
        x_arr = rng.uniform(-1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.elu_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestCELUDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for F.celu."""

    def celu_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker; alpha
        # matches the one used in func().
        return paddle.nn.functional.celu(x[0], alpha=0.2)

    @prog_scope()
    def func(self, place):
        """Builds the static program and runs both grad checks on `place`."""
        shape = [2, 4, 4, 4]
        eps = 1e-6
        alpha = 0.2
        dtype = np.float64
        SEED = 0
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = F.celu(x, alpha=alpha)
        # Bug fix: the original called np.random.RandomState(SEED) and
        # discarded the result, which seeds nothing. Draw from the seeded
        # generator so the check is deterministic as intended.
        rng = np.random.RandomState(SEED)
        x_arr = rng.uniform(-1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.celu_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestSqrtDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.sqrt."""

    def sqrt_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.sqrt(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0001
        dtype = np.float64
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = paddle.sqrt(x)
        # Strictly positive inputs: sqrt's gradient blows up near 0.
        x_arr = np.random.uniform(0.1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        gradient_checker.double_grad_check_for_dygraph(
            self.sqrt_wrapper, [x], y, x_init=x_arr, place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            # NOTE(review): replaces the CPU place instead of appending,
            # unlike most sibling tests. Possibly intentional — verify.
            places = [fluid.CUDAPlace(0)]
        for p in places:
            self.func(p)
class TestRsqrtDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.rsqrt."""

    def rsqrt_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.rsqrt(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0001
        dtype = np.float64
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = paddle.rsqrt(x)
        # Strictly positive inputs: rsqrt is undefined at and below 0.
        x_arr = np.random.uniform(0.1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        gradient_checker.double_grad_check_for_dygraph(
            self.rsqrt_wrapper, [x], y, x_init=x_arr, place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            # NOTE(review): replaces the CPU place instead of appending,
            # unlike most sibling tests. Possibly intentional — verify.
            places = [fluid.CUDAPlace(0)]
        for p in places:
            self.func(p)
class TestSquareDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.square."""

    def square_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.square(x[0])

    @prog_scope()
    def func(self, place):
        # The input shape must be fully specified (no -1 dimensions).
        data_shape = [2, 3, 7, 9]
        dtype = np.float64
        x = layers.data('x', data_shape, False, dtype)
        x.persistable = True
        y = paddle.square(x)
        x_init = np.random.uniform(-1, 1, data_shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_init, place=place, eps=0.005
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.square_wrapper, [x], y, x_init=x_init, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        devices = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            devices += [fluid.CUDAPlace(0)]
        for device in devices:
            self.func(device)
class TestAbsDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.abs.

    NOTE(review): this redefinition shadows the earlier class of the
    same name in this module, so only these tests run; consider
    renaming one of the two classes.
    """

    @prog_scope()
    def func(self, place):
        # the shape of input variable should be clearly specified, not inlcude -1.
        shape = [2, 3, 7, 9]
        eps = 1e-6
        dtype = np.float64
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = paddle.abs(x)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Because we set delta = 0.005 in calculating numeric gradient,
        # if x is too small, the numeric gradient is inaccurate.
        # we should avoid this
        x_arr[np.abs(x_arr) < 0.005] = 0.02
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestLogDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.log."""

    def log_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.log(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 1e-6
        dtype = np.float64
        x = layers.data('x', shape, False, dtype)
        x.persistable = True
        y = paddle.log(x)
        # Strictly positive inputs: log is undefined at and below 0.
        x_arr = np.random.uniform(0.1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.log_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestSinDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.sin."""

    def sin_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.sin(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.sin(x)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.sin_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestCosDoubleGradCheck(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.cos."""

    def cos_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.cos(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.cos(x)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.cos_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestPowDoubleGradCheck1(unittest.TestCase):
    """Double-grad (second-order gradient) check for paddle.pow(x, 2)."""

    def pow_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.pow(x[0], 2)

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 1e-6
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.pow(x, 2)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.pow_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestPowDoubleGradCheck2(unittest.TestCase):
    """Double-grad check for paddle.pow(x, 1), the identity exponent."""

    def pow_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.pow(x[0], 1)

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 1e-6
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.pow(x, 1)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        gradient_checker.double_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.pow_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestSinTripleGradCheck(unittest.TestCase):
    """Triple-grad (third-order gradient) check for paddle.sin."""

    def sin_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.sin(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.sin(x)
        x_arr = np.random.random(shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.sin_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestPowTripleGradCheck1(unittest.TestCase):
    """Triple-grad check for paddle.pow(x, 1), the identity exponent."""

    def pow_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.pow(x[0], 1)

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 1e-6
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.pow(x, 1)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.pow_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestPowTripleGradCheck2(unittest.TestCase):
    """Triple-grad (third-order gradient) check for paddle.pow(x, 2)."""

    def pow_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.pow(x[0], 2)

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 1e-6
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.pow(x, 2)
        x_arr = np.random.uniform(-1, 1, shape).astype(dtype)
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.pow_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
class TestPowTripleGradCheck3(unittest.TestCase):
    """Triple-grad (third-order gradient) check for paddle.pow(x, 4)."""

    def pow_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.pow(x[0], 4)

    @prog_scope()
    def func(self, place):
        data_shape = [2, 3, 7, 9]
        dtype = np.float64
        x = layers.data('x', data_shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.pow(x, 4)
        x_init = np.random.uniform(-1, 1, data_shape).astype(dtype)
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_init, place=place, eps=1e-6
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.pow_wrapper, [x], y, x_init=x_init, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        devices = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            devices += [fluid.CUDAPlace(0)]
        for device in devices:
            self.func(device)
class TestCosTripleGradCheck(unittest.TestCase):
    """Triple-grad (third-order gradient) check for paddle.cos."""

    def cos_wrapper(self, x):
        # Callable form expected by the dygraph gradient checker.
        return paddle.cos(x[0])

    @prog_scope()
    def func(self, place):
        shape = [2, 3, 7, 9]
        eps = 0.0005
        dtype = np.float64
        x = layers.data('x', shape, False, dtype=dtype)
        x.persistable = True
        y = paddle.cos(x)
        x_arr = np.random.random(shape).astype(dtype)
        # Keep inputs away from 0 so the finite-difference numeric
        # gradient stays accurate.
        x_arr[np.abs(x_arr) < 0.005] = 0.002
        gradient_checker.triple_grad_check(
            [x], y, x_init=x_arr, place=place, eps=eps
        )
        # Retain intermediate grads for the dygraph check; restore after.
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.cos_wrapper, [x], y, x_init=x_arr, place=place
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": False})

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)
# Run the unittest runner when executed as a script.
if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "da8c935ede90baa0b8dd117511b641d6",
"timestamp": "",
"source": "github",
"line_count": 744,
"max_line_length": 82,
"avg_line_length": 31.521505376344088,
"alnum_prop": 0.565410199556541,
"repo_name": "PaddlePaddle/Paddle",
"id": "e0c40e20021119dbd01e2f43e9a52cfb1b695a3a",
"size": "24065",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_activation_nn_grad.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
} |
import json
import os
import sys

import requests
from bottle import route, run, template, get, debug, static_file, request, response
# Enable bottle's debug mode (verbose error pages); disable for production.
debug(True)
# Default header set sent with every AsterixDB API request.
http_header = { "content-type": "application/json" }
# Core Routing
@route('/')
def jsontest():
    """Serve the main graph display page."""
    return template('graph_display')
    # return template('test')
@route('/static/<filename:path>')
def send_static(filename):
    """Serve static assets from the local `static` directory."""
    return static_file(filename, root='static')
# API Helpers
def build_response(endpoint, data):
    """Proxy a GET request to the local AsterixDB HTTP API.

    Returns the decoded JSON payload, or an empty list when the
    response body is not valid JSON.
    """
    api_endpoint = "http://localhost:19002/" + endpoint
    # Renamed from `response`, which shadowed bottle's `response` object
    # imported at the top of this module; also dropped stray semicolons.
    resp = requests.get(api_endpoint, params=data, headers=http_header)
    try:
        return resp.json()
    except ValueError:
        return []
# Upload file
# @route('/upload', method='POST')
# def run_upload_file():
# upload = request.files.get('upload')
# name, ext = os.path.splitext(upload.filename)
# if ext not in ('.adm','.txt'):
# return 'File extension not allowed.'
# file_path = "./graphFiles/";
# if not os.path.exists(file_path):
# os.makedirs(file_path)
# file_path_name = os.path.join(file_path, upload.filename);
# print file_path_name;
# with open(file_path_name, 'w') as open_file:
# open_file.write(upload.file.read())
# return '<p>OK</p>'
# endpoint = "ddl";
# data = {'ddl': 'drop dataverse OriginalGraph if exists; create dataverse OriginalGraph; use dataverse OriginalGraph; create type GraphType as open{source_node: int32, label: string, target_node:{{int32}}, weight:{{double}}} \n create dataset Graph(GraphType) primary key source_node;'}
# build_response(endpoint, data);
# endpoint = "update";
# updateQuery = 'use dataverse OriginalGraph; load dataset Graph using localfs(("path"="localhost://'+file_path_name+'"),("format"="adm"))';
# data = {'update' : updateQuery}
# build_response(endpint, data);
# Log In
@route('/logIn', method = 'POST')
def run_log_in():
asterix_host = "localhost"
asterix_port = 19002
user_id = request.forms.get('user_id');
password = request.forms.get('password');
#print password;
#print user_id+','+password
query_statement = 'use dataverse Account; for $n in dataset AccountInfo where $n.user_id='+str(user_id)+' return $n';
query = {
'query': query_statement
};
http_header = {
'content-type': 'application/json'
}
query_url = "http://" + asterix_host + ":" + str(asterix_port) + "/query"
try:
response = requests.get(query_url, params=query, headers=http_header)
result = str(response.json()["results"][0]);
# print result;
resultArray = result.split(', ');
resultLabel = resultArray[1];
resultPassword = resultArray[2];
labelArray = resultLabel.split(':');
label = labelArray[1].replace('"', '').strip();
passwordArray = resultPassword.split(':');
correctPassword = passwordArray[1].replace('"', '').replace('}', '').strip();
if(password==correctPassword):
print "correct"
return '<p id="returnResult">1</p><p id="returnLabel">'+ label +'</p>'
else:
print "error"
return '<p id="returnResult">0</p>'
except (ConnectionError, HTTPError):
print "Encountered connection error; stopping execution"
sys.exit(1)
return True
# API Endpoints
@route('/query')
def run_asterix_query():
    """Forward /query requests to the AsterixDB query endpoint."""
    params = dict(request.query)
    return build_response("query", params)
@route('/query/status')
def run_asterix_query_status():
    """Forward /query/status requests to the AsterixDB status endpoint."""
    params = dict(request.query)
    return build_response("query/status", params)
@route('/query/result')
def run_asterix_query_result():
    """Forward /query/result requests to the AsterixDB result endpoint."""
    params = dict(request.query)
    return build_response("query/result", params)
@route('/ddl')
def run_asterix_ddl():
    """Forward /ddl requests to the AsterixDB DDL endpoint."""
    params = dict(request.query)
    return build_response("ddl", params)
@route('/update')
def run_asterix_update():
    """Forward /update requests to the AsterixDB update endpoint."""
    params = dict(request.query)
    return build_response("update", params)
# res = bootstrap.bootstrap()
# Start bottle's built-in development server.
run(host='localhost', port=8081, debug=True)
| {
"content_hash": "5004aff903225e43186575042c6db3c2",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 291,
"avg_line_length": 33.4,
"alnum_prop": 0.6304890219560878,
"repo_name": "zhiminJimmyXiang/Pregelix_Graph_UI",
"id": "c8264be31c3e130aa61bd7b3fe68c7fe54dc5221",
"size": "4027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run_graph_display.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4955"
},
{
"name": "JavaScript",
"bytes": "69449"
},
{
"name": "PHP",
"bytes": "25856"
},
{
"name": "Python",
"bytes": "142002"
}
],
"symlink_target": ""
} |
from rest_framework import test
from waldur_openstack.openstack.tests import fixtures
class TenantQuotasTest(test.APITransactionTestCase):
    """Tenant quota usage must be mirrored on the owning project and customer."""

    def setUp(self):
        super(TenantQuotasTest, self).setUp()
        self.fixture = fixtures.OpenStackFixture()
        self.tenant = self.fixture.tenant
        self.project = self.fixture.project
        self.customer = self.fixture.customer

    def _assert_quota_usage(self, scope, expected):
        # Check every aggregate quota on `scope` against its expected usage.
        for name, value in expected.items():
            self.assertEqual(scope.quotas.get(name=name).usage, value)

    def test_quotas_for_tenant_are_created_for_project_and_customer(self):
        zeros = {
            'vpc_cpu_count': 0,
            'vpc_ram_size': 0,
            'vpc_storage_size': 0,
            'vpc_floating_ip_count': 0,
            'vpc_instance_count': 0,
        }
        self._assert_quota_usage(self.project, zeros)
        self._assert_quota_usage(self.customer, zeros)

    def test_quotas_for_tenant_are_increased_for_project_and_customer(self):
        # Bump the tenant-level quotas; the vpc_* aggregates on project and
        # customer must reflect the same usage.
        for name, usage in (
            ('vcpu', 1),
            ('ram', 1024),
            ('storage', 102400),
            ('floating_ip_count', 2),
            ('instances', 1),
        ):
            self.tenant.set_quota_usage(name, usage)
        expected = {
            'vpc_cpu_count': 1,
            'vpc_ram_size': 1024,
            'vpc_storage_size': 102400,
            'vpc_floating_ip_count': 2,
            'vpc_instance_count': 1,
        }
        self._assert_quota_usage(self.project, expected)
        self._assert_quota_usage(self.customer, expected)
| {
"content_hash": "a1bbcb812512bb194f0325ef34ab9b83",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 88,
"avg_line_length": 51.68,
"alnum_prop": 0.6845975232198143,
"repo_name": "opennode/waldur-mastermind",
"id": "6671a329eb2b8a2a9d2ca13e0c2945c27fe57c3c",
"size": "2584",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/waldur_openstack/openstack/tests/test_quotas.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4429"
},
{
"name": "Dockerfile",
"bytes": "6258"
},
{
"name": "HTML",
"bytes": "42329"
},
{
"name": "JavaScript",
"bytes": "729"
},
{
"name": "Python",
"bytes": "5520019"
},
{
"name": "Shell",
"bytes": "15429"
}
],
"symlink_target": ""
} |
from setuptools import setup

# Read the long description once at packaging time; the context manager
# guarantees the file handle is closed even if reading fails.
with open('README.md') as readme_file:
    long_description = readme_file.read()

setup(
    name='scrapy-tracker',
    version='0.1.0',
    url='https://github.com/vkastyniuk/scrapy-tracker',
    description='A scrapy plugin to track item updates',
    long_description=long_description,
    author='Viachaslau Kastyniuk',
    maintainer='Viachaslau Kastyniuk',
    maintainer_email='viachaslau.kastyniuk@gmail.com',
    license='BSD',
    packages=['scrapy_tracker'],
    zip_safe=False,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Framework :: Scrapy',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    # BUG FIX: the original used ``requires=``, the legacy PEP 314 metadata
    # keyword that pip ignores entirely, so installing this package never
    # pulled in scrapy/six.  ``install_requires`` is the setuptools keyword
    # that actually declares runtime dependencies.
    install_requires=['scrapy', 'six'],
    extras_require={
        'sql': [
            'SQLAlchemy>=1.0.0',
            'cachetools'
        ],
        'redis': [
            'redis'
        ]
    }
)
| {
"content_hash": "a2022d055bc01f6a4761536e0f0ae322",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 79,
"avg_line_length": 33.42857142857143,
"alnum_prop": 0.5769230769230769,
"repo_name": "vkastyniuk/scrapy-tracker",
"id": "1adde34e6fd0c82d5918251f4455152db05dd055",
"size": "1426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "17489"
}
],
"symlink_target": ""
} |
from django import forms
from djmoney.forms.fields import MoneyField
from foobar.wallet import api as wallet_api
from django.utils.translation import ugettext as _
from .. import api
class CorrectionForm(forms.Form):
    """Form for setting a corrected balance value.

    ``balance`` is the new balance (must be non-negative);
    ``comment`` is an optional note, up to 128 characters.
    """
    balance = MoneyField(label=_('Balance'), min_value=0)
    comment = forms.CharField(label=_('Comment'),
                              max_length=128,
                              required=False)
class DepositForm(forms.Form):
    """Form for a deposit to, or withdrawal from, a wallet.

    A negative amount is treated as a withdrawal and is validated
    against the wallet's current balance.
    """

    deposit_or_withdrawal = MoneyField(max_digits=10, decimal_places=2)
    comment = forms.CharField(max_length=128, required=False)

    def __init__(self, *args, **kwargs):
        # The wallet owner is supplied by the view, not by the user.
        self.owner_id = kwargs.pop('owner_id')
        super().__init__(*args, **kwargs)

    def clean_deposit_or_withdrawal(self):
        """Reject withdrawals larger than the current wallet balance."""
        amount = self.cleaned_data['deposit_or_withdrawal']
        wallet, balance = wallet_api.get_balance(self.owner_id)
        is_withdrawal = amount.amount < 0
        if is_withdrawal and -amount > balance:
            raise forms.ValidationError(_('Not enough funds'))
        return amount
class EditProfileForm(forms.Form):
    """Form for editing an account's display name and e-mail address.

    NOTE(review): field labels use ``ugettext`` (imported as ``_`` at the
    top of this module), which translates once at import time; consider
    ``ugettext_lazy`` — confirm against the project's i18n setup.
    """

    name = forms.CharField(label=_('Account Name'), max_length=128)
    email = forms.EmailField(label=_('E-mail'))

    def __init__(self, *args, **kwargs):
        # The account being edited is supplied by the view.
        self.account = kwargs.pop('account')
        super().__init__(*args, **kwargs)

    def clean_email(self):
        """Reject an e-mail already used by a *different* account."""
        new_email = self.cleaned_data.get('email')
        existing = api.get_account(email=new_email)
        taken_by_other = existing is not None and existing.id != self.account.id
        if taken_by_other:
            raise forms.ValidationError(_('This e-mail is already in use.'))
        return new_email
| {
"content_hash": "79f549a2b03bf39296f5f2dd96e78cbb",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 76,
"avg_line_length": 35.95454545454545,
"alnum_prop": 0.634007585335019,
"repo_name": "uppsaladatavetare/foobar-api",
"id": "d020908fa84f1f3387610d3be871490019ebfe59",
"size": "1582",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/foobar/views/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3317"
},
{
"name": "HTML",
"bytes": "10880"
},
{
"name": "JavaScript",
"bytes": "10604"
},
{
"name": "Makefile",
"bytes": "796"
},
{
"name": "Python",
"bytes": "318730"
}
],
"symlink_target": ""
} |
"""
FILE: queue_samples_hello_world_async.py
DESCRIPTION:
These samples demonstrate common scenarios like instantiating a client,
creating a queue, and sending and receiving messages.
USAGE:
python queue_samples_hello_world_async.py
Set the environment variables with your own values before running the sample:
1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account
"""
import asyncio
import os
class QueueHelloWorldSamplesAsync(object):
    """Async hello-world samples: instantiate a queue service client,
    then create a queue, send/receive messages, and delete the queue."""

    # Taken from the environment; the samples fail if
    # AZURE_STORAGE_CONNECTION_STRING is unset (value would be None).
    connection_string = os.getenv("AZURE_STORAGE_CONNECTION_STRING")

    async def create_client_with_connection_string_async(self):
        """Create a QueueServiceClient and fetch the service properties."""
        # Instantiate the QueueServiceClient from a connection string
        from azure.storage.queue.aio import QueueServiceClient
        queue_service = QueueServiceClient.from_connection_string(conn_str=self.connection_string)

        # Get queue service properties
        # ``async with`` closes the client's transport when the block exits.
        async with queue_service:
            properties = await queue_service.get_service_properties()

    async def queue_and_messages_example_async(self):
        """Create "myqueue", send two messages, read them back, and
        delete the queue again (even if sending/receiving fails)."""
        # Instantiate the QueueClient from a connection string
        from azure.storage.queue.aio import QueueClient
        queue = QueueClient.from_connection_string(conn_str=self.connection_string, queue_name="myqueue")

        async with queue:
            # Create the queue
            # [START async_create_queue]
            await queue.create_queue()
            # [END async_create_queue]
            try:
                # Send messages concurrently via asyncio.gather.
                await asyncio.gather(
                    queue.send_message("I'm using queues!"),
                    queue.send_message("This is my second message")
                )

                # Receive the messages
                response = queue.receive_messages(messages_per_page=2)

                # Print the content of the messages
                async for message in response:
                    print(message.content)
            finally:
                # The queue is always cleaned up, even on error above.
                # [START async_delete_queue]
                await queue.delete_queue()
                # [END async_delete_queue]
async def main():
    """Run every async hello-world queue sample in sequence."""
    samples = QueueHelloWorldSamplesAsync()
    await samples.create_client_with_connection_string_async()
    await samples.queue_and_messages_example_async()


if __name__ == '__main__':
    asyncio.run(main())
| {
"content_hash": "2b10b773c5f8f2624b8906a8184bd806",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 105,
"avg_line_length": 33.31428571428572,
"alnum_prop": 0.6457975986277873,
"repo_name": "Azure/azure-sdk-for-python",
"id": "bc33a36a29a97c12bf2bc7532b362e045e441385",
"size": "2660",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/storage/azure-storage-queue/samples/queue_samples_hello_world_async.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from helper import greeting
if __name__ == "__main__":
    # Demo entry point: greet with a fixed farewell word.
    farewell = 'goodbye'
    greeting(farewell)
"content_hash": "c3acea24c4de47c7fe34d8ac1c3b1cdb",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 27,
"avg_line_length": 17.8,
"alnum_prop": 0.5730337078651685,
"repo_name": "Ansist/cs3240-labdemo",
"id": "c508fa3fa4f025f38f30554d4953ebf2360c591a",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "goodbye.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "299"
}
],
"symlink_target": ""
} |
from ConfigParser import ConfigParser
from ConfigParser import NoOptionError, NoSectionError
import errno
import logging
import os
class AirflowConfigException(Exception):
    """Raised for configuration problems: a missing section/key, a value
    that cannot be parsed as a boolean, or failure to create the
    configuration directory."""
# Fallback values consulted by ConfigParserWithDefaults.get() when a
# section/key is absent from the parsed airflow.cfg, keyed as
# defaults[section][key].
defaults = {
    'core': {
        'unit_test_mode': False,
        'parallelism': 32,
        'load_examples': True,
        'plugins_folder': None,
    },
    'webserver': {
        'base_url': 'http://localhost:8080',
        'web_server_host': '0.0.0.0',
        'web_server_port': '8080',
        'authenticate': False,
        'demo_mode': False,
        'secret_key': 'airflowified',
    },
    'scheduler': {
        'statsd_on': False,
        'statsd_host': 'localhost',
        'statsd_port': 8125,
        'statsd_prefix': 'airflow',
        'job_heartbeat_sec': 5,
        'scheduler_heartbeat_sec': 60,
        'authenticate': False,
    },
    'celery': {
        'default_queue': 'default',
    },
}
# Template for a brand-new airflow.cfg, written below the first time
# Airflow runs without an existing config file.  The {AIRFLOW_HOME}
# placeholders are substituted via str.format() at write time; the
# '#' lines are part of the generated file's contents, not comments here.
DEFAULT_CONFIG = """\
[core]
# The home folder for airflow, default is ~/airflow
airflow_home = {AIRFLOW_HOME}
# The folder where you airflow pipelines live, most likely a
# subfolder in a code repository
dags_folder = {AIRFLOW_HOME}/dags
# The folder where airflow should store its log files
base_log_folder = {AIRFLOW_HOME}/logs
# The executor class that airflow should use. Choices include
# SequentialExecutor, LocalExecutor, CeleryExecutor
executor = SequentialExecutor
# The SqlAlchemy connection string to the metadata database.
# SqlAlchemy supports many different database engine, more information
# their website
sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/airflow.db
# The amount of parallelism as a setting to the executor. This defines
# the max number of task instances that should run simultaneously
# on this airflow installation
parallelism = 32
# Whether to load the examples that ship with Airflow. It's good to
# get started, but you probably want to set this to False in a production
# environment
load_examples = True
# Where your Airflow plugins are stored
plugins_folder = {AIRFLOW_HOME}/plugins
[webserver]
# The base url of your website as airflow cannot guess what domain or
# cname you are using. This is use in automated emails that
# airflow sends to point links to the right web server
base_url = http://localhost:8080
# The ip specified when starting the web server
web_server_host = 0.0.0.0
# The port on which to run the web server
web_server_port = 8080
# Secret key used to run your flask app
secret_key = temporary_key
[smtp]
# If you want airflow to send emails on retries, failure, and you want to
# the airflow.utils.send_email function, you have to configure an smtp
# server here
smtp_host = localhost
smtp_user = airflow
smtp_port = 25
smtp_password = airflow
smtp_mail_from = airflow@airflow.com
[celery]
# This section only applies if you are using the CeleryExecutor in
# [core] section above
# The app name that will be used by celery
celery_app_name = airflow.executors.celery_executor
# The concurrency that will be used when starting workers with the
# "airflow worker" command. This defines the number of task instances that
# a worker will take, so size up your workers based on the resources on
# your worker box and the nature of your tasks
celeryd_concurrency = 16
# When you start an airflow worker, airflow starts a tiny web server
# subprocess to serve the workers local log files to the airflow main
# web server, who then builds pages and sends them to users. This defines
# the port on which the logs are served. It needs to be unused, and open
# visible from the main web server to connect into the workers.
worker_log_server_port = 8793
# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally
# a sqlalchemy database. Refer to the Celery documentation for more
# information.
broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
# Another key Celery setting
celery_result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start
# it `airflow flower`. This defines the port that Celery Flower runs on
flower_port = 8383
# Default queue that tasks get assigned to and that worker listen on.
default_queue = default
[scheduler]
# Task instances listen for external kill signal (when you clear tasks
# from the CLI or the UI), this defines the frequency at which they should
# listen (in seconds).
job_heartbeat_sec = 5
# The scheduler constantly tries to trigger new tasks (look at the
# scheduler section in the docs for more information). This defines
# how often the scheduler should run (in seconds).
scheduler_heartbeat_sec = 5
# Statsd (https://github.com/etsy/statsd) integration settings
# statsd_on = False
# statsd_host = localhost
# statsd_port = 8125
# statsd_prefix = airflow
"""
# Minimal config template written to unittests.cfg and loaded by
# test_mode(); {AIRFLOW_HOME} placeholders are filled via str.format().
TEST_CONFIG = """\
[core]
airflow_home = {AIRFLOW_HOME}
dags_folder = {AIRFLOW_HOME}/dags
base_log_folder = {AIRFLOW_HOME}/logs
executor = SequentialExecutor
sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/unittests.db
unit_test_mode = True
load_examples = True
[webserver]
base_url = http://localhost:8080
web_server_host = 0.0.0.0
web_server_port = 8080
[smtp]
smtp_host = localhost
smtp_user = airflow
smtp_port = 25
smtp_password = airflow
smtp_mail_from = airflow@airflow.com
[celery]
celery_app_name = airflow.executors.celery_executor
celeryd_concurrency = 16
worker_log_server_port = 8793
broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
celery_result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
flower_port = 5555
default_queue = default
[scheduler]
job_heartbeat_sec = 1
scheduler_heartbeat_sec = 5
authenticate = true
"""
class ConfigParserWithDefaults(ConfigParser):
def __init__(self, defaults, *args, **kwargs):
self.defaults = defaults
ConfigParser.__init__(self, *args, **kwargs)
def get(self, section, key):
section = str(section).lower()
key = str(key).lower()
d = self.defaults
try:
return ConfigParser.get(self, section, key)
except:
if section not in d or key not in d[section]:
raise AirflowConfigException(
"section/key [{section}/{key}] not found "
"in config".format(**locals()))
else:
return d[section][key]
def getboolean(self, section, key):
val = str(self.get(section, key)).lower().strip()
if '#' in val:
val = val.split('#')[0].strip()
if val == "true":
return True
elif val == "false":
return False
else:
raise AirflowConfigException("Not a boolean.")
def getint(self, section, key):
return int(self.get(section, key))
def mkdir_p(path):
    """Create *path* like ``mkdir -p``: parents included, and no error if
    it already exists as a directory.  Any other failure is reported as
    an AirflowConfigException."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        already_there = exc.errno == errno.EEXIST and os.path.isdir(path)
        if not already_there:
            raise AirflowConfigException('Had trouble creating a directory')
"""
Setting AIRFLOW_HOME and AIRFLOW_CONFIG from environment variables, using
"~/airflow" and "~/airflow/airflow.cfg" respectively as defaults.
"""
if 'AIRFLOW_HOME' not in os.environ:
AIRFLOW_HOME = os.path.expanduser('~/airflow')
else:
AIRFLOW_HOME = os.path.expanduser(os.environ['AIRFLOW_HOME'])
mkdir_p(AIRFLOW_HOME)
if 'AIRFLOW_CONFIG' not in os.environ:
if os.path.isfile(os.path.expanduser('~/airflow.cfg')):
AIRFLOW_CONFIG = os.path.expanduser('~/airflow.cfg')
else:
AIRFLOW_CONFIG = AIRFLOW_HOME + '/airflow.cfg'
else:
AIRFLOW_CONFIG = os.environ['AIRFLOW_CONFIG']
if not os.path.isfile(AIRFLOW_CONFIG):
"""
These configuration options are used to generate a default configuration
when it is missing. The right way to change your configuration is to alter
your configuration file, not this code.
"""
logging.info("Creating new config file in: " + AIRFLOW_CONFIG)
f = open(AIRFLOW_CONFIG, 'w')
f.write(DEFAULT_CONFIG.format(**locals()))
f.close()
TEST_CONFIG_FILE = AIRFLOW_HOME + '/unittests.cfg'
if not os.path.isfile(TEST_CONFIG_FILE):
logging.info("Creating new config file in: " + TEST_CONFIG_FILE)
f = open(TEST_CONFIG_FILE, 'w')
f.write(TEST_CONFIG.format(**locals()))
f.close()
logging.info("Reading the config from " + AIRFLOW_CONFIG)
def test_mode():
    """Switch the module-level ``conf`` to the unit-test configuration.

    BUG FIX: the original called ``conf.read(TEST_CONFIG)``, passing the
    config template *contents* where ConfigParser.read() expects a file
    *path*, so nothing was ever loaded; and it bound the new parser to a
    local ``conf``, leaving the module-level parser untouched.  Reading
    TEST_CONFIG_FILE and declaring ``global conf`` makes the switch real.
    """
    global conf
    conf = ConfigParserWithDefaults(defaults)
    conf.read(TEST_CONFIG_FILE)
# Module-level parser imported by the rest of airflow; loads the real
# airflow.cfg at import time.
conf = ConfigParserWithDefaults(defaults)
conf.read(AIRFLOW_CONFIG)
| {
"content_hash": "7291cc26feddb487d496e1e0e68a5279",
"timestamp": "",
"source": "github",
"line_count": 281,
"max_line_length": 78,
"avg_line_length": 30.177935943060497,
"alnum_prop": 0.69375,
"repo_name": "robbwagoner/airflow",
"id": "94bcb0e30fb7eb198c41e127c4c541934054fd01",
"size": "8480",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "airflow/configuration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36075"
},
{
"name": "HTML",
"bytes": "86636"
},
{
"name": "JavaScript",
"bytes": "894248"
},
{
"name": "Python",
"bytes": "374332"
},
{
"name": "Shell",
"bytes": "365"
}
],
"symlink_target": ""
} |
class Command(object):
    """Defines constants for the standard WebDriver commands.

    While these constants have no meaning in and of themselves, they are
    used to marshal commands through a service that implements WebDriver's
    remote wire protocol:

        http://code.google.com/p/selenium/wiki/JsonWireProtocol
    """
    # Keep in sync with org.openqa.selenium.remote.DriverCommand
    # IDIOM FIX: removed the stray trailing semicolons from the advanced
    # user-interaction constants; values are unchanged.
    NEW_SESSION = "newSession"
    DELETE_SESSION = "deleteSession"
    CLOSE = "close"
    QUIT = "quit"
    GET = "get"
    GO_BACK = "goBack"
    GO_FORWARD = "goForward"
    REFRESH = "refresh"
    ADD_COOKIE = "addCookie"
    GET_COOKIE = "getCookie"
    GET_ALL_COOKIES = "getCookies"
    DELETE_COOKIE = "deleteCookie"
    DELETE_ALL_COOKIES = "deleteAllCookies"
    FIND_ELEMENT = "findElement"
    FIND_ELEMENTS = "findElements"
    FIND_CHILD_ELEMENT = "findChildElement"
    FIND_CHILD_ELEMENTS = "findChildElements"
    CLEAR_ELEMENT = "clearElement"
    CLICK_ELEMENT = "clickElement"
    HOVER_OVER_ELEMENT = "hoverOverElement"
    SEND_KEYS_TO_ELEMENT = "sendKeysToElement"
    SEND_MODIFIER_KEY_TO_ACTIVE_ELEMENT = "sendModifierKeyToActiveElement"
    SUBMIT_ELEMENT = "submitElement"
    TOGGLE_ELEMENT = "toggleElement"
    GET_CURRENT_WINDOW_HANDLE = "getCurrentWindowHandle"
    GET_WINDOW_HANDLES = "getWindowHandles"
    SWITCH_TO_WINDOW = "switchToWindow"
    SWITCH_TO_FRAME = "switchToFrame"
    GET_ACTIVE_ELEMENT = "getActiveElement"
    GET_CURRENT_URL = "getCurrentUrl"
    GET_PAGE_SOURCE = "getPageSource"
    GET_TITLE = "getTitle"
    EXECUTE_SCRIPT = "executeScript"
    GET_SPEED = "getSpeed"
    SET_SPEED = "setSpeed"
    SET_BROWSER_VISIBLE = "setBrowserVisible"
    IS_BROWSER_VISIBLE = "isBrowserVisible"
    GET_ELEMENT_TEXT = "getElementText"
    GET_ELEMENT_VALUE = "getElementValue"
    GET_ELEMENT_TAG_NAME = "getElementTagName"
    SET_ELEMENT_SELECTED = "setElementSelected"
    DRAG_ELEMENT = "dragElement"
    IS_ELEMENT_SELECTED = "isElementSelected"
    IS_ELEMENT_ENABLED = "isElementEnabled"
    IS_ELEMENT_DISPLAYED = "isElementDisplayed"
    GET_ELEMENT_LOCATION = "getElementLocation"
    GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW = (
        "getElementLocationOnceScrolledIntoView")
    GET_ELEMENT_SIZE = "getElementSize"
    GET_ELEMENT_ATTRIBUTE = "getElementAttribute"
    GET_ELEMENT_VALUE_OF_CSS_PROPERTY = "getElementValueOfCssProperty"
    ELEMENT_EQUALS = "elementEquals"
    SCREENSHOT = "screenshot"
    IMPLICIT_WAIT = "implicitlyWait"
    EXECUTE_ASYNC_SCRIPT = "executeAsyncScript"
    SET_SCRIPT_TIMEOUT = "setScriptTimeout"

    # Alerts
    DISMISS_ALERT = "dismissAlert"
    ACCEPT_ALERT = "acceptAlert"
    SET_ALERT_VALUE = "setAlertValue"
    GET_ALERT_TEXT = "getAlertText"

    # Advanced user interactions
    CLICK = "mouseClick"
    DOUBLE_CLICK = "mouseDoubleClick"
    MOUSE_DOWN = "mouseButtonDown"
    MOUSE_UP = "mouseButtonUp"
    MOVE_TO = "mouseMoveTo"
| {
"content_hash": "62191698ee810df749772bc69daa6dc2",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 74,
"avg_line_length": 36.8875,
"alnum_prop": 0.7017960013554727,
"repo_name": "hali4ka/robotframework-selenium2library",
"id": "6a0f3ee52b226ae774a6687436737fbc8f4545d2",
"size": "3565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Selenium2Library/lib/selenium-2.8.1/py/selenium/webdriver/remote/command.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "9719"
},
{
"name": "Python",
"bytes": "514078"
},
{
"name": "Ruby",
"bytes": "25068"
},
{
"name": "Shell",
"bytes": "82"
}
],
"symlink_target": ""
} |
class ReduceContext:
    """Context for one reduce invocation: the current key, the current
    value, and an iterator over the values belonging to that key."""

    def __init__(self,key,iterator):
        self.key = key
        self.value = None   # populated later by loadNextValue()
        self.iterator = iterator;
        return

    def loadNextValue(self):
        # NOTE(review): ``self.keysList`` and ``self.valuesList`` are never
        # defined anywhere in this class, so calling this method raises
        # AttributeError.  It presumably should advance ``self.iterator``
        # instead — confirm the intended semantics before fixing.
        self.key = self.keysList.pop();
        self.value = self.valuesList.pop();
        return
| {
"content_hash": "f06a2befc4e04d10735385184e106ccf",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 43,
"avg_line_length": 22.076923076923077,
"alnum_prop": 0.5679442508710801,
"repo_name": "neosky2142/PyMR",
"id": "098a4be270648b8b3d1c11fc9da18ed4fb2f6bd1",
"size": "287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ReduceContext.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37599"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Auto-generated migration: alters Post.owner to a ForeignKey on the
    configured user model with related_name 'post_owner'."""

    dependencies = [
        ('entities', '0002_auto_20150316_1433'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='owner',
            field=models.ForeignKey(related_name='post_owner', to=settings.AUTH_USER_MODEL),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "6bdb891a841f78c239d8bdeb9fc10399",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 92,
"avg_line_length": 24.3,
"alnum_prop": 0.6193415637860082,
"repo_name": "fabteam1/komsukomsuhuhu",
"id": "4945fb7345435aabe35eb7c795ffed6695a3a2b7",
"size": "510",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "komsukomsuhuu/entities/migrations/0003_auto_20150319_1332.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "30298"
},
{
"name": "HTML",
"bytes": "111904"
},
{
"name": "JavaScript",
"bytes": "39265"
},
{
"name": "Python",
"bytes": "64768"
}
],
"symlink_target": ""
} |
"""Tests for samba.samba3."""
from samba.samba3 import (
Registry,
WinsDatabase,
IdmapDatabase,
)
from samba.samba3 import passdb
from samba.samba3 import param as s3param
from samba.tests import TestCase, TestCaseInTempDir
from samba.dcerpc.security import dom_sid
import os
# Locate the samba3 test data directory.  The relative depth differs
# depending on where the tests are run from, so try both candidates and
# keep the first one that exists.
# NOTE(review): if neither candidate exists, DATADIR silently keeps the
# last (non-existent) candidate.
for p in [ "../../../../../testdata/samba3", "../../../../testdata/samba3" ]:
    DATADIR = os.path.join(os.path.dirname(__file__), p)
    if os.path.exists(DATADIR):
        break
class RegistryTestCase(TestCase):
    """Tests reading a samba3 registry database from the test data."""

    def setUp(self):
        super(RegistryTestCase, self).setUp()
        self.registry = Registry(os.path.join(DATADIR, "registry"))

    def tearDown(self):
        self.registry.close()
        super(RegistryTestCase, self).tearDown()

    def test_length(self):
        self.assertEquals(28, len(self.registry))

    def test_keys(self):
        self.assertTrue("HKLM" in self.registry.keys())

    def test_subkeys(self):
        self.assertEquals(["SOFTWARE", "SYSTEM"], self.registry.subkeys("HKLM"))

    def test_values(self):
        # Values come back as (type, raw_bytes) tuples — presumably
        # registry value types (string / DWORD); confirm against the
        # samba3 Registry implementation.
        self.assertEquals({'DisplayName': (1L, 'E\x00v\x00e\x00n\x00t\x00 \x00L\x00o\x00g\x00\x00\x00'),
                           'ErrorControl': (4L, '\x01\x00\x00\x00')},
                          self.registry.values("HKLM/SYSTEM/CURRENTCONTROLSET/SERVICES/EVENTLOG"))
class PassdbTestCase(TestCaseInTempDir):
    """Tests reading a samba3 tdbsam passdb, plus group/alias mappings.

    The test data is copied into a temp dir in setUp because the passdb
    layer opens some of its files read-write.
    """

    def setUp(self):
        super(PassdbTestCase, self).setUp()
        # NOTE(review): os.system with interpolated paths would be safer
        # as shutil.copytree; paths here are test-controlled.
        os.system("cp -r %s %s" % (DATADIR, self.tempdir))
        datadir = os.path.join(self.tempdir, "samba3")

        self.lp = s3param.get_context()
        self.lp.load(os.path.join(datadir, "smb.conf"))
        # Point every state directory at the copied test data.
        self.lp.set("private dir", datadir)
        self.lp.set("state directory", datadir)
        self.lp.set("lock directory", datadir)
        self.lp.set("cache directory", datadir)
        passdb.set_secrets_dir(datadir)
        self.pdb = passdb.PDB("tdbsam")

    def tearDown(self):
        self.lp = []
        self.pdb = []
        # NOTE(review): shutil.rmtree would avoid shelling out here.
        os.system("rm -rf %s" % os.path.join(self.tempdir, "samba3"))
        super(PassdbTestCase, self).tearDown()

    def test_param(self):
        # Values loaded from the copied smb.conf.
        self.assertEquals("BEDWYR", self.lp.get("netbios name"))
        self.assertEquals("SAMBA", self.lp.get("workgroup"))
        self.assertEquals("USER", self.lp.get("security"))

    def test_policy(self):
        policy = self.pdb.get_account_policy()
        self.assertEquals(0, policy['bad lockout attempt'])
        self.assertEquals(-1, policy['disconnect time'])
        self.assertEquals(0, policy['lockout duration'])
        self.assertEquals(999999999, policy['maximum password age'])
        self.assertEquals(0, policy['minimum password age'])
        self.assertEquals(5, policy['min password length'])
        self.assertEquals(0, policy['password history'])
        self.assertEquals(0, policy['refuse machine password change'])
        self.assertEquals(0, policy['reset count minutes'])
        self.assertEquals(0, policy['user must logon to change password'])

    def test_get_sid(self):
        domain_sid = passdb.get_global_sam_sid()
        self.assertEquals(dom_sid("S-1-5-21-2470180966-3899876309-2637894779"), domain_sid)

    def test_usernames(self):
        userlist = self.pdb.search_users(0)
        self.assertEquals(3, len(userlist))

    def test_getuser(self):
        # Exhaustively checks every field of the stored "root" account.
        user = self.pdb.getsampwnam("root")
        self.assertEquals(16, user.acct_ctrl)
        self.assertEquals("", user.acct_desc)
        self.assertEquals(0, user.bad_password_count)
        self.assertEquals(0, user.bad_password_time)
        self.assertEquals(0, user.code_page)
        self.assertEquals(0, user.country_code)
        self.assertEquals("", user.dir_drive)
        self.assertEquals("BEDWYR", user.domain)
        self.assertEquals("root", user.full_name)
        self.assertEquals(dom_sid('S-1-5-21-2470180966-3899876309-2637894779-513'), user.group_sid)
        self.assertEquals("\\\\BEDWYR\\root", user.home_dir)
        self.assertEquals([-1 for i in range(21)], user.hours)
        self.assertEquals(21, user.hours_len)
        self.assertEquals(9223372036854775807, user.kickoff_time)
        self.assertEquals(None, user.lanman_passwd)
        self.assertEquals(9223372036854775807, user.logoff_time)
        self.assertEquals(0, user.logon_count)
        self.assertEquals(168, user.logon_divs)
        self.assertEquals("", user.logon_script)
        self.assertEquals(0, user.logon_time)
        self.assertEquals("", user.munged_dial)
        self.assertEquals('\x87\x8d\x80\x14`l\xda)gzD\xef\xa15?\xc7', user.nt_passwd)
        self.assertEquals("", user.nt_username)
        self.assertEquals(1125418267, user.pass_can_change_time)
        self.assertEquals(1125418267, user.pass_last_set_time)
        self.assertEquals(2125418266, user.pass_must_change_time)
        self.assertEquals(None, user.plaintext_passwd)
        self.assertEquals("\\\\BEDWYR\\root\\profile", user.profile_path)
        self.assertEquals(None, user.pw_history)
        self.assertEquals(dom_sid("S-1-5-21-2470180966-3899876309-2637894779-1000"), user.user_sid)
        self.assertEquals("root", user.username)
        self.assertEquals("", user.workstations)

    def test_group_length(self):
        grouplist = self.pdb.enum_group_mapping()
        self.assertEquals(13, len(grouplist))

    def test_get_group(self):
        group = self.pdb.getgrsid(dom_sid("S-1-5-32-544"))
        self.assertEquals("Administrators", group.nt_name)
        self.assertEquals(-1, group.gid)
        self.assertEquals(5, group.sid_name_use)

    def test_groupsids(self):
        # All the well-known builtin SIDs must be present in the mapping.
        grouplist = self.pdb.enum_group_mapping()
        sids = []
        for g in grouplist:
            sids.append(str(g.sid))
        self.assertTrue("S-1-5-32-544" in sids)
        self.assertTrue("S-1-5-32-545" in sids)
        self.assertTrue("S-1-5-32-546" in sids)
        self.assertTrue("S-1-5-32-548" in sids)
        self.assertTrue("S-1-5-32-549" in sids)
        self.assertTrue("S-1-5-32-550" in sids)
        self.assertTrue("S-1-5-32-551" in sids)

    def test_alias_length(self):
        aliaslist = self.pdb.search_aliases()
        self.assertEquals(1, len(aliaslist))
        self.assertEquals("Jelmers NT Group", aliaslist[0]['account_name'])
class WinsDatabaseTestCase(TestCase):
    """Tests parsing the samba3 wins.dat database from the test data."""

    def setUp(self):
        super(WinsDatabaseTestCase, self).setUp()
        self.winsdb = WinsDatabase(os.path.join(DATADIR, "wins.dat"))

    def tearDown(self):
        self.winsdb.close()
        super(WinsDatabaseTestCase, self).tearDown()

    def test_length(self):
        self.assertEqual(22, len(self.winsdb))

    def test_first_entry(self):
        self.assertEqual((1124185120, ["192.168.1.5"], 0x64),
                         self.winsdb["ADMINISTRATOR#03"])
class IdmapDbTestCase(TestCase):
    """Tests for reading the winbindd_idmap database from the test data."""

    def setUp(self):
        super(IdmapDbTestCase, self).setUp()
        idmap_path = os.path.join(DATADIR, "winbindd_idmap")
        self.idmapdb = IdmapDatabase(idmap_path)

    def tearDown(self):
        self.idmapdb.close()
        super(IdmapDbTestCase, self).tearDown()

    def test_user_hwm(self):
        self.assertEqual(10000, self.idmapdb.get_user_hwm())

    def test_group_hwm(self):
        self.assertEqual(10002, self.idmapdb.get_group_hwm())

    def test_uids(self):
        self.assertEqual(1, len(list(self.idmapdb.uids())))

    def test_gids(self):
        self.assertEqual(3, len(list(self.idmapdb.gids())))

    def test_get_user_sid(self):
        self.assertEqual(
            "S-1-5-21-58189338-3053988021-627566699-501",
            self.idmapdb.get_user_sid(65534))

    def test_get_group_sid(self):
        self.assertEqual(
            "S-1-5-21-2447931902-1787058256-3961074038-3007",
            self.idmapdb.get_group_sid(10001))
| {
"content_hash": "f6d2db6976899212b185a6e581ac10f2",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 110,
"avg_line_length": 37.96551724137931,
"alnum_prop": 0.6444790450240041,
"repo_name": "nmercier/linux-cross-gcc",
"id": "126e133f4c1b196740d6f77c31dbb91d66fe1c45",
"size": "8434",
"binary": false,
"copies": "35",
"ref": "refs/heads/master",
"path": "linux/lib/python2.7/dist-packages/samba/tests/samba3.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1047092"
},
{
"name": "C++",
"bytes": "151335"
},
{
"name": "Makefile",
"bytes": "82796"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "29123266"
},
{
"name": "Shell",
"bytes": "14668"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from reviewer.models import ReviewGroup, ReviewLog
@admin.register(ReviewGroup)
class ReviewGroupAdmin(admin.ModelAdmin):
    # Columns shown on the ReviewGroup changelist page.
    list_display = ['organization', 'group', 'visibility_level']
@admin.register(ReviewLog)
class ReviewLogAdmin(admin.ModelAdmin):
    """Admin for ReviewLog entries.

    BUG FIX: this class was also named ``ReviewGroupAdmin`` (copy-paste
    error), silently shadowing the ReviewGroup admin class above in the
    module namespace.  Registration still worked because the decorator
    runs at class-creation time, but the duplicate name was misleading.
    """
    # Columns shown on the ReviewLog changelist page.
    list_display = ['content_type', 'object_id', 'subject',
                    'reviewer',
                    'created_at',
                    'visibility_level',
                    'log_type', 'message']
    search_fields = ['subject', 'object_id', 'message']
| {
"content_hash": "1b7bd86c72de1b8c00a545288690f569",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 64,
"avg_line_length": 34.8125,
"alnum_prop": 0.6283662477558348,
"repo_name": "MoveOnOrg/eventroller",
"id": "c4aa30a5d6aecc91817684a21391d29347f72d5a",
"size": "557",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "reviewer/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "609"
},
{
"name": "HTML",
"bytes": "7391"
},
{
"name": "JavaScript",
"bytes": "24435"
},
{
"name": "Python",
"bytes": "182608"
}
],
"symlink_target": ""
} |
"""
HTTP handler to serve specific endpoint requests like
http://myserver:9004/endpoints/mymodel
For how generic endpoints requests is served look
at endpoints_handler.py
"""
import json
import logging
import shutil
from tabpy.tabpy_server.common.util import format_exception
from tabpy.tabpy_server.handlers import ManagementHandler
from tabpy.tabpy_server.handlers.base_handler import STAGING_THREAD
from tabpy.tabpy_server.management.state import get_query_object_path
from tabpy.tabpy_server.psws.callbacks import on_state_change
from tabpy.tabpy_server.handlers.util import AuthErrorStates
from tornado import gen
class EndpointHandler(ManagementHandler):
    """Tornado handler for a single endpoint resource (/endpoints/<name>):
    GET returns endpoint metadata, PUT updates an endpoint, DELETE
    removes it and its on-disk query object."""

    def initialize(self, app):
        super(EndpointHandler, self).initialize(app)

    def get(self, endpoint_name):
        """Write JSON for one endpoint, or for all endpoints when
        ``endpoint_name`` is empty; responds 404 for an unknown name."""
        if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
            self.fail_with_auth_error()
            return

        self.logger.log(logging.DEBUG, f"Processing GET for /endpoints/{endpoint_name}")

        self._add_CORS_header()
        if not endpoint_name:
            # No name given: dump the whole endpoints map.
            self.write(json.dumps(self.tabpy_state.get_endpoints()))
        else:
            if endpoint_name in self.tabpy_state.get_endpoints():
                self.write(json.dumps(self.tabpy_state.get_endpoints()[endpoint_name]))
            else:
                self.error_out(
                    404,
                    "Unknown endpoint",
                    info=f"Endpoint {endpoint_name} is not found",
                )

    @gen.coroutine
    def put(self, name):
        """Update endpoint ``name`` from the JSON request body, bumping
        its version; 400 on bad input, 404 if unknown, 500 otherwise."""
        if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
            self.fail_with_auth_error()
            return

        self.logger.log(logging.DEBUG, f"Processing PUT for /endpoints/{name}")

        try:
            if not self.request.body:
                self.error_out(400, "Input body cannot be empty")
                self.finish()
                return
            try:
                request_data = json.loads(self.request.body.decode("utf-8"))
            except BaseException as ex:
                self.error_out(
                    400, log_message="Failed to decode input body", info=str(ex)
                )
                self.finish()
                return

            # check if endpoint exists
            endpoints = self.tabpy_state.get_endpoints(name)
            if len(endpoints) == 0:
                self.error_out(404, f"endpoint {name} does not exist.")
                self.finish()
                return

            # Versions increase monotonically per endpoint.
            new_version = int(endpoints[name]["version"]) + 1
            self.logger.log(logging.INFO, f"Endpoint info: {request_data}")
            err_msg = yield self._add_or_update_endpoint(
                "update", name, new_version, request_data
            )
            if err_msg:
                self.error_out(400, err_msg)
                self.finish()
            else:
                self.write(self.tabpy_state.get_endpoints(name))
                self.finish()

        except Exception as e:
            err_msg = format_exception(e, "update_endpoint")
            self.error_out(500, err_msg)
            self.finish()

    @gen.coroutine
    def delete(self, name):
        """Delete endpoint ``name``: remove it from the server state and,
        for non-alias endpoints, delete its query-object files on disk.
        Responds 204 on success."""
        if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
            self.fail_with_auth_error()
            return

        self.logger.log(logging.DEBUG, f"Processing DELETE for /endpoints/{name}")

        try:
            endpoints = self.tabpy_state.get_endpoints(name)
            if len(endpoints) == 0:
                self.error_out(404, f"endpoint {name} does not exist.")
                self.finish()
                return

            # update state
            try:
                endpoint_info = self.tabpy_state.delete_endpoint(name)
            except Exception as e:
                # NOTE(review): ``e.message`` does not exist on Python 3
                # exceptions — if this branch is reached it raises
                # AttributeError; consider str(e).  Confirm before fixing.
                self.error_out(400, f"Error when removing endpoint: {e.message}")
                self.finish()
                return

            # delete files
            # Alias endpoints have no query-object files of their own.
            if endpoint_info["type"] != "alias":
                delete_path = get_query_object_path(
                    self.settings["state_file_path"], name, None
                )
                try:
                    yield self._delete_po_future(delete_path)
                except Exception as e:
                    self.error_out(400, f"Error while deleting: {e}")
                    self.finish()
                    return

            self.set_status(204)
            self.finish()

        except Exception as e:
            err_msg = format_exception(e, "delete endpoint")
            self.error_out(500, err_msg)
            self.finish()

        # Notify the rest of the server that endpoint state changed.
        on_state_change(
            self.settings, self.tabpy_state, self.python_service, self.logger
        )

    @gen.coroutine
    def _delete_po_future(self, delete_path):
        # Recursive deletion runs on the staging thread pool so a large
        # rmtree does not block the IO loop.
        future = STAGING_THREAD.submit(shutil.rmtree, delete_path)
        ret = yield future
        raise gen.Return(ret)
| {
"content_hash": "463b853ee9a582aa883cf345c62d4a96",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 88,
"avg_line_length": 34.690140845070424,
"alnum_prop": 0.5617133576938693,
"repo_name": "tableau/TabPy",
"id": "583b17629d82ddfb31de07b8e3d1cf277fe462e4",
"size": "4926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tabpy/tabpy_server/handlers/endpoint_handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "258"
},
{
"name": "HTML",
"bytes": "1548"
},
{
"name": "Procfile",
"bytes": "131"
},
{
"name": "Python",
"bytes": "267988"
},
{
"name": "Shell",
"bytes": "490"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class UirevisionValidator(_plotly_utils.basevalidators.AnyValidator):
    """Validator for the ``uirevision`` property of ``funnel`` traces."""

    def __init__(self, plotly_name="uirevision", parent_name="funnel", **kwargs):
        # Changing uirevision never triggers a re-render ("none" edit type)
        # unless the caller overrides it.
        edit_type = kwargs.pop("edit_type", "none")
        super(UirevisionValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| {
"content_hash": "8ecb6967536046c9103d88a215161ea1",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 81,
"avg_line_length": 36.54545454545455,
"alnum_prop": 0.6268656716417911,
"repo_name": "plotly/plotly.py",
"id": "8a0c44d507a9d884e7dfbca96ab978522c20b447",
"size": "402",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/funnel/_uirevision.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
"""
Set of utility function used throughout the app
"""
from uuid import uuid4
import hashlib
import re
from google.appengine.ext import ndb #pylint: disable=import-error
import config
from pydash import _
import itertools, operator
# Loose RFC-style email shape check: local part, '@', dotted domain, 2-4 char TLD.
EMAIL_REGEX = r'^[-!#$%&\'*+\\.\/0-9=?A-Za-z^_`{|}~]+@([-0-9A-Za-z]+\.)+([0-9A-Za-z]){2,4}$'
# see: https://gist.github.com/dperini/729294 (PYTHON PORT)
URL_REGEX = r'^(?:(?:https?|ftp)://)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*(?:\.(?:[a-z\u00a1-\uffff]{2,})))(?::\d{2,5})?(?:/\S*)?$'
def uuid():
    """Generate a random UUID token used for verification, password resets etc.

    Returns:
        string: 32 characters long hexadecimal string
    """
    token = uuid4()
    return token.hex
def create_name_from_email(email):
    """Try to recreate a person's real name from their email address.

    Examples:
        >>> create_name_from_email('bobby.tables@email.com')
        Bobby Tables
        >>> create_name_from_email('bobby-tables@email.com')
        Bobby Tables

    Args:
        email (string): Email address

    Returns:
        string: Hopefully the user's real name
    """
    local_part = email.split('@')[0]
    spaced = re.sub(r'_+|-+|\.+|\++', ' ', local_part)
    return spaced.title()
def password_hash(password):
    """Hash a plain-text password with salted SHA-256.

    The salt is the admin-configured value stored in >>> model.Config

    Args:
        password (string): Plain text password

    Returns:
        string: hashed password, 64 characters
    """
    digest = hashlib.sha256()
    digest.update(password.encode('utf-8'))
    digest.update(config.CONFIG_DB.salt)
    return digest.hexdigest()
def list_to_dict(input_list):
    """Create a dictionary whose keys come from ``input_list``, all True.

    This function is primarily useful for converting passed data from Angular
    checkboxes, since angular ng-model can't return a list of checked group of
    checkboxes; instead it returns something like {'a': True, 'b': True}.

    Example:
        >>> list_to_dict(['a', 'b'])
        {'a': True, 'b': True}

    Args:
        input_list (list): List of any hashable type

    Returns:
        dict: Dict with 'True' values
    """
    # Stdlib one-liner replaces the original pydash
    # zip_object/map/constant chain with identical results.
    return dict.fromkeys(input_list, True)
def dict_to_list(input_dict):
    """Create a list of keys whose values are truthy.

    This function is primarily useful for converting passed data from Angular
    checkboxes, since angular ng-model can't return a list of checked group of
    checkboxes; instead it returns something like {'a': True, 'b': True}.

    Example:
        >>> dict_to_list({'a': True, 'b': True, 'c': False})
        ['a', 'b']

    Args:
        input_dict (dict): Dict with boolean values

    Returns:
        list: list of keys with truthful values
    """
    # Plain comprehension replaces the original pydash keys/pick/identity
    # chain with identical results (truthy filter, insertion order).
    return [key for key, value in input_dict.items() if value]
def constrain_string(string, minlen, maxlen):
    """Validate that the length of ``string`` lies within [minlen, maxlen].

    Args:
        string (string): String to be checked
        minlen (int): Minimal length
        maxlen (int): Maximal length

    Returns:
        string: Returns the given string

    Raises:
        ValueError: If the string doesn't fit the min/max constraints
    """
    length = len(string)
    if length < minlen:
        raise ValueError('Input need to be at least %s characters long' % minlen)
    if length > maxlen:
        raise ValueError('Input need to be maximum %s characters long' % maxlen)
    return string
def constrain_regex(string, regex):
    """Validate ``string`` against ``regex`` (case-insensitive match).

    Args:
        string (string): String to be checked
        regex (string): Regular expression

    Returns:
        string: Returns the given string

    Raises:
        ValueError: If the string doesn't match the regex
    """
    if re.match(regex, string, re.IGNORECASE) is None:
        raise ValueError('Incorrect regex format')
    return string
def create_validator(lengths=None, regex='', required=True):
    """Factory that builds validator functions for ndb properties.

    The returned function validates strings against either the given length
    bounds or the given regex, fixed at creation time.

    Args:
        lengths (list): list of exactly 2 ints, e.g. [3, 7] means the string
            must be between 3 and 7 characters long
        regex (string): Regular expression
        required (bool): Whether the empty value '' is accepted as valid,
            ignoring the other constraints

    Returns:
        function: Function which will be used for validating input
    """
    def validator_function(value, prop):
        """Validate ``value`` against the constraints captured in the closure.

        Primarily used as an ndb.Property validator.

        Args:
            value (string): input value to be validated
            prop (string): ndb.Property name which is validated

        Returns:
            string: the original string, if valid

        Raises:
            ValueError: If the input isn't valid
        """
        # ndb passes the arguments in (prop, value) order when a property is
        # used in a comparison such as User.name == 'abc', but (value, prop)
        # when storing data — swap so ``value`` always holds the candidate.
        if isinstance(value, ndb.Property):
            value = prop
        if value == '' and not required:
            return ''
        if regex:
            return constrain_regex(value, regex)
        return constrain_string(value, lengths[0], lengths[1])
    return validator_function
###############################################################################
# Request Parameters
###############################################################################
def param(name, cast=None):
    """Stubbed request-parameter accessor; currently always returns None.

    NOTE(review): the original flask-based lookup (json body, query args,
    form fields, plus optional casting) is kept below commented out —
    presumably disabled during a framework migration; confirm before
    re-enabling or removing.
    """
    return None
    #value = None
    #if flask.request.json:
    #    return flask.request.json.get(name, None)
    #if value is None:
    #    value = flask.request.args.get(name, None)
    #if value is None and flask.request.form:
    #    value = flask.request.form.get(name, None)
    #if cast and value is not None:
    #    if cast is bool:
    #        return value.lower() in ['true', 'yes', 'y', '1', '']
    #    if cast is list:
    #        return value.split(',') if len(value) > 0 else []
    #    if cast is ndb.Key:
    #        return ndb.Key(urlsafe=value)
    #    return cast(value)
    #return value
###############################################################################
# Model manipulations
###############################################################################
def get_dbs(
    query, order=None, limit=None, cursor=None, keys_only=None, **filters
  ):
    """Run an ndb query with optional filters, ordering and cursor paging.

    Args:
        query: ndb query to refine.
        order (string): comma-separated property names; '-' prefix = descending.
        limit (int): page size; defaults to config.DEFAULT_DB_LIMIT.
        cursor (string): websafe cursor of the previous page.
        keys_only (bool): fetch only keys.
        **filters: property -> value (equality filter), list of values
            (AND of equality filters), or a dict {'test': <op>, 'value': <v>}
            for comparison / IN filters.

    Returns:
        tuple: (list of model instances, websafe cursor for the next page
        or None when there are no more results)
    """
    limit = limit or config.DEFAULT_DB_LIMIT
    cursor = Cursor.from_websafe_string(cursor) if cursor else None
    model_class = ndb.Model._kind_map[query.kind]
    for prop in filters:
        if filters.get(prop, None) is None:
            continue
        if isinstance(filters[prop], list):
            for value in filters[prop]:
                query = query.filter(model_class._properties[prop] == value)
        # new custom wodor app -------------
        elif isinstance(filters[prop], dict):
            if filters[prop]['test'] == '>':
                query = query.filter(model_class._properties[prop] > filters[prop]['value'])
            elif filters[prop]['test'] == '>=':
                query = query.filter(model_class._properties[prop] >= filters[prop]['value'])
            elif filters[prop]['test'] == '<':
                query = query.filter(model_class._properties[prop] < filters[prop]['value'])
            elif filters[prop]['test'] == '<=':
                # BUGFIX: this branch previously applied the '<' operator,
                # silently excluding rows equal to the bound.
                query = query.filter(model_class._properties[prop] <= filters[prop]['value'])
            elif filters[prop]['test'] == '==':
                query = query.filter(model_class._properties[prop] == filters[prop]['value'])
            elif filters[prop]['test'] == '!=':
                query = query.filter(model_class._properties[prop] != filters[prop]['value'])
            elif filters[prop]['test'] == 'IN':
                values = filters[prop]['value']
                if isinstance(values, list):
                    values = filters[prop]['value']
                else:
                    values = values.split(',')
                query = query.filter(model_class._properties[prop].IN(values))
                # IN queries require an explicit sort order for consistency.
                query = query.order(model_class._key)
                query = query.order(model_class._properties[prop]) # TODO does it work?
        else:
            query = query.filter(model_class._properties[prop] == filters[prop])
    # ----------------------------------
    if order:
        for o in order.split(','):
            if o.startswith('-'):
                query = query.order(-model_class._properties[o[1:]])
            else:
                query = query.order(model_class._properties[o])
    model_dbs, next_cursor, more = query.fetch_page(
        limit, start_cursor=cursor, keys_only=keys_only,
    )
    next_cursor = next_cursor.to_websafe_string() if more else None
    return list(model_dbs), next_cursor
def sort_uniq(sequence, key_sort=None, key_group=None, reverse=False):
    """Sort ``sequence`` by ``key_sort``, de-duplicate on ``key_group``.

    For each group key the item that sorts last in the initial pass wins;
    the deduplicated items are returned sorted in the opposite direction.
    """
    get_sort_key = operator.itemgetter(key_sort)
    ordered = sorted(sequence, key=get_sort_key, reverse=reverse)
    # Later entries overwrite earlier ones, keeping the last item per group.
    latest_per_group = {item[key_group]: item for item in ordered}
    return sorted(latest_per_group.values(), key=get_sort_key, reverse=not reverse)
| {
"content_hash": "114356f3f5e78c3cd2439424f88f74f4",
"timestamp": "",
"source": "github",
"line_count": 278,
"max_line_length": 457,
"avg_line_length": 34.111510791366904,
"alnum_prop": 0.599388379204893,
"repo_name": "wodore/wodore-ng",
"id": "2775f09f0f9ac62324ca4cba31af06bda16c4598",
"size": "9499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9501"
},
{
"name": "HTML",
"bytes": "57499"
},
{
"name": "JavaScript",
"bytes": "61074"
},
{
"name": "Python",
"bytes": "207624"
},
{
"name": "Shell",
"bytes": "337"
}
],
"symlink_target": ""
} |
import numpy as np
from rl.util import logger, log_self
def create_dummy_states(state):
    """Return three all-zero placeholder arrays shaped like ``state``.

    Used to initialize the "previous" state history before enough real
    observations have been collected.

    Args:
        state (np.ndarray): reference state whose shape is copied.

    Returns:
        tuple: (previous_state, pre_previous_state, pre_pre_previous_state),
        each a fresh ``np.zeros`` array with ``state.shape``.
    """
    shape = state.shape
    # NOTE: the original special-cased state.ndim == 1 and rebuilt zeros of
    # the same shape — np.zeros(shape) already yields the identical 1-D
    # array, so a single code path suffices.
    return (np.zeros(shape), np.zeros(shape), np.zeros(shape))
class PreProcessor(object):
    '''
    The Base class for state preprocessing
    '''

    def __init__(self, max_queue_size=4, **kwargs):
        '''Construct externally, and set at Agent.compile()'''
        self.agent = None
        self.state = None
        self.exp_queue = []
        self.MAX_QUEUE_SIZE = max_queue_size
        self.never_debugged = True
        log_self(self)

    def reset_state(self, init_state):
        '''reset the state of LinearMemory per episode env.reset()'''
        self.state = np.array(init_state)  # cast into np for safety
        (previous_state, pre_previous_state,
         pre_pre_previous_state) = create_dummy_states(self.state)
        self.previous_state = previous_state
        self.pre_previous_state = pre_previous_state
        self.pre_pre_previous_state = pre_pre_previous_state
        return self.preprocess_state()

    def exp_queue_size(self):
        # Number of raw experiences currently buffered.
        return len(self.exp_queue)

    def debug_state(self, processed_state, processed_next_state):
        # Log the processed state shapes once, on the first call only.
        if self.never_debugged:
            logger.debug("State shape: {}".format(processed_state.shape))
            logger.debug(
                "Next state shape: {}".format(processed_next_state.shape))
            self.never_debugged = False

    def preprocess_env_spec(self, env_spec):
        '''helper to tweak env_spec according to preprocessor'''
        class_name = self.__class__.__name__
        # BUGFIX: compare strings with ``==`` instead of ``is`` — identity
        # comparison against a literal relies on CPython string interning
        # and raises SyntaxWarning on modern Python.
        if class_name == 'StackStates':
            env_spec['state_dim'] = env_spec['state_dim'] * 2
        elif class_name == 'Atari':
            env_spec['state_dim'] = (84, 84, 4)
        return env_spec

    def preprocess_state(self):
        raise NotImplementedError()

    def advance_state(self, next_state):
        # Shift the state-history window back by one step.
        self.pre_pre_previous_state = self.pre_previous_state
        self.pre_previous_state = self.previous_state
        self.previous_state = self.state
        self.state = next_state

    def add_raw_exp(self, action, reward, next_state, done):
        '''
        Buffer currently set to hold only last 4 experiences
        Amount needed for Atari games preprocessing
        '''
        self.exp_queue.append([self.state, action, reward, next_state, done])
        if (self.exp_queue_size() > self.MAX_QUEUE_SIZE):
            del self.exp_queue[0]
        self.advance_state(next_state)

    def preprocess_memory(self, action, reward, next_state, done):
        raise NotImplementedError()
| {
"content_hash": "f7b06b9c4f528899a327e6e5f776e4b4",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 77,
"avg_line_length": 36.72839506172839,
"alnum_prop": 0.626218487394958,
"repo_name": "kengz/openai_lab",
"id": "0125f519510336fa8e4d27f34900633b08b41088",
"size": "2975",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rl/preprocessor/base_preprocessor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "5528"
},
{
"name": "Python",
"bytes": "162421"
},
{
"name": "Shell",
"bytes": "4276"
}
],
"symlink_target": ""
} |
import time
from unittest import mock
import pytest
from google.api_core import exceptions
from google.cloud.bigquery_storage_v1.services import big_query_write
from google.cloud.bigquery_storage_v1 import types as gapic_types
from google.cloud.bigquery_storage_v1 import exceptions as bqstorage_exceptions
from google.protobuf import descriptor_pb2
REQUEST_TEMPLATE = gapic_types.AppendRowsRequest()
@pytest.fixture(scope="module")
def module_under_test():
    """Import and return the writer module, once per test module."""
    from google.cloud.bigquery_storage_v1 import writer
    return writer
def test_constructor_and_default_state(module_under_test):
    """A freshly built stream is inactive and holds the client as-is."""
    write_client = mock.create_autospec(big_query_write.BigQueryWriteClient)

    stream = module_under_test.AppendRowsStream(write_client, REQUEST_TEMPLATE)

    # Public state: not yet opened.
    assert stream.is_active is False
    # Private state: client handle stored unchanged.
    assert stream._client is write_client
def test_close_before_open(module_under_test):
    """send() after close() raises StreamClosedError, even if the stream
    was never opened."""
    write_client = mock.create_autospec(big_query_write.BigQueryWriteClient)
    stream = module_under_test.AppendRowsStream(write_client, REQUEST_TEMPLATE)

    stream.close()

    with pytest.raises(bqstorage_exceptions.StreamClosedError):
        stream.send(object())
@mock.patch("google.api_core.bidi.BidiRpc", autospec=True)
@mock.patch("google.api_core.bidi.BackgroundConsumer", autospec=True)
def test_initial_send(background_consumer, bidi_rpc, module_under_test):
    """First send(): the request template must be deep-merged with the
    initial request, the background consumer started, and the routing
    header derived from the write stream path."""
    mock_client = mock.create_autospec(big_query_write.BigQueryWriteClient)
    request_template = gapic_types.AppendRowsRequest(
        write_stream="stream-name-from-REQUEST_TEMPLATE",
        offset=0,
        proto_rows=gapic_types.AppendRowsRequest.ProtoData(
            writer_schema=gapic_types.ProtoSchema(
                proto_descriptor=descriptor_pb2.DescriptorProto()
            )
        ),
    )
    manager = module_under_test.AppendRowsStream(mock_client, request_template)
    # NOTE(review): return_value is a 2-tuple, which is always truthy, so
    # every is_active read sees a truthy value. Possibly
    # side_effect=(False, True) was intended — confirm.
    type(bidi_rpc.return_value).is_active = mock.PropertyMock(
        return_value=(False, True)
    )
    proto_rows = gapic_types.ProtoRows()
    proto_rows.serialized_rows.append(b"hello, world")
    initial_request = gapic_types.AppendRowsRequest(
        write_stream="this-is-a-stream-resource-path",
        offset=42,
        proto_rows=gapic_types.AppendRowsRequest.ProtoData(rows=proto_rows),
    )
    future = manager.send(initial_request)
    assert isinstance(future, module_under_test.AppendRowsFuture)
    background_consumer.assert_called_once_with(manager._rpc, manager._on_response)
    background_consumer.return_value.start.assert_called_once()
    assert manager._consumer == background_consumer.return_value
    # Make sure the request template and the first request are merged as
    # expected. Needs to be especially careful that nested properties such as
    # writer_schema and rows are merged as expected.
    expected_request = gapic_types.AppendRowsRequest(
        write_stream="this-is-a-stream-resource-path",
        offset=42,
        proto_rows=gapic_types.AppendRowsRequest.ProtoData(
            writer_schema=gapic_types.ProtoSchema(
                proto_descriptor=descriptor_pb2.DescriptorProto()
            ),
            rows=proto_rows,
        ),
    )
    bidi_rpc.assert_called_once_with(
        start_rpc=mock_client.append_rows,
        initial_request=expected_request,
        # Extra header is required to route requests to the correct location.
        metadata=(
            ("x-goog-request-params", "write_stream=this-is-a-stream-resource-path"),
        ),
    )
    bidi_rpc.return_value.add_done_callback.assert_called_once_with(
        manager._on_rpc_done
    )
    assert manager._rpc == bidi_rpc.return_value
    manager._consumer.is_active = True
    assert manager.is_active is True
@mock.patch("google.api_core.bidi.BidiRpc", autospec=True)
@mock.patch("google.api_core.bidi.BackgroundConsumer", autospec=True)
def test_initial_send_with_timeout(background_consumer, bidi_rpc, module_under_test):
    """With RPC and consumer never becoming active, send() must give up
    with Unknown once the (patched) monotonic clock passes _DEFAULT_TIMEOUT."""
    mock_client = mock.create_autospec(big_query_write.BigQueryWriteClient)
    manager = module_under_test.AppendRowsStream(mock_client, REQUEST_TEMPLATE)
    type(bidi_rpc.return_value).is_active = mock.PropertyMock(return_value=False)
    type(background_consumer.return_value).is_active = mock.PropertyMock(
        return_value=False
    )
    initial_request = gapic_types.AppendRowsRequest(
        write_stream="this-is-a-stream-resource-path"
    )
    # Two monotonic() readings: the second is already past the deadline, so
    # the wait loop exits after a single iteration. sleep is patched out.
    now = time.monotonic()
    later = now + module_under_test._DEFAULT_TIMEOUT + 1
    with mock.patch.object(module_under_test.time, "sleep"), mock.patch.object(
        module_under_test.time, "monotonic", mock.MagicMock(side_effect=(now, later))
    ), pytest.raises(exceptions.Unknown):
        manager.send(initial_request)
def test_future_done_false(module_under_test):
    """A freshly created future reports not-done."""
    write_client = mock.create_autospec(big_query_write.BigQueryWriteClient)
    stream = module_under_test.AppendRowsStream(write_client, REQUEST_TEMPLATE)
    pending = module_under_test.AppendRowsFuture(stream)

    assert not pending.done()
def test_future_done_true_with_result(module_under_test):
    """Setting a result marks the future as done."""
    write_client = mock.create_autospec(big_query_write.BigQueryWriteClient)
    stream = module_under_test.AppendRowsStream(write_client, REQUEST_TEMPLATE)
    pending = module_under_test.AppendRowsFuture(stream)

    pending.set_result(object())

    assert pending.done()
def test_future_done_true_with_exception(module_under_test):
    """Setting an exception also marks the future as done."""
    write_client = mock.create_autospec(big_query_write.BigQueryWriteClient)
    stream = module_under_test.AppendRowsStream(write_client, REQUEST_TEMPLATE)
    pending = module_under_test.AppendRowsFuture(stream)

    pending.set_exception(ValueError())

    assert pending.done()
| {
"content_hash": "b19d8336092084c7b6a9cdbc85053c02",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 85,
"avg_line_length": 39.44827586206897,
"alnum_prop": 0.7244755244755244,
"repo_name": "googleapis/python-bigquery-storage",
"id": "9ecb185818d6dff178f12226a6eabbe75de95512",
"size": "6296",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/test_writer_v1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1136897"
},
{
"name": "Shell",
"bytes": "30690"
}
],
"symlink_target": ""
} |
import datetime
from copy import deepcopy
import logging
import xml.etree.ElementTree as et
import os
import pystache
import re
import string
import sys
import subprocess
import urllib2
# SDK 3.12.6: https://github.com/Parrot-Developers/arsdk_manifests/blob/d7640c80ed7147971995222d9f4655932a904aa8/release.xml
# Pinned arsdk source: all XML is fetched from this owner/commit so the
# generated code stays reproducible.
LIBARCOMMANDS_GIT_OWNER = "Parrot-Developers"
LIBARCOMMANDS_GIT_HASH = "ab28dab91845cd36c4d7002b55f70805deaff3c8"
# From XML types to ROS primitive types
ROS_TYPE_MAP = {
    "bool": "bool",
    "u8": "uint8",
    "i8": "int8",
    "u16": "uint16",
    "i16": "int16",
    "u32": "uint32",
    "i32": "int32",
    "u64": "uint64",
    "i64": "int64",
    "float": "float32",
    "double": "float64",
    "string": "string",
    "enum": "enum"
}
# From XML types to BebopSDK union defined in ARCONTROLLER_Dictionary.h
BEBOP_TYPE_MAP = {
    "bool": "U8",
    "u8": "U8",
    "i8": "I8",
    "u16": "U16",
    "i16": "I16",
    "u32": "U32",
    "i32": "I32",
    "u64": "U64",
    "i64": "I64",
    "float": "Float",
    "double": "Double",
    "string": "String",
    "enum": "I32"
}
# From XML types to Dynamic Reconfigure Types
DYN_TYPE_MAP = {
    "bool": "bool_t",
    "u8": "int_t",
    "i8": "int_t",
    "u16": "int_t",
    "i16": "int_t",
    "u32": "int_t",
    "i32": "int_t",
    "u64": "int_t",
    "i64": "int_t",
    "float": "double_t",
    "double": "double_t",
    "string": "str_t",
    "enum": "enum"
}
# From XML types to C++ types (as read back from rosparam).
C_TYPE_MAP = {
    "bool": "bool",
    "u8": "int32_t",
    "i8": "int32_t",
    "u16": "int32_t",
    "i16": "int32_t",
    "u32": "int32_t",
    "i32": "int32_t",
    "u64": "int32_t",
    "i64": "int32_t",
    "float": "double", # for rosparam
    "double": "double",
    "string": "std::string",
    "enum": "int32_t"
}
# Settings keys that must never be exposed as reconfigurable parameters.
blacklist_settings_keys = set(["wifiSecurity"])
# Matches explicit "[min:max]" range annotations in argument comments.
min_max_regex = re.compile('\[([0-9\.\-]+)\:([0-9\.\-]+)\]')
# Shared mustache renderer used for all template expansions.
rend = pystache.Renderer()
def get_xml_url(filename):
    """Build the download URL for ``filename`` at the pinned SDK commit."""
    context = {
        "repo_owner": LIBARCOMMANDS_GIT_OWNER,
        "hash": LIBARCOMMANDS_GIT_HASH,
        "filename": filename,
    }
    return rend.render_path("templates/url.mustache", context)
def load_from_url(url):
    """Download ``url`` and return the raw response body.

    The response handle is closed even when the read fails — the original
    leaked the connection if ``read()`` raised.
    """
    f = urllib2.urlopen(url)
    try:
        return f.read()
    finally:
        f.close()
def is_state_tag(name):
    """True for class names containing "State" but not "Settings"."""
    return ("State" in name) and ("Settings" not in name)
def is_settings_tag(name):
    """True for class names containing "Settings" but not "State"."""
    return ("Settings" in name) and ("State" not in name)
def strip_text(text):
    """Flatten ``text``: drop newlines, collapse runs of whitespace to one
    space, and remove single/double quote characters."""
    flattened = text.strip().replace('\n', '').replace('\r', '')
    collapsed = re.sub(r"\s\s+", " ", flattened)
    return collapsed.replace('"', '').replace("'", "")
def cap_word(text):
    """Lower-case ``text`` then title-case it (e.g. "ARDrone3" -> "Ardrone3")."""
    lowered = text.lower()
    return lowered.title()
def guess_min_max(arg_comment):
    """Heuristically derive a [min, max] range from an XML argument comment.

    An explicit "[lo:hi]" annotation wins; otherwise unit/keyword matching
    is tried in priority order. Returns [] when nothing matches.
    """
    explicit = re.search('\[([0-9\.\-]+)\:([0-9\.\-]+)\]', arg_comment)
    if explicit:
        logging.info(" ... [min:max]")
        return [float(explicit.group(1)), float(explicit.group(2))]
    comment = arg_comment.lower()
    # Order matters: "m/s2" must be tested before the more general "m/s",
    # and "in degree/s" before "in degree".
    if "m/s2" in comment:
        logging.info(" ... acc (m/s2)")
        return [0.0, 5.0]
    if "m/s" in comment:
        logging.info(" ... speed (m/s)")
        return [0.0, 10.0]
    if ("in meters" in comment) or ("in m" in comment):
        logging.info(" ... meters")
        return [0, 160]
    if "in degree/s" in comment:
        logging.info(" ... rotations speed degrees/s")
        return [0, 900.0]
    if "in degree" in comment:
        logging.info(" ... degrees")
        return [-180.0, 180.0]
    if ("1" in comment) and ("0" in comment):
        logging.info(" ... bool")
        return [0, 1]
    if "latitude" in comment:
        logging.info(" ... latitude")
        return [-90.0, 90.0]
    if "longitude" in comment:
        logging.info(" ... longitude")
        return [-180.0, 180.0]
    if "[rad/s]" in comment:
        logging.info(" ... angular speed (rad/s)")
        return [0.0, 5.0]
    if "channel" in comment:
        logging.info(" ... unknown int")
        return [0, 50]
    if "second" in comment:
        logging.info(" ... time (s)")
        return [0, 120]
    return []
def today():
    """Current local date formatted as "YYYY-MM-DD"."""
    return "{:%Y-%m-%d}".format(datetime.datetime.now())
def generate_states(xml_filename):
    """Generate ROS .msg files, C++ state-callback headers and an .rst doc
    page from the Parrot arsdk XML command definitions for one project.

    Downloads the pinned XML, walks every State <class>/<cmd> pair, and
    renders mustache templates into files in the working directory.

    Args:
        xml_filename (string): project XML file name, e.g. "ardrone3.xml"
    """
    xml_url = get_xml_url(xml_filename)
    project = xml_filename.split(".")[0]
    logging.info("XML Filename: %s" % (xml_filename, ))
    logging.info("Fetching source XML file for project %s " % (project, ))
    logging.info("URL: %s" % (xml_url, ))
    xml = load_from_url(xml_url)
    xml_root = et.fromstring(xml)
    # iterate all <class> tags
    logging.info("Iterating all State <class> tags ...")
    generator = os.path.basename(__file__)
    generator_git_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
    # Rendering context shared by all C++ outputs.
    d_cpp = dict({
        "url": xml_url,
        "project": project,
        "date": today(),
        "generator": generator,
        "generator_git_hash": generator_git_hash,
        "queue_size": 10,  # 5Hz
        "frame_id": "base_link",
        "cpp_class": list()
    })
    # msg_name -> rendering context for that .msg file.
    d_msg = dict()
    for cl in xml_root.iter("class"):
        if not is_state_tag(cl.attrib["name"]):
            continue
        # Iterate all cmds
        # Generate one .msg and one C++ class for each of them
        for cmd in cl.iter("cmd"):
            # .msg
            msg_name = cap_word(project) + cl.attrib["name"] + cmd.attrib["name"]
            comment_el = cmd.find("comment")
            msg_file_comment = ""
            if not comment_el is None:
                msg_file_comment = comment_el.attrib["desc"]
            d = dict({
                "url": xml_url,
                "msg_filename": msg_name,
                "date": today(),
                "generator": generator,
                "generator_git_hash": generator_git_hash,
                "msg_file_comment": strip_text(msg_file_comment),
                "msg_field": list()
            })
            # C++ class
            cpp_class_dict_key = rend.render_path("templates/dictionary_key.mustache",
                {"project": project.upper(), "class": cl.attrib["name"].upper(), "cmd": cmd.attrib["name"].upper()})
            # cmd.attrib["name"] and cl.attrib["name"] are already in CamelCase
            cpp_class_name = msg_name
            cpp_class_instance_name = project.lower() + "_" + cl.attrib["name"].lower() + "_" + cmd.attrib["name"].lower() + "_ptr";
            cpp_class_param_name = "states/enable_" + cl.attrib["name"].lower() + "_" + cmd.attrib["name"].lower()
            topic_name = "states/" + project + "/" + cl.attrib["name"] + "/" + cmd.attrib["name"]
            arg_list = []
            for arg in cmd.iter("arg"):
                # .msg
                f_name = arg.attrib["name"]
                f_type = ROS_TYPE_MAP[arg.attrib.get("type", "bool")]
                f_comment = strip_text(arg.text)
                f_enum_list = list()
                # Enums become uint8 fields plus named integer constants.
                if (f_type == "enum"):
                    f_type = "uint8"
                    counter = 0
                    for enum in arg.iter("enum"):
                        f_enum_list.append({
                            "constant_name": f_name + "_" + enum.attrib["name"],
                            "constant_value": counter,
                            "constant_comment": strip_text(enum.text)
                        })
                        counter += 1
                d["msg_field"].append({
                    "msg_field_type": f_type,
                    "msg_field_name": f_name,
                    "msg_field_comment": f_comment,
                    "msg_field_enum": deepcopy(f_enum_list)
                })
                # C++ class
                arg_list.append({
                    "cpp_class_arg_key": cpp_class_dict_key + "_" + arg.attrib["name"].upper(),
                    "cpp_class_arg_name": f_name,
                    "cpp_class_arg_sdk_type": BEBOP_TYPE_MAP[arg.attrib.get("type", "bool")]
                })
            d_msg[msg_name] = deepcopy(d)
            # C++ class
            d_cpp["cpp_class"].append({
                "cpp_class_name": cpp_class_name,
                "cpp_class_comment": strip_text(msg_file_comment),
                "cpp_class_instance_name": cpp_class_instance_name,
                "cpp_class_param_name": cpp_class_param_name,
                "topic_name": topic_name,
                "latched": "true",
                "cpp_class_msg_type": msg_name,
                "key": cpp_class_dict_key,
                "cpp_class_arg": deepcopy(arg_list)
            })
    logging.info("... Done iterating, writing results to file")
    # .msg write
    for k, d in d_msg.items():
        msg_filename = "%s.msg" % k
        logging.info("Writing %s" % (msg_filename, ))
        with open(msg_filename, "w") as msg_file:
            msg_file.write(rend.render_path("templates/msg.mustache", d))
    header_file_name = "%s_state_callbacks.h" % (project.lower(), )
    logging.info("Writing %s" % (header_file_name, ))
    with open(header_file_name, "w") as header_file:
        header_file.write(rend.render_path("templates/state_callbacks.h.mustache", d_cpp))
    include_file_name = "%s_state_callback_includes.h" % (project.lower(), )
    logging.info("Writing %s" % (include_file_name, ))
    with open(include_file_name, "w") as include_file:
        include_file.write(rend.render_path("templates/state_callback_includes.h.mustache", d_cpp))
    with open("callbacks_common.h", "w") as header_file:
        header_file.write(rend.render_path("templates/callbacks_common.h.mustache", d_cpp))
    rst_file_name = "%s_states_param_topic.rst" % (project.lower(), )
    logging.info("Writing %s" % (rst_file_name, ))
    with open(rst_file_name, "w") as rst_file:
        rst_file.write(rend.render_path("templates/states_param_topic.rst.mustache", d_cpp))
def generate_settings(xml_filename):
    """Generate dynamic_reconfigure .cfg entries, C++ callback headers and
    .rst docs for the "Settings" command classes of one Parrot SDK XML file.

    :param xml_filename: SDK XML file name, e.g. "ardrone3.xml"; the project
        name is derived from the part before the first dot.
    """
    xml_url = get_xml_url(xml_filename)
    project = xml_filename.split(".")[0]
    logging.info("Fetching source XML file for project %s " % (project, ))
    logging.info("URL: %s" % (xml_url, ))
    xml = load_from_url(xml_url)
    xml_root = et.fromstring(xml)
    # Provenance info embedded in the generated files.
    generator = os.path.basename(__file__)
    generator_git_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
    # make sure that the name of the config file matches the third argument
    # of gen.generate()
    d_cfg = dict({
        "cfg_filename": "Bebop%s.cfg" % (project.title(), ),
        "url": xml_url,
        "project": project.title(),
        "date": today(),
        "generator": generator,
        "generator_git_hash": generator_git_hash,
        "cfg_class": list(),
        "cpp_class": list()
    })
    for cl in xml_root.iter("class"):
        # Only classes whose name marks them as "Settings" are processed.
        if not is_settings_tag(cl.attrib["name"]):
            continue
        # At the moment the XML file is not 100% consistent between Settings and SettingsChanged and inner Commands
        # 1. Check if `class["name"]State` exists
        if not xml_root.findall(".//class[@name='%s']" % (cl.attrib["name"] + "State", )):
            logging.warning("No State Class for %s " % (cl.attrib["name"], ))
            continue
        # Iterate all cmds
        # generate one C++ class for each command
        cfg_class_d = {
            "cfg_class_name": cl.attrib["name"].lower(),
            "cfg_class_comment": strip_text(cl.text),
            "cfg_cmd": list()
        }
        for cmd in cl.iter("cmd"):
            # 2. Check if `cmd["name"]Changed` exists
            if not xml_root.findall(".//cmd[@name='%s']" % (cmd.attrib["name"] + "Changed", )):
                logging.warning("No Changed CMD for %s " % (cmd.attrib["name"], ))
                continue
            # blacklist
            if strip_text(cmd.attrib["name"]) in blacklist_settings_keys:
                logging.warning("Key %s is blacklisted!" % (cmd.attrib["name"], ))
                continue
            comment_el = cmd.find("comment")
            cmd_comment = ""
            if not comment_el is None:
                cmd_comment = comment_el.attrib["desc"]
            # .cfg
            cfg_cmd_d = {
                "cfg_cmd_comment": strip_text(cmd_comment),
                "cfg_arg": list()
            }
            # C++
            # We are iterating classes with names ending in "Setting". For each of these classes
            # there exists a corresponding class with the same name + "State" (e.g PilotingSetting and PilottingSettingState)
            # The inner commands of the corresponding class are also follow a similar conention, they end in "CHANGED".
            # We create cfg files based on Settings, and ROS param updates based on SettingsChanged
            cpp_class_dict_key = rend.render_path("templates/dictionary_key.mustache",
                {"project": project.upper(), "class": cl.attrib["name"].upper() + "STATE", "cmd": cmd.attrib["name"].upper() + "CHANGED"} )
            # cmd.attrib["name"] and cl.attrib["name"] are already in CamelCase
            cpp_class_name = cl.attrib["name"] + cmd.attrib["name"]
            cpp_class_comment = strip_text(cmd_comment)
            cpp_class_instance_name = project.lower() + "_" + cl.attrib["name"].lower() + "_" + cmd.attrib["name"].lower() + "_ptr";
            cpp_class_params = list()
            counter = 0
            # generate one dyamic reconfigure variable per arg
            for arg in cmd.iter("arg"):
                # .cfg
                arg_name = cl.attrib["name"] + cmd.attrib["name"] + cap_word(arg.attrib["name"])
                arg_type = DYN_TYPE_MAP[arg.attrib.get("type", "bool")]
                arg_comment = strip_text(arg.text)
                arg_enum_list = list()
                minmax_list = list()
                arg_default = 0
                arg_min = 0.0
                arg_max = 0.0
                counter = 0
                need_enum_cast = False
                if (arg_type == "enum"):
                    # Enums are exposed to dynamic_reconfigure as constrained ints.
                    need_enum_cast = True
                    arg_type = "int_t"
                    for enum in arg.iter("enum"):
                        arg_enum_list.append({
                            "constant_name": arg_name + "_" + enum.attrib["name"],
                            "constant_value": counter,
                            "constant_comment": strip_text(enum.text)
                        })
                        counter += 1
                elif not arg_type == "str_t":
                    # No min/max values defined in XML, guessing the type and propose a value:
                    logging.info("Guessing type of \"%s\"" % (arg_name))
                    logging.info(" from: %s" % (arg_comment))
                    minmax_list = guess_min_max(arg_comment)
                    if (len(minmax_list) == 2):
                        [arg_min, arg_max] = minmax_list
                        logging.info(" min: %s max: %s" % (arg_min, arg_max))
                    else:
                        logging.warning(" Can not guess [min:max] values for this arg, skipping it")
                    # We create a fake enum for int_t types that only accept bool values
                    # The source XML should have defined them as bool_t
                    # Since these are fake enums (no defines in SDK), we don't need int->enum casting
                    if arg_type == "int_t" and arg_min == 0 and arg_max == 1:
                        arg_enum_list.append({
                            "constant_name": arg_name + "_OFF",
                            "constant_value": 0,
                            "constant_comment": "Disabled"
                        })
                        arg_enum_list.append({
                            "constant_name": arg_name + "_ON",
                            "constant_value": 1,
                            "constant_comment": "Enabled"
                        })
                        counter = 2
                # either we found minmax or the arg is of type enum
                if len(minmax_list) or need_enum_cast or arg_type == "str_t":
                    # hack
                    if arg_type == "str_t":
                        arg_min = "''"
                        arg_max = "''"
                        arg_default = "''"
                    cfg_cmd_d["cfg_arg"].append({
                        "cfg_arg_type": arg_type,
                        "cfg_arg_name": arg_name,
                        "cfg_arg_comment": arg_comment,
                        "cfg_arg_default": arg_default,
                        "cfg_arg_min": arg_min,
                        "cfg_arg_max": arg_max,
                        # Render once trick: http://stackoverflow.com/a/10118092
                        "cfg_arg_enum": {'items' : deepcopy(arg_enum_list)} if len(arg_enum_list) else [],
                        "enum_max": counter - 1
                    })
                    # generate c enum type
                    if (need_enum_cast):
                        enum_cast = "static_cast<eARCOMMANDS_%s_%s_%s_%s>" % (project.upper(), cl.attrib["name"].upper(), cmd.attrib["name"].upper(), arg.attrib["name"].upper())
                    else:
                        enum_cast = ""
                    cpp_class_params.append({
                        "cpp_class_arg_key": cpp_class_dict_key + "_" + arg.attrib["name"].upper(),
                        "cpp_class_param_name": arg_name,
                        "cpp_class_comment": cpp_class_comment,
                        "cpp_class_param_enum_cast": enum_cast,
                        "cpp_class_param_type": C_TYPE_MAP[arg.attrib.get("type", "bool")],
                        "cpp_class_param_sdk_type": BEBOP_TYPE_MAP[arg.attrib.get("type", "bool")]
                    })
            # Skip cmds with no arguments
            if len(cfg_cmd_d["cfg_arg"]):
                cfg_class_d["cfg_cmd"].append(deepcopy(cfg_cmd_d))
                d_cfg["cpp_class"].append({
                    "cpp_class_dict_key": cpp_class_dict_key,
                    "cpp_class_name": cpp_class_name,
                    "cpp_class_instance_name": cpp_class_instance_name,
                    "cpp_class_params": deepcopy(cpp_class_params)
                })
        d_cfg["cfg_class"].append(deepcopy(cfg_class_d))
    logging.info("... Done iterating, writing results to file")
    # .cfg write
    cfg_file_name = d_cfg["cfg_filename"]
    logging.info("Writing %s" % (cfg_file_name, ))
    with open(cfg_file_name, "w") as cfg_file:
        cfg_file.write(rend.render_path("templates/cfg.mustache", d_cfg))
    header_file_name = "%s_setting_callbacks.h" % (project.lower(), )
    logging.info("Writing %s" % (header_file_name, ))
    with open(header_file_name, "w") as header_file:
        header_file.write(rend.render_path("templates/setting_callbacks.h.mustache", d_cfg))
    include_file_name = "%s_setting_callback_includes.h" % (project.lower(), )
    logging.info("Writing %s" % (include_file_name, ))
    with open(include_file_name, "w") as include_file:
        include_file.write(rend.render_path("templates/setting_callback_includes.h.mustache", d_cfg))
    rst_file_name = "%s_settings_param.rst" % (project.lower(), )
    logging.info("Writing %s" % (rst_file_name, ))
    with open(rst_file_name, "w") as rst_file:
        rst_file.write(rend.render_path("templates/settings_param.rst.mustache", d_cfg))
def main():
    """Regenerate all state/settings outputs, then record build provenance."""
    logging.basicConfig(level="INFO")
    for states_xml in ("common.xml", "ardrone3.xml"):
        generate_states(states_xml)
    #generate_settings("common_commands.xml")
    generate_settings("ardrone3.xml")
    script_name = os.path.basename(__file__)
    script_git_hash = subprocess.check_output(
        ['git', 'rev-parse', '--short', 'HEAD']).strip()
    # Write a small provenance record describing this generation run.
    with open("last_build_info", "w") as last_build_file:
        last_build_file.write(rend.render_path(
            "templates/last_build_info.mustache",
            {
                "source_hash": LIBARCOMMANDS_GIT_HASH,
                "date": datetime.datetime.now(),
                "generator": script_name,
                "generator_git_hash": script_git_hash
            }))
if __name__ == "__main__":
main()
| {
"content_hash": "fb975d9b7f6bcecdeeffe975a0e366c4",
"timestamp": "",
"source": "github",
"line_count": 514,
"max_line_length": 177,
"avg_line_length": 39.71206225680934,
"alnum_prop": 0.5123456790123457,
"repo_name": "AutonomyLab/bebop_autonomy",
"id": "bf4d647db5eeb3e6f73fb038e680b7e7ea91c4da",
"size": "20435",
"binary": false,
"copies": "1",
"ref": "refs/heads/indigo-devel",
"path": "bebop_driver/scripts/meta/generate.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "405229"
},
{
"name": "CMake",
"bytes": "4926"
},
{
"name": "HTML",
"bytes": "20764"
},
{
"name": "Python",
"bytes": "33280"
},
{
"name": "Shell",
"bytes": "830"
}
],
"symlink_target": ""
} |
"""Module which extends the Camera type."""
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from ._scenepic import (
    Camera,
    camera_orbit
)

# Attach the native ``camera_orbit`` function as a method on the Camera type,
# so callers can invoke it as ``Camera.orbit(...)``.
Camera.orbit = camera_orbit
| {
"content_hash": "94001f957d0b00bc95d16449c12436c9",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 43,
"avg_line_length": 18.545454545454547,
"alnum_prop": 0.7058823529411765,
"repo_name": "microsoft/scenepic",
"id": "75267282896104751c1d475962f0f5098d0fa58e",
"size": "204",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/scenepic/camera.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "223"
},
{
"name": "C++",
"bytes": "1151808"
},
{
"name": "CMake",
"bytes": "14718"
},
{
"name": "JavaScript",
"bytes": "3011"
},
{
"name": "Jupyter Notebook",
"bytes": "48054"
},
{
"name": "Python",
"bytes": "213419"
},
{
"name": "TypeScript",
"bytes": "230730"
}
],
"symlink_target": ""
} |
from django import forms
class UploadFileForm(forms.Form):
    """Minimal form exposing a single file-upload field."""

    # The uploaded file (FileField is required by default).
    file = forms.FileField()
"content_hash": "8392781165ec7ee336dadef4bf69b066",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 33,
"avg_line_length": 17.8,
"alnum_prop": 0.7528089887640449,
"repo_name": "Omenia/RFHistory",
"id": "03d9dee665a7e528d849f66d6058d7472bf364aa",
"size": "89",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ServerApp/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7300"
}
],
"symlink_target": ""
} |
import pandas as pd
from datanator.util import mongo_util
import json
import datetime
from pymongo import ASCENDING
from pymongo.collation import Collation, CollationStrength
import datanator.config.core
from datanator_query_python.query import query_uniprot
from datanator.data_source import uniprot_nosql
class Halflife(mongo_util.MongoUtil):
    """Loader for RNA half-life data from doi:10.1186/s12864-016-3219-8
    (Methanosarcina acetivorans, NCBI taxonomy id 188937) into a MongoDB
    ``rna_halflife``-style collection, cross-referencing the ``uniprot``
    collection for gene/protein names.
    """

    def __init__(self, cache_dir=None, server=None, db=None, collection_str=None,
                 authDB=None, readPreference=None, username=None, password=None,
                 verbose=None, max_entries=None, uniprot_col_db=None):
        """Init

        Args:
            cache_dir (:obj:`str`, optional): Cache directory for logs. Defaults to None.
            server (:obj:`str`, optional): MongoDB server address. Defaults to None.
            db (:obj:`str`, optional): Database where initial uniprot collection resides. Defaults to None.
            collection_str (:obj:`str`, optional): name of collection. Defaults to None.
            authDB (:obj:`str`, optional): MongoDB authentication database. Defaults to None.
            readPreference (:obj:`str`, optional): MongoDB read preference. Defaults to None.
                NOTE(review): accepted but currently unused — confirm intent.
            username (:obj:`str`, optional): MongoDB username. Defaults to None.
            password (:obj:`str`, optional): MongoDB password. Defaults to None.
            verbose (:obj:`bool`, optional): Whether to display verbose messages. Defaults to None.
            max_entries (:obj:`int`, optional): Number of records to be processed. Defaults to None.
            uniprot_col_db (:obj:`int`, optional): Database to which new uniprot records will be inserted. Defaults to None.
        """
        super(Halflife, self).__init__(MongoDB=server, db=uniprot_col_db, username=username,
                                       password=password, authSource=authDB,
                                       verbose=verbose)
        self.cache_dir = cache_dir
        self.client, self.db, self.collection = self.con_db(collection_str)
        # Supplementary xlsx of the source publication.
        self.url = "https://static-content.springer.com/esm/art%3A10.1186%2Fs12864-016-3219-8/MediaObjects/12864_2016_3219_MOESM5_ESM.xlsx"
        # Fix: the original assigned ``self.max_entries`` twice; once suffices.
        self.max_entries = max_entries
        # Case-insensitive string matching for gene names / locus tags.
        self.collation = Collation(locale='en', strength=CollationStrength.SECONDARY)
        self.uniprot_manager = query_uniprot.QueryUniprot(username=username, password=password,
                                                          server=server, database=db, collection_str='uniprot')
        self.uniprot_col_manager = uniprot_nosql.UniprotNoSQL(MongoDB=server, db=uniprot_col_db, max_entries=max_entries,
                                                              username=username, password=password, collection_str='uniprot')
        self.username = username
        self.password = password
        self.server = server
        self.db_str = uniprot_col_db
        self.verbose = verbose

    def download_xlsx(self, sheet_name):
        """Download supplementary xlsx file

        Args:
            sheet_name (:obj:`str`): name of sheet in xlsx

        Returns:
            (:obj:`pandas.DataFrame`): xlsx transformed to pandas.DataFrame,
            with half-life columns converted from minutes to seconds.
        """
        if self.max_entries == float('inf'):
            nrows = None
        else:
            nrows = self.max_entries
        data = pd.read_excel(self.url, sheet_name=sheet_name, nrows=nrows)
        columns = ['gene_fragment', 'cog_class', 'ar_cog', 'cog', 'function', 'gene_name', 'half_life', 'half_life_std', 'std_over_avg']
        data.columns = columns
        # Source values are in minutes; store seconds.
        data['half_life'] = data['half_life'].apply(lambda x: x*60)
        data['half_life_std'] = data['half_life_std'].apply(lambda x: x*60)
        return data

    def load_halflife(self, df, growth_medium='MeOH', start=0):
        """Seed the collection with half-life records from ``df``.

        Upserts one document per row, keyed (in priority order) by gene name,
        function, or ordered locus name; creates the supporting indexes on the
        first processed row.

        Args:
            df (:obj:`pandas.DataFrame`): dataframe produced by :meth:`download_xlsx`.
            growth_medium (:obj:`str`): medium in which the cells were grown. Defaults to MeOH.
            start (:obj:`int`): row offset at which to resume processing. Defaults to 0.
        """
        df_json = json.loads(df.to_json(orient='records'))
        row_count = len(df.index)
        for i, doc in enumerate(df_json[start:]):
            if i == self.max_entries:
                break
            if self.verbose and i % 100 == 0:
                print('Processing {} row {} out of {}'.format(growth_medium, i + start, row_count))
            doc['halflives'] = [{'halflife': doc['half_life'], 'std': doc['half_life_std'], 'std_over_avg': doc['std_over_avg'],
                                'unit': 's', 'reference': [{'doi': '10.1186/s12864-016-3219-8'}], 'growth_medium': growth_medium,
                                'ordered_locus_name': doc['gene_fragment'], 'ar_cog': doc['ar_cog'], 'cog_class': doc['cog_class'],
                                'cog': doc['cog'], 'species': 'Methanosarcina acetivorans', 'ncbi_taxonomy_id': 188937}]
            doc['modified'] = datetime.datetime.utcnow()
            # Row-level fields are folded into the 'halflives' subdocument above.
            del doc['half_life']
            del doc['half_life_std']
            del doc['std_over_avg']
            del doc['ar_cog']
            del doc['cog']
            del doc['cog_class']
            if doc['gene_name'] != '-':
                self.collection.update_one({'gene_name': doc['gene_name']},
                                           {'$set': doc}, upsert=True, collation=self.collation)
            elif doc['function'] != '-':
                self.fill_uniprot_by_oln(doc['gene_fragment'])
                self.collection.update_one({'function': doc['function']},
                                           {'$set': doc}, upsert=True, collation=self.collation)
            else:
                self.fill_uniprot_by_oln(doc['gene_fragment'])
                self.collection.update_one({'halflives.ordered_locus_name': doc['gene_fragment']},
                                           {'$set': doc}, upsert=True, collation=self.collation)
            if i == 0:
                self.collection.create_index([("gene_name", ASCENDING)], background=True,
                                             collation=self.collation)
                self.collection.create_index([("halflives.ordered_locus_name", ASCENDING)], background=True,
                                             collation=self.collation)
                self.collection.create_index([("function", ASCENDING)], background=True,
                                             collation=self.collation)
        self.collection.update_many({'gene_fragment':{'$exists': True}}, {'$unset': {'gene_fragment': ""}})

    def add_to_halflife(self, df, growth_medium='TMA'):
        """Add df to existing rna_halflife collection

        Args:
            df (:obj:`pandas.DataFrame`): dataframe to be added.
            growth_medium (:obj:`str`): medium in which the cells were grown. Defaults to TMA.
        """
        df_json = json.loads(df.to_json(orient='records'))
        row_count = len(df.index)
        for i, doc in enumerate(df_json):
            if i == self.max_entries:
                break
            if self.verbose and i % 100 == 0:
                print('Processing {} row {} out of {}'.format(growth_medium, i, row_count))
            to_add = {'halflife': doc['half_life'], 'std': doc['half_life_std'], 'std_over_avg': doc['std_over_avg'],
                      'unit': 's', 'reference': [{'doi': '10.1186/s12864-016-3219-8'}], 'growth_medium': growth_medium,
                      'ordered_locus_name': doc['gene_fragment'], 'ar_cog': doc['ar_cog'], 'cog_class': doc['cog_class'],
                      'cog': doc['cog'], 'species': 'Methanosarcina acetivorans', 'ncbi_taxonomy_id': 188937}
            if doc['gene_name'] != '-':
                self.collection.update_one({'gene_name': doc['gene_name']},
                                           {'$addToSet': {'halflives': to_add},
                                            '$set': {'modified': datetime.datetime.utcnow()}},
                                           upsert=True, collation=self.collation)
            elif doc['function'] != '-':
                self.collection.update_one({'function': doc['function']},
                                           {'$addToSet': {'halflives': to_add},
                                            '$set': {'modified': datetime.datetime.utcnow()}},
                                           upsert=True, collation=self.collation)
            else:
                query = {'halflives.ordered_locus_name': doc['gene_fragment']}
                result = self.collection.find_one(filter=query, collation=self.collation)
                if result is not None:
                    self.collection.update_one(query,
                                               {'$addToSet': {'halflives': to_add},
                                                '$set': {'modified': datetime.datetime.utcnow()}},
                                               upsert=True, collation=self.collation)
                else:
                    # No existing document to merge into: insert a fresh one.
                    doc['halflives'] = [to_add]
                    doc['modified'] = datetime.datetime.utcnow()
                    del doc['half_life']
                    del doc['half_life_std']
                    del doc['std_over_avg']
                    del doc['ar_cog']
                    del doc['cog']
                    del doc['cog_class']
                    self.collection.update_one(query, {'$set': doc}, upsert=True, collation=self.collation)
        self.collection.update_many({'gene_fragment':{'$exists': True}}, {'$unset': {'gene_fragment': ""}})

    def fill_protein_name(self):
        """Create and fill the 'protein_name' field for documents whose
        'gene_name' field has a value other than '-', looking the name up
        in the uniprot collection.
        """
        con_0 = {'gene_name': {'$ne': '-'}}
        con_1 = {'gene_name': {'$exists': True}}
        query = {'$and': [con_0, con_1]}
        projection = {'gene_name': 1}
        docs = self.collection.find(filter=query, projection=projection, collation=self.collation)
        for doc in docs:
            gene_name = doc['gene_name']
            q = {'gene_name': gene_name}
            result = self.uniprot_manager.collection.find_one(filter=q, collation=self.collation, projection={'protein_name': 1})
            if result is not None:
                protein_name = result['protein_name']
            else:
                protein_name = ""
            self.collection.update_one({'_id': doc['_id']},
                                       {'$set': {'protein_name': protein_name}})

    def fill_gene_protein_name(self):
        """Fill 'gene_name' field where 'gene_name' has value of '-' and create
        'protein_name' field, resolving both via the ordered locus name.
        """
        query = {'gene_name': '-'}
        projection = {'gene_name': 1, 'halflives': 1}
        gene_name = ''
        protein_name = ''
        docs = self.collection.find(filter=query, projection=projection, collation=self.collation)
        # NOTE(review): queries database ``self.db_str`` (uniprot_col_db), unlike
        # ``self.uniprot_manager`` which was built against ``db`` — confirm intended.
        uniprot_manager = query_uniprot.QueryUniprot(username=self.username, password=self.password,
                                                     server=self.server, database=self.db_str, collection_str='uniprot')
        for doc in docs:
            oln = doc['halflives'][0]['ordered_locus_name']
            oln = 'MA_' + oln.split('MA')[1] # "MA0002" to "MA_0002"
            gene_name, protein_name = uniprot_manager.get_gene_protein_name_by_oln(oln, species=[188937])
            self.collection.update_one({'_id': doc['_id']},
                                       {'$set': {'gene_name': gene_name,
                                                 'protein_name': protein_name}})

    def fill_uniprot_by_oln(self, oln):
        """Fill uniprot collection using ordered locus name

        Args:
            oln (:obj:`str`): Ordered locus name
        """
        gene_name, protein_name = self.uniprot_manager.get_gene_protein_name_by_oln(oln, species=[188937])
        if gene_name is None and protein_name is None: # no such entry in uniprot collection
            self.uniprot_col_manager.load_uniprot(query=True, msg=oln)
        else:
            return

    def uniprot_names(self, results, count):
        """Extract protein_name and gene_name from returned
        tuple of uniprot query function

        Args:
            results (:obj:`Iter`): pymongo cursor object.
            count (:obj:`int`): Number of documents found.

        Return:
            (:obj:`tuple` of :obj:`str`): gene_name and protein_name
        """
        if count == 0:
            return '', ''
        else:
            # Only the first result is used.
            for result in results:
                gene_name = result['gene_name']
                protein_name = result['protein_name']
                return gene_name, protein_name
def main():
    """Download the RNA half-life supplementary tables (MeOH, TMA, Acetate)
    and load them into the ``rna_halflife`` collection of MongoDB."""
    mongo_conf = datanator.config.core.get_config()['datanator']['mongodb']
    loader = Halflife(username=mongo_conf['user'], password=mongo_conf['password'],
                      server=mongo_conf['server'], authDB='admin', db='datanator',
                      uniprot_col_db='datanator', verbose=True,
                      collection_str='rna_halflife', max_entries=float('inf'))
    # The MeOH sheet seeds the collection; subsequent media are merged in.
    loader.load_halflife(loader.download_xlsx('MeOH'), start=800)
    for medium in ('TMA', 'Acetate'):
        loader.add_to_halflife(loader.download_xlsx(medium), growth_medium=medium)
    loader.fill_protein_name()
    loader.fill_gene_protein_name()
"content_hash": "4e56aefb8956032010ad88587c81caf3",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 139,
"avg_line_length": 52.869565217391305,
"alnum_prop": 0.5444826555023924,
"repo_name": "KarrLab/kinetic_datanator",
"id": "c3657f768543a7eb5cd3350e44285354df2e46a1",
"size": "13376",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datanator/data_source/rna_halflife/doi_10_1186_s12864_016_3219_8.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1217"
},
{
"name": "Dockerfile",
"bytes": "171"
},
{
"name": "HTML",
"bytes": "50579"
},
{
"name": "Python",
"bytes": "980025"
}
],
"symlink_target": ""
} |
import re
from dateutil import parser
from django import template
from django.utils.safestring import mark_safe
from ..models import Phase
register = template.Library()
@register.filter
def first_p(value):
    """Return the first ``<p>...</p>`` element of ``value`` with any
    attributes stripped from its opening tag, or '' when ``value`` is
    falsy or contains no complete paragraph on a single line
    (``.`` does not match newlines).
    """
    if value:
        # Non-greedy ``.*?`` stops at the first closing tag; the original
        # greedy ``.*`` swallowed everything up to the *last* </p> on the
        # line, returning more than the first paragraph.
        p_search = re.search(r'<p(|\s+[^>]*)>.*?</p>', value)
        if p_search:
            value = p_search.group(0)
            if len(p_search.group(1)) > 0:
                # Remove the attribute string from the opening tag.
                value = value.replace(p_search.group(1), '')
            return value
    return ''
@register.filter
def convert_datetime(value):
    """Parse a date/time string into a datetime via dateutil's fuzzy parser."""
    return parser.parse(value)
def is_active_phase(this_phase, current_phase):
    """Return 'active' when ``this_phase`` equals ``current_phase``,
    'inactive' otherwise."""
    return 'active' if this_phase == current_phase else 'inactive'
@register.simple_tag
def phases_indicator(current_phase):
    """Render the three-step phase indicator list, marking ``current_phase``
    as active; returns '' for a falsy phase."""
    if not current_phase:
        return ''
    segments = ['<ul class="phase-process">']
    for phase, css_suffix, label in (
            (Phase.PREPARATION, 'preparation', 'Valmistelu'),
            (Phase.EXPERIMENT, 'experiment', 'Kokeilu'),
            (Phase.ENABLED, 'enabled', 'Käytössä')):
        segments.append(
            '<li><span class="phase-label phase-label--{css} is-{activity}">{label}</span></li>'
            .format(css=css_suffix,
                    activity=is_active_phase(phase, current_phase),
                    label=label))
    segments.append('</ul>')
    return mark_safe(''.join(segments))
| {
"content_hash": "69757c2040af47b0363be778c073913b",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 110,
"avg_line_length": 30.93617021276596,
"alnum_prop": 0.6327372764786795,
"repo_name": "City-of-Helsinki/digihel",
"id": "b2805981da83b09f5b6d362c1c9e63863356cb79",
"size": "1457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "digi/templatetags/digi_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "147906"
},
{
"name": "CoffeeScript",
"bytes": "3604"
},
{
"name": "Dockerfile",
"bytes": "639"
},
{
"name": "HTML",
"bytes": "76983"
},
{
"name": "JavaScript",
"bytes": "48715"
},
{
"name": "Python",
"bytes": "151509"
},
{
"name": "Shell",
"bytes": "165"
}
],
"symlink_target": ""
} |
from .db import FlotillaClientDynamo
from .region_meta import RegionMetadata
| {
"content_hash": "9ecd8d8920928b4d02eb789f236ca77d",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 39,
"avg_line_length": 38.5,
"alnum_prop": 0.8571428571428571,
"repo_name": "pebble/flotilla",
"id": "c8903617591d28e1cd3d90a2f5decc3bd4451662",
"size": "77",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/flotilla/client/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "563"
},
{
"name": "Python",
"bytes": "206593"
},
{
"name": "Shell",
"bytes": "1311"
}
],
"symlink_target": ""
} |
import collections
class MemoKey(collections.namedtuple('MemoKey',
              ('value_id', 'restrict_to_child_or_null'))):
    """
    The key used to memoize already-traversed nodes when resolving substitutions.

    :param value_id: id(AbstractConfigValue), or the raw int id itself
    :param restrict_to_child_or_null: Path
    """

    def __new__(cls, value, restrict_to_child_or_null):
        # Check int first so that integer keys work even before
        # AbstractConfigValue is resolvable in this namespace.
        # NOTE(review): AbstractConfigValue is not imported in this module —
        # presumably provided elsewhere; confirm.
        if isinstance(value, int):
            value_id = value
        elif isinstance(value, AbstractConfigValue):
            value_id = id(value)
        else:
            # Fix: the original concatenated str + non-str, raising TypeError
            # instead of the intended ValueError.
            raise ValueError(
                'Expected AbstractConfigValue or int, got {!r}'.format(value))
        # Fix: tuple.__new__ (reached via the namedtuple base) requires the
        # class as its first argument; the original omitted ``cls`` and so
        # construction always failed.
        return super(MemoKey, cls).__new__(
            cls,
            value_id,
            restrict_to_child_or_null
        )
| {
"content_hash": "baea7d38f5c358e901631546454e364e",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 81,
"avg_line_length": 29.12,
"alnum_prop": 0.6098901098901099,
"repo_name": "chris-martin/hocon-python",
"id": "d22c08b9b732840d6077b2c51b12fad029a206b1",
"size": "728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hocon/impl/MemoKey.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "313323"
},
{
"name": "Python",
"bytes": "151883"
},
{
"name": "Scala",
"bytes": "301214"
}
],
"symlink_target": ""
} |
def fetchone(cursor):
    """Return the next row of ``cursor`` as a dict keyed by column name,
    or None when no row is available."""
    column_names = [col[0] for col in cursor.description]
    row = cursor.fetchone()
    if not row:
        return None
    return dict(zip(column_names, row))
def fetchall(cursor):
    """Return all remaining rows of ``cursor`` as a list of dicts keyed by
    column name."""
    column_names = [col[0] for col in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
| {
"content_hash": "27f106e3980c8fd888aee180d1531b85",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 55,
"avg_line_length": 25.705882352941178,
"alnum_prop": 0.6064073226544623,
"repo_name": "slaweet/autoskola",
"id": "39427121d5015c6435476fa667a1c81cdb451b3f",
"size": "463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/geography/models/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "26971"
},
{
"name": "JavaScript",
"bytes": "603917"
},
{
"name": "Python",
"bytes": "111201"
},
{
"name": "Shell",
"bytes": "8463"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.hazmat.primitives import interfaces
def _verify_key_size(algorithm, key):
# Verify that the key size matches the expected key size
if len(key) * 8 not in algorithm.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}.".format(
len(key) * 8, algorithm.name
))
return key
@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class AES(object):
    """AES block cipher: 128-bit blocks, 128/192/256-bit keys."""
    name = "AES"
    block_size = 128  # bits
    key_sizes = frozenset([128, 192, 256])  # accepted key lengths, in bits

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class Camellia(object):
    """Camellia block cipher: 128-bit blocks, 128/192/256-bit keys."""
    name = "camellia"
    block_size = 128  # bits
    key_sizes = frozenset([128, 192, 256])  # accepted key lengths, in bits

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class TripleDES(object):
    """Triple DES block cipher: 64-bit blocks, 64/128/192-bit keys."""
    name = "3DES"
    block_size = 64  # bits
    key_sizes = frozenset([64, 128, 192])  # accepted key lengths, in bits

    def __init__(self, key):
        # Expand 1-key (8-byte) and 2-key (16-byte) inputs to the full
        # 24-byte form by repeating key material (K1=K2=K3, and K3=K1
        # respectively) before validation.
        if len(key) == 8:
            key += key + key
        elif len(key) == 16:
            key += key[:8]
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits (after any expansion done in __init__)."""
        return len(self.key) * 8
@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class Blowfish(object):
    """Blowfish block cipher: 64-bit blocks, variable key 32-448 bits."""
    name = "Blowfish"
    block_size = 64  # bits
    key_sizes = frozenset(range(32, 449, 8))  # 32..448 bits, byte-aligned

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class CAST5(object):
    """CAST5 (CAST-128) block cipher: 64-bit blocks, 40-128-bit keys."""
    name = "CAST5"
    block_size = 64  # bits
    key_sizes = frozenset(range(40, 129, 8))  # 40..128 bits, byte-aligned

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
@utils.register_interface(interfaces.CipherAlgorithm)
class ARC4(object):
    """RC4 stream cipher; registered only as CipherAlgorithm (no block size)."""
    name = "RC4"
    key_sizes = frozenset([40, 56, 64, 80, 128, 192, 256])  # bits

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
@utils.register_interface(interfaces.CipherAlgorithm)
class IDEA(object):
    """IDEA cipher: 64-bit blocks, fixed 128-bit key.

    NOTE(review): declares ``block_size`` but is registered only as
    CipherAlgorithm, not BlockCipherAlgorithm — confirm whether intended.
    """
    name = "IDEA"
    block_size = 64  # bits
    key_sizes = frozenset([128])  # only 128-bit keys accepted

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class SEED(object):
    """SEED block cipher: 128-bit blocks, fixed 128-bit key."""
    name = "SEED"
    block_size = 128  # bits
    key_sizes = frozenset([128])  # only 128-bit keys accepted

    def __init__(self, key):
        # Raises ValueError when len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
| {
"content_hash": "e31eb6081a049329d69fdb157b9927a9",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 66,
"avg_line_length": 24.992537313432837,
"alnum_prop": 0.6434756643774261,
"repo_name": "dstufft/cryptography",
"id": "bd8437c222aeafc6c60dec8f2f5c89f7f37df7e9",
"size": "3895",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "cryptography/hazmat/primitives/ciphers/algorithms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1108"
},
{
"name": "C++",
"bytes": "686"
},
{
"name": "Go",
"bytes": "4062"
},
{
"name": "Python",
"bytes": "941021"
},
{
"name": "Shell",
"bytes": "8542"
}
],
"symlink_target": ""
} |
"""This script handles command-line invocations of the package."""
import argparse
import logging
import os
import signal
import sys
from importlib.machinery import SourceFileLoader
from bidon.db.access import ModelAccess
from bidon.db.core import get_pg_core
from pypgq.queue import Cooperative, Queue, StopMode
# Maps POSIX signals to queue stop modes: SIGHUP drains everything queued,
# SIGINT finishes only the currently-running jobs, SIGTERM stops immediately.
STOP_SIGNALS = {
    signal.SIGHUP: StopMode.when_all_done,
    signal.SIGINT: StopMode.when_current_done,
    signal.SIGTERM: StopMode.now
}
def main():
    """Entry point for module being run as main."""
    args = parse_args()
    if args is None:
        return
    # Dispatch table for the parsed subcommand.
    dispatch = {
        "schema": lambda: print(get_ddl(args.schema_name, args.is_95)),
        "start": lambda: start_queues(args),
        "stop": lambda: stop_queue(args.pid_filepath, args.stop_mode),
        "status": lambda: query_queue(args.pid_filepath),
    }
    try:
        handler = dispatch[args.action]
    except KeyError:
        raise Exception("Unhandled subcommand: {}".format(args.action))
    handler()
def parse_args():
    """Parses command line arguments.

    Builds the ``schema``/``start``/``stop``/``status`` subcommands and
    returns the parsed namespace, or None (after printing usage) when no
    subcommand was given.
    """
    fcls = argparse.ArgumentDefaultsHelpFormatter
    apar = argparse.ArgumentParser(prog="python3 -m pypgq", description="PyPGQ", formatter_class=fcls)
    spar = apar.add_subparsers()
    # --- schema: print the DDL ---
    schema_cmd = spar.add_parser("schema", help="Write the PyPGQ DDL to STDOUT", formatter_class=fcls)
    schema_cmd.add_argument("schema_name",
                            default="public",
                            nargs="?",
                            help="The name of the schema to contain the job tables")
    schema_cmd.add_argument("--is-95",
                            default=False,
                            action="store_true",
                            help="If the Postgres version is 9.5+ pass this flag to enable some "
                                 "features")
    schema_cmd.set_defaults(action="schema")
    # --- start: run one or more queue processes ---
    start_cmd = spar.add_parser("start",
                                help="Run a job queue with jobs handlers assigned and logging "
                                     "configured in the file at --jobfile-path",
                                formatter_class=fcls)
    start_cmd.add_argument("-c",
                           "--connection-string",
                           required=True,
                           help="A postgres connection string for the database holding the job "
                                "tables")
    start_cmd.add_argument("-f",
                           "--job-filepath",
                           required=True,
                           help="The path to a python script that will assign handlers to the queue. "
                                "If the path is a directory, then the directory's parent will be "
                                "added to sys.path, and path/__init__.py will be loaded")
    start_cmd.add_argument("-a",
                           "--job-args",
                           help="A string argument to pass as the second parameter to the setup_jobs "
                                "function defined in job_filepath")
    start_cmd.add_argument("-s",
                           "--schema-name",
                           default="public",
                           help="The name of the schema that holds the job tables")
    start_cmd.add_argument("-q",
                           "--queues",
                           default=1,
                           type=int,
                           help="The number of queue processes to launch. Each queue beyond the "
                                "first will be launched in a child process")
    start_cmd.add_argument("-w",
                           "--workers",
                           default=10,
                           type=int,
                           help="The number of workers to spawn for each queue. So if this value is "
                                "5 and you have 1 queue, there will be 5 workers total. If 2 queues "
                                "then 10 workers total, etc")
    start_cmd.add_argument("-p",
                           "--pid-filepath",
                           default=None,
                           help="The file to write the PID of the main queue. This is needed if you "
                                "want to daemonize the queue and use the stop cmd")
    start_cmd.add_argument("--schedule-frequency",
                           default=60,
                           type=int,
                           help="Time, in seconds, between checks to queue scheduled jobs")
    start_cmd.add_argument("--coop-mode",
                           default=None,
                           choices={"none", "advisory_lock", "row_lock"},
                           help="The cooperative mode to run the queues in")
    start_cmd.add_argument("--coop-arg",
                           default=None,
                           help="The arg for the cooperative mode. If the coop mode is advisory_lock "
                                "you need to provide either a big int, or a pair of integers "
                                "separated by a comma for the advisory lock key")
    start_cmd.set_defaults(action="start")
    # --- stop: signal a running queue via its pid file ---
    stop_cmd = spar.add_parser("stop",
                               help="Stop a job queue whose pid is listed in pid-filepath",
                               formatter_class=fcls)
    stop_cmd.add_argument("-p", "--pid-filepath", required=True)
    stop_cmd.add_argument("-m",
                          "--stop-mode",
                          default="current",
                          choices={"all", "current", "now"})
    stop_cmd.set_defaults(action="stop")
    # --- status: report on a running queue ---
    status_cmd = spar.add_parser("status",
                                 help="Write the queue status to STDERR of the terminal that the "
                                      "queue is attached to",
                                 formatter_class=fcls)
    status_cmd.add_argument("-p", "--pid-filepath", required=True)
    status_cmd.set_defaults(action="status")
    args = apar.parse_args()
    # No subcommand given: print usage and signal the caller with None.
    if not hasattr(args, "action"):
        apar.print_usage()
        return None
    return args
def get_ddl(schema_name, skip_locked=True):
    """Returns the queue DDL with the schema name and lock options substituted in.

    :param schema_name: The schema to place the objects in
    :param skip_locked: when True, include the " skip locked" clause in the
        generated SQL (requires Postgres 9.5+)
    """
    ddl_path = os.path.join(os.path.dirname(__file__), "ddl.sql")
    with open(ddl_path, "r") as rf:
        template = rf.read()
    substitutions = dict(
        SCHEMA_NAME=schema_name,
        SKIP_LOCKED=" skip locked" if skip_locked else "")
    return template.format(**substitutions)
def start_queues(args):
    """Starts one queue per requested process, forking a child for each queue
    beyond the first.

    :param args: command line args
    """
    coop = parse_cooperative(args.coop_mode, args.coop_arg)
    is_parent = True
    children = []
    if args.pid_filepath:
        write_pidfile(args.pid_filepath)
    if coop.mode == Cooperative.none and args.queues > 1:
        raise Exception("You must provide a cooperative mode if launching "
                        "multiple queue processes")
    for _ in range(args.queues - 1):
        pid = os.fork()
        if pid == 0:
            # We're a forked child: run our own queue, with no scheduler and
            # no child list of our own.
            is_parent = False
            break
        children.append(pid)
    start_queue(args.connection_string,
                args.job_filepath,
                args.schema_name,
                args.workers,
                coop,
                args.schedule_frequency if is_parent else None,
                args.job_args,
                children if is_parent else None)
def start_queue(connection_string, job_filepath, schema_name="public", workers=10, cooperative=None,
                schedule_frequency=None, job_args=None, children=None):
    """Creates, configures and runs a job queue.

    :param connection_string: a Postgres connection string
    :param job_filepath: the path to a python script that can configure the queue
    :param schema_name: the name of the schema that contains the queue tables
    :param workers: the number of concurrent workers to run
    :param cooperative: the Cooperative settings for coordinating multiple queues
    :param schedule_frequency: seconds between scheduled-job checks, or None to
        disable scheduling in this process
    :param job_args: an argument passed through to the job module's setup_jobs
    :param children: pids of forked child queues to forward stop signals to
    """
    model_access = ModelAccess(get_pg_core(connection_string),
                               search_path=schema_name)
    model_access.open(autocommit=True)
    queue = Queue(model_access,
                  worker_count=workers,
                  cooperative=cooperative,
                  schedule_frequency=schedule_frequency)
    job_module = load_module(job_filepath, None)
    job_module.setup_jobs(queue, job_args)
    def stop(sig, _):
        """Stops the queue in the manner specified by the signal, and forwards
        the signal to any child queue processes.

        :param sig: the signal received
        """
        queue.stop(stop_mode=STOP_SIGNALS[sig])
        if children:
            for pid in children:
                os.kill(pid, sig)
                try:
                    os.waitpid(pid, 0)
                except ChildProcessError:
                    # Child already shut down before we started waiting on it.
                    pass
    for sig in STOP_SIGNALS:
        signal.signal(sig, stop)
    # SIGINFO only exists on BSD-derived platforms (e.g. macOS, FreeBSD).
    # Registering it unconditionally crashes with AttributeError on Linux,
    # so skip the status handler where the signal is unavailable.
    if hasattr(signal, "SIGINFO"):
        signal.signal(signal.SIGINFO, lambda n, f: print(queue.status(), file=sys.stderr))
    log_queue_info(job_filepath, workers)
    queue.start()
def parse_cooperative(coop_mode, coop_arg):
    """Parses the coop command line args into a Cooperative class instance.

    :param coop_mode: a string matching one of the coop mode types
    :param coop_arg: a string that can be parsed into a coop arg. A pair of
        comma-separated integers becomes a 2-tuple, otherwise a single int.
    """
    # Unknown modes and None both fall back to Cooperative.none.
    mode = {
        "advisory_lock": Cooperative.advisory_lock,
        "row_lock": Cooperative.row_lock,
    }.get(coop_mode, Cooperative.none)
    if coop_arg is None:
        arg = None
    elif "," in coop_arg:
        first, second = coop_arg.split(",", 1)
        arg = (int(first.strip()), int(second.strip()))
    else:
        arg = int(coop_arg)
    return Cooperative(mode, arg)
def stop_queue(pid_filepath, mode=None):
    """Sends a kill signal to the queue running under the pid found written in the
    pid file, then removes the pid file.

    :param pid_filepath: the path to the pid file
    :param mode: the stop mode. one of: "now", "all", "current". Anything else
        (including None) behaves like "current".
    """
    mode_signals = {"now": signal.SIGTERM, "all": signal.SIGHUP}
    sig = mode_signals.get(mode, signal.SIGINT)
    os.kill(read_pidfile(pid_filepath), sig)
    os.remove(pid_filepath)
def query_queue(pid_filepath):
    """Sends the SIGINFO signal to the queue running under the pid found written
    in the pid file.

    NOTE(review): signal.SIGINFO only exists on BSD-derived platforms (e.g.
    macOS); on Linux this attribute lookup raises AttributeError — confirm the
    intended target OS.

    :param pid_filepath: the path to the pid file
    """
    os.kill(read_pidfile(pid_filepath), signal.SIGINFO)
def log_queue_info(job_filepath, workers):
    """Writes startup info about the queue to the queue log.

    :param job_filepath: the job module path to report
    :param workers: the worker count to report
    """
    pid = os.getpid()
    log(f"Queue config:\n"
        f" pid: {pid}\n"
        f" job_file: {job_filepath}\n"
        f" workers: {workers}")
    log(f"To stop or query the queue, send a signal:\n"
        f" kill -HUP {pid} #stop the queue when there are no more waiting jobs\n"
        f" kill -INT {pid} #stop the queue when the currently running jobs are done\n"
        f" kill -TERM {pid} #cancel all running jobs and stop the queue asap\n"
        f" kill -INFO {pid} #print the current queue status to the queue process' STDERR")
def write_pidfile(filepath):
    """Writes the current process' pid to the filepath. If the file exists an
    error is raised.

    The file is created with O_CREAT|O_EXCL so the existence check and the
    creation are one atomic operation — the previous exists-then-open sequence
    had a race window where two processes could both pass the check.

    :param filepath: the path to write the pid
    :raises Exception: if the file already exists
    """
    try:
        fd = os.open(filepath, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
    except FileExistsError:
        raise Exception("Unable to write pid file. It already exists") from None
    with os.fdopen(fd, "w") as wf:
        wf.write(str(os.getpid()))
def read_pidfile(filepath):
    """Reads the pid from the contents of the file at filepath.

    :param filepath: the path to the file
    :returns: the pid as an int
    """
    with open(filepath, "r") as handle:
        contents = handle.read()
    return int(contents.strip())
def load_module(path, name):
    """Returns a loaded module from path. If path is a directory, then the parent
    directory is added to sys.path, and path/__init__.py is loaded and that
    module returned.

    :param path: the path to the directory containing the init file or to a python file
    :param name: the name to assign the module; falls back to the directory name
        or "__loaded_module__" respectively when None
    """
    # Local import keeps this a drop-in change even though the module-level
    # imports only pull in SourceFileLoader.
    import importlib.util
    if os.path.isdir(path):
        mod_root, mod_name = os.path.split(path)
        filename = os.path.join(path, "__init__.py")
        sys.path.insert(0, mod_root)
        module_name = name or mod_name
    else:
        filename = path
        module_name = name or "__loaded_module__"
    # SourceFileLoader.load_module() is deprecated and removed in Python 3.12;
    # build the module through a spec instead.
    spec = importlib.util.spec_from_file_location(module_name, filename)
    module = importlib.util.module_from_spec(spec)
    # load_module() used to register the module in sys.modules; preserve that
    # so re-imports and intra-package imports keep working.
    sys.modules[module_name] = module
    spec.loader.exec_module(module)
    return module
def log(msg):
    """Writes a message to the queue log (the "pypgq" logger, at INFO level)."""
    queue_logger = logging.getLogger("pypgq")
    queue_logger.info(msg)
if __name__ == "__main__":
    # Entry point when run as a script / `python -m`: parse args and dispatch.
    main()
| {
"content_hash": "c2da1fa672c609bcb005dbe831b9f9d6",
"timestamp": "",
"source": "github",
"line_count": 356,
"max_line_length": 100,
"avg_line_length": 34.27247191011236,
"alnum_prop": 0.5902794852880912,
"repo_name": "treycucco/pypgqueue",
"id": "8c38b781a368608ba8f5729191266180ac6d597e",
"size": "12201",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pypgq/__main__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "15980"
}
],
"symlink_target": ""
} |
import time
import random
from dataclasses import dataclass, field
from typing import Optional
from mephisto.abstractions.database import MephistoDB
from mephisto.abstractions.providers.mturk.mturk_agent import MTurkAgent
from mephisto.abstractions.providers.mturk.utils.script_utils import (
direct_soft_block_mturk_workers,
)
from mephisto.operations.hydra_config import RunScriptConfig
from mephisto.operations.operator import Operator
from mephisto.tools.scripts import load_db_and_process_config
from omegaconf import DictConfig, OmegaConf, MISSING
from parlai.crowdsourcing.utils.frontend import build_task
@dataclass
class MTurkConfig:
    """
    Add MTurk-specific flags.

    Presumably consumed as a structured-config node (used by
    MTurkRunScriptConfigMixin below) — confirm against the Hydra setup.
    """

    # Comma-separated file paths; each file holds newline-separated worker IDs
    # to soft-block (see soft_block_mturk_workers). Stays MISSING unless set.
    worker_blocklist_paths: Optional[str] = field(
        default=MISSING,
        metadata={
            "help": (
                'Path(s) to a list of IDs of workers to soft-block, separated by newlines. Use commas to indicate multiple lists'
            )
        },
    )
@dataclass
class MTurkRunScriptConfigMixin:
    """
    Add useful flags for running MTurk tasks.
    """

    # Evaluated once at import time, so every run launched by this process
    # shares the same value when parametrizing block_qualification.
    current_time: int = int(time.time())
    # Use default_factory instead of a class-level MTurkConfig() instance:
    # a shared instance default is one mutable object reused by every config,
    # and Python 3.11+ dataclasses reject unhashable defaults with ValueError.
    mturk: MTurkConfig = field(default_factory=MTurkConfig)
@dataclass
class MTurkRunScriptConfig(MTurkRunScriptConfigMixin, RunScriptConfig):
    """
    Add useful flags for running MTurk tasks.

    Combines the MTurk flags from MTurkRunScriptConfigMixin with the base
    RunScriptConfig. Use this instead of MTurkRunScriptConfigMixin when there
    are no task-specific fields that need to be set in the script config;
    otherwise inherit from the mixin directly and add your own fields.
    """
def get_mturk_id_from_mephisto_wrapper(agent):
    """
    Returns the MTurk worker ID from a ParlAI-Wrapped Mephisto Agent.

    For non-MTurk agents, returns a sentinel string embedding the Mephisto
    worker name rather than raising.
    """
    if isinstance(agent, MTurkAgent):
        return agent.mephisto_agent.get_worker().get_mturk_worker_id()
    return f"--NOT-MTURK-AGENT-{agent.mephisto_agent.get_worker().worker_name}"
def soft_block_mturk_workers(
    cfg: DictConfig, db: MephistoDB, soft_block_qual_name: str
):
    """
    Soft-block all MTurk workers listed in the input paths.

    No-op when the provider is not MTurk; prints a note and skips when no
    blocklist paths are configured.
    """
    if cfg.mephisto.provider.get('_provider_type', 'mock') != 'mturk':
        return
    if cfg.mturk.get('worker_blocklist_paths', None) is None:
        print(
            'Skipping soft-blocking workers because no blocklist path(s) are given.'
        )
        return
    # Union the IDs from every listed file; the set de-duplicates workers
    # that appear on more than one list.
    worker_blocklist = set()
    for path in cfg.mturk.worker_blocklist_paths.split(','):
        with open(path) as f:
            worker_blocklist |= set(f.read().strip().split('\n'))
    print(
        f'About to soft-block {len(worker_blocklist):d} workers by '
        f'giving them the qualification "{soft_block_qual_name}".'
    )
    direct_soft_block_mturk_workers(
        db=db,
        worker_list=list(worker_blocklist),
        soft_block_qual_name=soft_block_qual_name,
        requester_name=cfg.mephisto.provider.get("requester_name", None),
    )
def run_static_task(cfg: DictConfig, task_directory: str, task_id: str):
    """
    Run static task, given configuration.

    :param cfg: the Hydra run configuration; must contain a `mephisto` node
    :param task_directory: the directory whose frontend is built for the task
    :param task_id: fallback task name when cfg.mephisto.task.task_name is unset
    """
    db, cfg = load_db_and_process_config(cfg)
    print(f'\nHydra config:\n{OmegaConf.to_yaml(cfg)}')
    # Fixed seed so any randomized task setup is reproducible across runs.
    random.seed(42)
    task_name = cfg.mephisto.task.get('task_name', task_id)
    soft_block_qual_name = cfg.mephisto.blueprint.get(
        'block_qualification', f'{task_name}_block'
    )
    # Default to a task-specific name to avoid soft-block collisions
    soft_block_mturk_workers(cfg=cfg, db=db, soft_block_qual_name=soft_block_qual_name)
    build_task(task_directory)
    # Launch the Mephisto task and block until all runs complete.
    operator = Operator(db)
    operator.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
    operator.wait_for_runs_then_shutdown(
        skip_input=True, log_rate=cfg.monitoring_log_rate
    )
| {
"content_hash": "72e28a5c0184e9f5f0995bbd946d5eb4",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 129,
"avg_line_length": 32.96551724137931,
"alnum_prop": 0.6626569037656904,
"repo_name": "facebookresearch/ParlAI",
"id": "a9a28457599c62edc4f40b18276501e202bedf91",
"size": "4024",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "parlai/crowdsourcing/utils/mturk.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "2000"
},
{
"name": "CSS",
"bytes": "38474"
},
{
"name": "Cuda",
"bytes": "4118"
},
{
"name": "Dockerfile",
"bytes": "1218"
},
{
"name": "HTML",
"bytes": "645771"
},
{
"name": "JavaScript",
"bytes": "405110"
},
{
"name": "Makefile",
"bytes": "289"
},
{
"name": "Python",
"bytes": "6802410"
},
{
"name": "Shell",
"bytes": "26147"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.