| code (string, 2–1.05M chars) | repo_name (string, 5–104 chars) | path (string, 4–251 chars) | language (1 class) | license (15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
from model.contact import Contact
import random
import string
import os.path
import jsonpickle
import getopt
import sys

try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of groups", "file"])
except getopt.GetoptError as err:
    print(err)  # getopt has no usage(); report the parsing error instead
    sys.exit(2)

n = 5
f = "date\\contacts.json"

for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a


def random_string(prefix, maxlen):
    symbols = string.ascii_letters + string.digits + string.punctuation + " " * 10
    return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])


def random_number(prefix, maxlen):
    symbols = string.punctuation + string.digits * 40
    return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])


testdata = [Contact(Firstname=random_string("First", 10), Middlename=random_string("Middle", 10),
                    Lastname=random_string("Last", 10),
                    Nickname=random_string("Nick", 8), Title=random_string("Title", 15),
                    Company=random_string("Company", 20),
                    Address=random_string("Address", 40), Home=random_number("", 8), Mobile=random_number("", 10),
                    Work=random_number("", 10), Fax=random_number("", 10), Email=random_string("mail@", 15),
                    Email2=random_string("mail2@", 15), Email3=random_string("mail", 15),
                    Homepage=random_string("www.", 15),
                    Bday="3", Bmonth="March", Byear="1979",
                    Aday="10", Amonth="September", Ayear="2017",
                    Address2=random_string("Address", 40), Phone2=random_number("", 12),
                    Notes=random_string("Address", 70))
            for i in range(n)]

file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)

with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
| IrinaZI/Python_training | generator/contact.py | Python | apache-2.0 | 2,034 |
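A standalone sketch of the option handling in the generator above: getopt yields (flag, value) pairs and the loop overrides the defaults n=5 and f="date\\contacts.json". The argument values here are examples, not taken from the repository.

# Illustrative only: feed getopt an explicit argument list instead of sys.argv.
import getopt

opts, args = getopt.getopt(["-n", "3", "-f", "data/contacts.json"], "n:f:")
n, f = 5, "date\\contacts.json"
for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a
print(n, f)  # 3 data/contacts.json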
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, Text, DateTime, MetaData


def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    chassis = Table('chassis', meta, autoload=True)
    chassis.create_column(Column('extra', Text))
    chassis.create_column(Column('created_at', DateTime))
    chassis.create_column(Column('updated_at', DateTime))


def downgrade(migrate_engine):
    raise NotImplementedError('Downgrade from version 007 is unsupported.')
| citrix-openstack-build/ironic | ironic/db/sqlalchemy/migrate_repo/versions/007_add_extra_created_updated_to_chassis.py | Python | apache-2.0 | 1,120 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from abc import abstractmethod
class ProcessHandler(object):
  """A simple class to abstract process handling calls using the same interface as subprocess.Popen.

  See SubprocessProcessHandler below for an example.
  """

  @abstractmethod
  def wait(self):
    raise NotImplementedError

  @abstractmethod
  def kill(self):
    raise NotImplementedError

  @abstractmethod
  def terminate(self):
    raise NotImplementedError


class SubprocessProcessHandler(ProcessHandler):
  """The simple passthrough class for a subprocess.Popen object."""

  def __init__(self, process):
    self._process = process

  def wait(self):
    return self._process.wait()

  def kill(self):
    return self._process.kill()

  def terminate(self):
    return self._process.terminate()
| dturner-tw/pants | src/python/pants/util/process_handler.py | Python | apache-2.0 | 1,071 |
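A minimal usage sketch for the ProcessHandler/SubprocessProcessHandler pattern above; it assumes the two classes are importable and wraps an arbitrary example command:

import subprocess

# SubprocessProcessHandler just forwards wait/kill/terminate to the wrapped Popen,
# so callers can be written against the ProcessHandler interface rather than Popen.
proc = subprocess.Popen(["python", "-c", "print('hello')"])
handler = SubprocessProcessHandler(proc)
print(handler.wait())  # exit code, exactly what proc.wait() would have returned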
# fixfastq.py
# Runs through a FASTQ file and throws out all reads that don't make sense.
# Usage: nohup cat ddx5_ActD_R1_test.fastq | parallel -j 8 --pipe -L3000 python fixfastq.py > ddx5_ActD_R1_test_ofn.fastq &
import fileinput


def writeLines(lines):
    # Only write a record whose sequence and quality lines have equal length.
    if len(lines[1]) != len(lines[3]):
        return
    for l in lines:
        print(l.rstrip())


lines = []
counter = 0
goodFlag = False
for line in fileinput.input():
    counter += 1
    if counter % 4 == 1:
        # Start of a new record: the header line must begin with '@'.
        if line[0] == '@':
            if lines != [] and goodFlag:
                writeLines(lines)
            lines = [line]
            goodFlag = True
        else:
            # Out-of-sync input: drop the partial record and resynchronize.
            lines = []
            goodFlag = False
            counter -= 1
    else:
        lines.append(line)
if len(lines) == 4:
    writeLines(lines)
| bdo311/chirpseq-analysis | fixfastq.py | Python | apache-2.0 | 716 |
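The filter above keeps a read only if its 4-line record has a header starting with '@' and sequence and quality lines of equal length. A tiny self-contained check of those two rules on a made-up record:

# Hypothetical 4-line FASTQ record, used only to illustrate the validity rules
# applied by fixfastq.py above.
record = [
    "@read1\n",        # header must start with '@'
    "ACGTACGT\n",      # sequence
    "+\n",
    "IIIIIIII\n",      # quality string must match the sequence length
]
header_ok = record[0][0] == "@"
lengths_ok = len(record[1]) == len(record[3])
print(header_ok and lengths_ok)  # True for this well-formed record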
# python3
# pylint: disable=g-bad-file-header
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Thompson sampling evaluation of ENN agent on bandit task."""
import functools
from typing import Dict, Optional, Tuple
from acme.utils import loggers
import chex
from enn import base as enn_base
from enn import losses
from enn import utils
import haiku as hk
import jax
import jax.numpy as jnp
from neural_testbed import agents
from neural_testbed import base as testbed_base
from neural_testbed import generative
from neural_testbed.bandit import replay
import optax
class ThompsonEnnBandit:
"""Experiment of Thompson sampling bandit."""
def __init__(
self,
enn_config: agents.VanillaEnnConfig,
input_dim: int,
num_actions: int,
temperature: float = 1,
steps_per_obs: int = 1,
logger: Optional[loggers.Logger] = None,
batch_size: int = 128,
l2_weight_decay: float = 1,
replay_capacity: int = 10_000,
learning_rate: float = 1e-3,
seed: int = 0,
):
"""Initialize a Thompson Sampling experiment."""
# Initializing the agent internals
prior = testbed_base.PriorKnowledge(
input_dim=input_dim,
num_train=100,
num_classes=2,
tau=1,
layers=2,
temperature=temperature,
)
self.enn = enn_config.enn_ctor(prior)
loss_fn = enn_config.loss_ctor(prior, self.enn)
loss_fn = functools.partial(loss_fn, self.enn)
def predicate(module_name: str, name: str, value) -> bool:
del name, value
return 'prior' not in module_name
def loss_with_decay(
params: hk.Params,
batch: enn_base.Batch,
key: enn_base.RngKey) -> Tuple[enn_base.Array, enn_base.LossMetrics]:
# Adding annealing l2 weight decay manually
data_loss, metrics = loss_fn(params, batch, key)
l2_weight = losses.l2_weights_with_predicate(params, predicate)
metrics['l2_weight'] = l2_weight
decay_loss = l2_weight_decay * l2_weight / batch.extra['num_steps']
return data_loss + decay_loss, metrics
self._loss_with_decay = jax.jit(loss_with_decay)
optimizer = optax.adam(learning_rate)
# Forward network at random index
def forward(params: hk.Params,
inputs: enn_base.Array,
key: enn_base.RngKey) -> enn_base.Array:
index = self.enn.indexer(key)
return self.enn.apply(params, inputs, index)
self._forward = jax.jit(forward)
# Perform an SGD step on a batch of data
def sgd_step(
params: hk.Params,
opt_state: optax.OptState,
batch: enn_base.Batch,
key: enn_base.RngKey,
) -> Tuple[hk.Params, optax.OptState]:
grads, _ = jax.grad(loss_with_decay, has_aux=True)(params, batch, key)
updates, new_opt_state = optimizer.update(grads, opt_state)
new_params = optax.apply_updates(params, updates)
return new_params, new_opt_state
self._sgd_step = jax.jit(sgd_step)
# Generating the underlying function
self.rng = hk.PRNGSequence(seed)
self.actions = jax.random.normal(next(self.rng), [num_actions, input_dim])
logit_fn = generative.make_2layer_mlp_logit_fn(
input_dim=input_dim,
temperature=temperature,
hidden=50,
num_classes=2,
key=next(self.rng),
)
logits = logit_fn(self.actions)
# Vector of probabilities of rewards for each action
self.probs = jax.nn.softmax(logits)[:, 1]
chex.assert_shape(self.probs, [num_actions])
self.max_prob = jnp.max(self.probs)
# Initializing the network
index = self.enn.indexer(next(self.rng))
self.params = self.enn.init(next(self.rng), self.actions, index)
self.opt_state = optimizer.init(self.params)
self._steps_per_obs = steps_per_obs
self._temperature = temperature
self._batch_size = batch_size
self.l2_weight_decay = l2_weight_decay
self.replay = replay.Replay(capacity=replay_capacity)
self.logger = (
logger or loggers.make_default_logger('experiment', time_delta=0))
self.num_steps = 0
self.total_regret = 0
def select_action(params: hk.Params,
key: enn_base.RngKey) -> Dict[str, enn_base.Array]:
net_key, noise_key, selection_key = jax.random.split(key, 3)
net_out = forward(params, self.actions, net_key)
logits = utils.parse_net_output(net_out)
probs = jax.nn.softmax(logits)[:, 1]
action = _random_argmax(probs, selection_key)
chosen_prob = self.probs[action]
reward = jax.random.bernoulli(noise_key, chosen_prob)
regret = self.max_prob - chosen_prob
return {
'action': action,
'reward': reward,
'regret': regret,
'chosen_prob': chosen_prob, # for debugging
}
self._select_action = jax.jit(select_action)
def run(self, num_steps: int, log_freq: int = 1):
"""Run a TS experiment for num_steps."""
for _ in range(num_steps):
self.num_steps += 1
regret = self.step()
self.total_regret += regret
if self.num_steps % log_freq == 0:
self.logger.write({
'total_regret': self.total_regret,
't': self.num_steps,
'ave_regret': self.total_regret / self.num_steps,
'regret': regret,
})
for _ in range(self._steps_per_obs):
if self.num_steps >= 1:
self.params, self.opt_state = self._sgd_step(
self.params, self.opt_state, self._get_batch(), next(self.rng))
def step(self) -> float:
"""Select action, update replay and return the regret."""
results = self._select_action(self.params, next(self.rng))
self.replay.add([
self.actions[results['action']],
jnp.ones([1]) * results['reward'],
jnp.ones([1], dtype=jnp.int64) * self.num_steps,
])
return float(results['regret'])
def _get_batch(self) -> enn_base.Batch:
actions, rewards, indices = self.replay.sample(self._batch_size)
return enn_base.Batch(
actions, rewards, indices, extra={'num_steps': self.num_steps})
def _random_argmax(vals: chex.Array,
key: chex.PRNGKey,
scale: float = 1e-7) -> int:
"""Select argmax with additional random noise."""
noise = jax.random.uniform(key, vals.shape)
return jnp.argmax(vals + scale * noise, axis=0)
| deepmind/neural_testbed | neural_testbed/bandit/thompson.py | Python | apache-2.0 | 7,000 |
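The run()/step() loop above tracks total_regret, ave_regret and a per-step regret of max_prob - chosen_prob. A much smaller, ENN-free sketch of the same bookkeeping, using plain NumPy Thompson sampling with Beta posteriors on a made-up Bernoulli bandit:

import numpy as np

rng = np.random.default_rng(0)
probs = np.array([0.3, 0.5, 0.7])      # true reward probabilities (assumed)
alpha = np.ones(len(probs))            # Beta posterior: observed successes + 1
beta = np.ones(len(probs))             # Beta posterior: observed failures + 1
total_regret = 0.0

for t in range(1, 1001):
    sampled = rng.beta(alpha, beta)    # one posterior sample per arm
    action = int(np.argmax(sampled))   # act greedily w.r.t. the sample
    reward = rng.random() < probs[action]
    alpha[action] += reward
    beta[action] += 1 - reward
    total_regret += probs.max() - probs[action]
    if t % 200 == 0:
        print({"t": t, "total_regret": round(total_regret, 2),
               "ave_regret": round(total_regret / t, 4)})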
# Copyright 2019 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
from collections import defaultdict
from ..const import SheetStyle
from .column_rsheet import ColumnRSheet
from .row_rsheet import RowRSheet
from .json_rsheet import JSONRSheet
render_class = {
SheetStyle.columns: ColumnRSheet,
SheetStyle.rows: RowRSheet,
SheetStyle.json: JSONRSheet,
}
use_json = False
def set_style_json(value=True):
global use_json
use_json = value
def get_style_json():
global use_json
return use_json
def render(
sheet,
title,
data_source,
style=None,
common=None,
description=None,
selectors=None,
title_repeat=False,
disable_aggregations=False,
dynamic_diff=False,
):
"""
Arguments:
sheet -- The declaration.sheet to render.
title -- Title for this render.
data_source -- Dictionary of data_sources to project fields from.
Keyword Arguments:
style -- 'SheetStyle.columns': Output fields as columns.
'SheetStyle.rows' : Output fields as rows.
'SheetStyle.json' : Output sheet as JSON.
common -- A dict of common information passed to each entry.
description -- A description of the sheet.
selectors -- List of regular expressions to select which fields from
dynamic fields.
title_repeat -- Repeat title and row headers every n columns.
disable_aggregations -- Disable sheet aggregations.
dynamic_diff -- Only show dynamic fields that aren't uniform.
"""
tcommon = defaultdict(lambda: None)
if common is not None:
tcommon.update(common)
assert set(sheet.from_sources) - set(data_source.keys()) == set()
if use_json:
style = SheetStyle.json
elif style is None:
style = sheet.default_style
return render_class[style](
sheet,
title,
data_source,
tcommon,
description=description,
selectors=selectors,
title_repeat=title_repeat,
disable_aggregations=disable_aggregations,
dynamic_diff=dynamic_diff,
).render()
| aerospike/aerospike-admin | lib/view/sheet/render/__init__.py | Python | apache-2.0 | 2,681 |
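A quick illustration of the defaultdict(lambda: None) pattern used for tcommon in render() above: entries can look up any "common" key and simply get None when it was not supplied, instead of a KeyError. The keys below are made up.

from collections import defaultdict

tcommon = defaultdict(lambda: None)
tcommon.update({"node_names": ["A", "B"]})

print(tcommon["node_names"])   # ['A', 'B']
print(tcommon["cluster_key"])  # None (missing keys never raise)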
from setuptools import setup, find_packages

setup(
    name='mzident_writer',
    version='0.0.5',
    packages=find_packages(),
    install_requires=["lxml"]
)
| mobiusklein/mzidentml_writer | setup.py | Python | apache-2.0 | 174 |
#MenuTitle: Set Subscript and Superscript Parameters
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Measures your superior and inferior figures and derives subscript/superscript X/Y offset/size parameters.
"""
import vanilla, math
from Foundation import NSPoint
class CalculateSubscriptAndSuperscriptParameters( object ):
def __init__( self ):
# Window 'self.w':
windowWidth = 350
windowHeight = 170
windowWidthResize = 100 # user can resize width by this value
windowHeightResize = 0 # user can resize height by this value
self.w = vanilla.FloatingWindow(
( windowWidth, windowHeight ), # default window size
"Set Subscript and Superscript Parameters", # window title
minSize = ( windowWidth, windowHeight ), # minimum size (for resizing)
maxSize = ( windowWidth + windowWidthResize, windowHeight + windowHeightResize ), # maximum size (for resizing)
autosaveName = "com.mekkablue.CalculateSubscriptAndSuperscriptParameters.mainwindow" # stores last window position and size
)
# UI elements:
linePos, inset, lineHeight = 12, 15, 22
self.w.descriptionText = vanilla.TextBox( (inset, linePos+2, -inset, 14), "Calculate custom parameters in Font Info > Masters:", sizeStyle='small', selectable=True )
linePos += lineHeight
self.w.subscriptCheck = vanilla.CheckBox( (inset, linePos, 80, 20), "Subscript:", value=True, callback=self.SavePreferences, sizeStyle='small' )
self.w.subscriptSample = vanilla.EditText( (inset+85, linePos, -inset-175, 19), "oneinferior", callback=self.SavePreferences, sizeStyle='small' )
self.w.subscriptReferenceText = vanilla.TextBox( (-inset-170, linePos+3, -inset-95, 14), "in relation to:", sizeStyle='small', selectable=True )
self.w.subscriptReference = vanilla.EditText( (-inset-95, linePos, -inset-25, 19), "one", callback=self.SavePreferences, sizeStyle='small' )
self.w.subscriptReset = vanilla.SquareButton( (-inset-20, linePos+0.5, -inset, 18), "↺", sizeStyle='small', callback=self.resetValues )
# tooltips:
tooltip = "If enabled, will calculate: subscriptXOffsetName, subscriptYOffsetName, subscriptXSizeName, subscriptYSizeName. The subscript glyph (on the left) will be measured in relation to the reference glyph on the right; offset and size scale will be computed from their differences."
self.w.subscriptSample.getNSTextField().setToolTip_(tooltip)
self.w.subscriptReference.getNSTextField().setToolTip_(tooltip)
self.w.subscriptCheck.getNSButton().setToolTip_(tooltip)
self.w.subscriptReferenceText.getNSTextField().setToolTip_(tooltip)
self.w.subscriptReset.getNSButton().setToolTip_("Resets the subscript reference glyphs to oneinferior vs. one.")
linePos += lineHeight
self.w.superscriptCheck = vanilla.CheckBox( (inset, linePos, 80, 20), "Superscript:", value=True, callback=self.SavePreferences, sizeStyle='small' )
self.w.superscriptSample = vanilla.EditText( (inset+85, linePos, -inset-175, 19), "onesuperior", callback=self.SavePreferences, sizeStyle='small' )
self.w.superscriptReferenceText = vanilla.TextBox( (-inset-170, linePos+3, -inset-95, 14), "in relation to:", sizeStyle='small', selectable=True )
self.w.superscriptReference = vanilla.EditText( (-inset-95, linePos, -inset-25, 19), "one", callback=self.SavePreferences, sizeStyle='small' )
self.w.superscriptReset = vanilla.SquareButton( (-inset-20, linePos, -inset, 18), "↺", sizeStyle='small', callback=self.resetValues )
# tooltips:
tooltip = "If enabled, will calculate: superscriptXOffsetName, superscriptYOffsetName, superscriptXSizeName, superscriptYSizeName. The superscript glyph (on the left) will be measured in relation to the reference glyph on the right; offset and size scale will be computed from their differences."
self.w.superscriptSample.getNSTextField().setToolTip_(tooltip)
self.w.superscriptReference.getNSTextField().setToolTip_(tooltip)
self.w.superscriptCheck.getNSButton().setToolTip_(tooltip)
self.w.superscriptReferenceText.getNSTextField().setToolTip_(tooltip)
self.w.superscriptReset.getNSButton().setToolTip_("Resets the superscript reference glyphs to onesuperior vs. one.")
linePos += lineHeight
self.w.roundValues = vanilla.CheckBox( (inset, linePos, 130, 20), "Round all values by:", value=False, callback=self.SavePreferences, sizeStyle='small' )
self.w.roundBy = vanilla.EditText( (inset+130, linePos, 50, 19), "10", callback=self.SavePreferences, sizeStyle='small' )
self.w.xSizeEqualsYSize = vanilla.CheckBox( (inset+200, linePos, -inset, 20), "xSize=ySize", value=False, callback=self.SavePreferences, sizeStyle='small' )
# tooltips:
tooltip = "If enabled, will round all calculated values by the given amount. Recommended: 5 or 10."
self.w.roundValues.getNSButton().setToolTip_(tooltip)
self.w.roundBy.getNSTextField().setToolTip_(tooltip)
self.w.xSizeEqualsYSize.getNSButton().setToolTip_("If enabled, will set the horizontal scale to the same value as the vertical scale, ensuring a proportional scale. Especially useful for italics.")
linePos += lineHeight
self.w.syncWithFirstMaster = vanilla.CheckBox( (inset, linePos, -inset, 20), "Sync all values with first master", value=False, callback=self.SavePreferences, sizeStyle='small' )
self.w.syncWithFirstMaster.getNSButton().setToolTip_("If enabled, will insert the same values in all masters.")
linePos += lineHeight
# Run Button:
self.w.runButton = vanilla.Button( (-100-inset, -20-inset, -inset, -inset), "Insert", sizeStyle='regular', callback=self.CalculateSubscriptAndSuperscriptParametersMain )
self.w.setDefaultButton( self.w.runButton )
# Load Settings:
if not self.LoadPreferences():
print("Note: 'Calculate Subscript and Superscript Parameters' could not load preferences. Will resort to defaults")
# Open window and focus on it:
self.updateUI()
self.w.open()
self.w.makeKey()
def resetValues(self, sender=None):
if sender == self.w.subscriptReset:
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptSample"] = "oneinferior"
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptReference"] = "one"
if sender == self.w.superscriptReset:
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptSample"] = "onesuperior"
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptReference"] = "one"
self.LoadPreferences()
def updateUI(self, sender=None):
self.w.runButton.enable(self.w.superscriptCheck.get() or self.w.subscriptCheck.get())
self.w.roundBy.enable(self.w.roundValues.get())
def SavePreferences( self, sender=None ):
try:
# write current settings into prefs:
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptCheck"] = self.w.subscriptCheck.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptSample"] = self.w.subscriptSample.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptReference"] = self.w.subscriptReference.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptCheck"] = self.w.superscriptCheck.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptSample"] = self.w.superscriptSample.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptReference"] = self.w.superscriptReference.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundValues"] = self.w.roundValues.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundBy"] = self.w.roundBy.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.xSizeEqualsYSize"] = self.w.xSizeEqualsYSize.get()
Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.syncWithFirstMaster"] = self.w.syncWithFirstMaster.get()
self.updateUI()
return True
except:
import traceback
print(traceback.format_exc())
return False
def LoadPreferences( self ):
try:
# register defaults:
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptCheck", 1)
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptSample", "oneinferior")
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptReference", "one")
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptCheck", 1)
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptSample", "onesuperior")
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptReference", "one")
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundValues", 0)
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundBy", 10)
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.xSizeEqualsYSize", 0)
Glyphs.registerDefault("com.mekkablue.CalculateSubscriptAndSuperscriptParameters.syncWithFirstMaster", 0)
# load previously written prefs:
self.w.subscriptCheck.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptCheck"] )
self.w.subscriptSample.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptSample"] )
self.w.subscriptReference.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.subscriptReference"] )
self.w.superscriptCheck.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptCheck"] )
self.w.superscriptSample.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptSample"] )
self.w.superscriptReference.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.superscriptReference"] )
self.w.roundValues.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundValues"] )
self.w.roundBy.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundBy"] )
self.w.xSizeEqualsYSize.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.xSizeEqualsYSize"] )
self.w.syncWithFirstMaster.set( Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.syncWithFirstMaster"] )
self.updateUI()
return True
except:
import traceback
print(traceback.format_exc())
return False
def roundByFactor(self, number, roundFactor):
if roundFactor > 1:
remainder = (number%roundFactor)
floor = number//roundFactor
roundUpOrDown = int(round(1.0*remainder/roundFactor)) # 0 or 1
number = (floor+roundUpOrDown) * roundFactor
return int(number)
def italicOffset( self, y, italicAngle=0.0, pivotalY=0.0 ):
yOffset = y - pivotalY # calculate vertical offset
italicAngle = math.radians( italicAngle ) # convert to radians
tangens = math.tan( italicAngle ) # math.tan needs radians
horizontalDeviance = tangens * yOffset # vertical distance from pivotal point
return int(horizontalDeviance)
def CalculateSubscriptAndSuperscriptParametersMain( self, sender=None ):
try:
# clear macro window log:
Glyphs.clearLog()
# update settings to the latest user input:
if not self.SavePreferences():
print("Note: 'Calculate Subscript and Superscript Parameters' could not write preferences.")
thisFont = Glyphs.font # frontmost font
if thisFont is None:
Message(title="No Font Open", message="The script requires a font. Open a font and run the script again.", OKButton=None)
else:
print("Calculate Subscript and Superscript Parameters Report for %s" % thisFont.familyName)
if thisFont.filepath:
print(thisFont.filepath)
else:
print("⚠️ The font file has not been saved yet.")
print()
syncWithFirstMaster = Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.syncWithFirstMaster"]
xSizeEqualsYSize = Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.xSizeEqualsYSize"]
if Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundValues"]:
roundFactor = max(1,int(Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.roundBy"]))
print("\nRounding all values by %i"%roundFactor)
else:
roundFactor = 1
for prefix in ("sub","super"):
check = Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.%sscriptCheck" % prefix]
sample = Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.%sscriptSample" % prefix]
reference = Glyphs.defaults["com.mekkablue.CalculateSubscriptAndSuperscriptParameters.%sscriptReference" % prefix]
xOffsetName = "%sscriptXOffset" % prefix
yOffsetName = "%sscriptYOffset" % prefix
xSizeName = "%sscriptXSize" % prefix
ySizeName = "%sscriptYSize" % prefix
upm = thisFont.upm
sampleGlyph = thisFont.glyphs[sample]
referenceGlyph = thisFont.glyphs[reference]
if not sampleGlyph:
print("❌ Sample glyph ‘%s’ not in font. Aborting %sscript calculation." % (sample,prefix))
elif not referenceGlyph:
print("❌ Reference glyph ‘%s’ not in font. Aborting %sscript calculation." % (reference,prefix))
else:
for i,thisMaster in enumerate(thisFont.masters):
if i==0 or not syncWithFirstMaster:
sampleLayer = sampleGlyph.layers[thisMaster.id]
sampleBottom = sampleLayer.bounds.origin.y
sampleLeft = sampleLayer.bounds.origin.x
sampleWidth = sampleLayer.bounds.size.width
sampleHeight = sampleLayer.bounds.size.height
referenceLayer = referenceGlyph.layers[thisMaster.id]
referenceBottom = referenceLayer.bounds.origin.y
referenceLeft = referenceLayer.bounds.origin.x
referenceWidth = referenceLayer.bounds.size.width
referenceHeight = referenceLayer.bounds.size.height
italicAngle = thisMaster.italicAngle
ySize = upm * sampleHeight / referenceHeight
xSize = ySize if xSizeEqualsYSize else upm*sampleWidth/referenceWidth
yOffset = sampleBottom - referenceBottom
xOffset = self.italicOffset(yOffset, italicAngle=italicAngle) if italicAngle!=0.0 else 0
thisMaster.customParameters[xOffsetName] = self.roundByFactor( int(xOffset), roundFactor )
thisMaster.customParameters[yOffsetName] = self.roundByFactor( int(yOffset), roundFactor )
thisMaster.customParameters[xSizeName] = self.roundByFactor( int(xSize), roundFactor )
thisMaster.customParameters[ySizeName] = self.roundByFactor( int(ySize), roundFactor )
print("\n✅ Master %s:"%thisMaster.name)
print(" 🔢 %s: %i" % (xOffsetName, xOffset) )
print(" 🔢 %s: %i" % (yOffsetName, yOffset) )
print(" 🔢 %s: %i" % (xSizeName, xSize) )
print(" 🔢 %s: %i" % (ySizeName, ySize) )
print("\nDone.")
# Final report:
Glyphs.showNotification(
"%s: Done" % (thisFont.familyName),
"Calculate Subscript and Superscript Parameters is finished. Details in Macro Window",
)
print("\nDone.")
except Exception as e:
# brings macro window to front and reports error:
Glyphs.showMacroWindow()
print("Calculate Subscript and Superscript Parameters Error: %s" % e)
import traceback
print(traceback.format_exc())
CalculateSubscriptAndSuperscriptParameters()
| mekkablue/Glyphs-Scripts | Font Info/Set Subscript and Superscript Parameters.py | Python | apache-2.0 | 15,776 |
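The script above derives the parameters as ySize = UPM * sampleHeight / referenceHeight, yOffset = sampleBottom - referenceBottom, and, for italic masters, xOffset = tan(italicAngle) * yOffset. A back-of-the-envelope rerun of that arithmetic with made-up glyph measurements (UPM 1000, an inferior figure 480 units tall sitting at -120, a reference figure 700 units tall sitting at 0, italic angle 10 degrees):

import math

upm = 1000.0
sample_height, sample_bottom = 480.0, -120.0
reference_height, reference_bottom = 700.0, 0.0
italic_angle = 10.0

y_size = upm * sample_height / reference_height              # ~685.7 -> subscriptYSize
y_offset = sample_bottom - reference_bottom                  # -120   -> subscriptYOffset
x_offset = math.tan(math.radians(italic_angle)) * y_offset   # ~-21.2 -> subscriptXOffset

print(int(y_size), int(y_offset), int(x_offset))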
import unittest
import simple_db_migrate
import os
import sys
from StringIO import StringIO
from mock import patch, Mock
class RunTest(unittest.TestCase):
def setUp(self):
config_file = '''
DATABASE_HOST = os.getenv('DB_HOST') or 'localhost'
DATABASE_USER = os.getenv('DB_USERNAME') or 'root'
DATABASE_PASSWORD = os.getenv('DB_PASSWORD') or ''
DATABASE_NAME = os.getenv('DB_DATABASE') or 'migration_example'
ENV1_DATABASE_NAME = 'migration_example_env1'
DATABASE_MIGRATIONS_DIR = os.getenv('DATABASE_MIGRATIONS_DIR') or 'example'
UTC_TIMESTAMP = os.getenv("UTC_TIMESTAMP") or True
DATABASE_ANY_CUSTOM_VARIABLE = 'Some Value'
SOME_ENV_DATABASE_ANY_CUSTOM_VARIABLE = 'Other Value'
DATABASE_OTHER_CUSTOM_VARIABLE = 'Value'
'''
f = open('sample.conf', 'w')
f.write(config_file)
f.close()
self.stdout_mock = patch('sys.stdout', new_callable=StringIO)
self.stdout_mock.start()
def tearDown(self):
os.remove('sample.conf')
if os.path.exists('simple-db-migrate.conf'):
os.remove('simple-db-migrate.conf')
self.stdout_mock.stop()
@patch('codecs.getwriter')
@patch('sys.stdout', encoding='iso-8859-1')
def test_it_should_ensure_stdout_is_using_an_utf8_encoding(self, stdout_mock, codecs_mock):
new_stdout = Mock()
codecs_mock.return_value = Mock(**{'return_value':new_stdout})
reload(simple_db_migrate)
codecs_mock.assert_called_with('utf-8')
self.assertEqual(new_stdout, sys.stdout)
@patch('sys.stdout', new_callable=object)
def test_it_should_not_break_when_sys_stdout_has_not_encoding_property(self, stdout_mock):
reload(simple_db_migrate)
self.assertIs(stdout_mock, sys.stdout)
def test_it_should_define_a_version_string(self):
self.assertTrue(isinstance(simple_db_migrate.SIMPLE_DB_MIGRATE_VERSION, str))
@patch('simple_db_migrate.cli.CLI.parse')
def test_it_should_use_cli_to_parse_arguments(self, parse_mock):
parse_mock.return_value = (Mock(simple_db_migrate_version=True), [])
try:
simple_db_migrate.run_from_argv()
except SystemExit:
pass
parse_mock.assert_called_with(None)
def test_it_should_print_simple_db_migrate_version_and_exit(self):
try:
simple_db_migrate.run_from_argv(["-v"])
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual('simple-db-migrate v%s\n\n' % simple_db_migrate.SIMPLE_DB_MIGRATE_VERSION, sys.stdout.getvalue())
@patch('simple_db_migrate.cli.CLI.show_colors')
def test_it_should_activate_use_of_colors(self, show_colors_mock):
try:
simple_db_migrate.run_from_argv(["--color"])
except SystemExit:
pass
self.assertEqual(1, show_colors_mock.call_count)
@patch('simple_db_migrate.cli.CLI.show_colors')
def test_it_should_print_message_and_exit_when_user_interrupt_execution(self, show_colors_mock):
show_colors_mock.side_effect = KeyboardInterrupt()
try:
simple_db_migrate.run_from_argv(["--color"])
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual('\nExecution interrupted by user...\n\n', sys.stdout.getvalue())
@patch('simple_db_migrate.cli.CLI.show_colors')
def test_it_should_print_message_and_exit_when_user_an_error_happen(self, show_colors_mock):
show_colors_mock.side_effect = Exception('occur an error')
try:
simple_db_migrate.run_from_argv(["--color"])
except SystemExit as e:
self.assertEqual(1, e.code)
self.assertEqual('[ERROR] occur an error\n\n', sys.stdout.getvalue())
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
@patch.object(simple_db_migrate.helpers.Utils, 'get_variables_from_file', return_value = {'DATABASE_HOST':'host', 'DATABASE_PORT':'1234', 'DATABASE_USER': 'root', 'DATABASE_PASSWORD':'', 'DATABASE_NAME':'database', 'DATABASE_MIGRATIONS_DIR':'.'})
def test_it_should_read_configuration_file_using_fileconfig_class_and_execute_with_default_configuration(self, get_variables_from_file_mock, main_mock, execute_mock):
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf')])
get_variables_from_file_mock.assert_called_with(os.path.abspath('sample.conf'))
self.assertEqual(1, execute_mock.call_count)
execute_mock.assert_called_with()
self.assertEqual(1, main_mock.call_count)
config_used = main_mock.call_args[0][0]
self.assertTrue(isinstance(config_used, simple_db_migrate.config.FileConfig))
self.assertEqual('mysql', config_used.get('database_engine'))
self.assertEqual('root', config_used.get('database_user'))
self.assertEqual('', config_used.get('database_password'))
self.assertEqual('database', config_used.get('database_name'))
self.assertEqual('host', config_used.get('database_host'))
self.assertEqual(1234, config_used.get('database_port'))
self.assertEqual(False, config_used.get('utc_timestamp'))
self.assertEqual('__db_version__', config_used.get('database_version_table'))
self.assertEqual([os.path.abspath('.')], config_used.get("database_migrations_dir"))
self.assertEqual(None, config_used.get('schema_version'))
self.assertEqual(False, config_used.get('show_sql'))
self.assertEqual(False, config_used.get('show_sql_only'))
self.assertEqual(None, config_used.get('new_migration'))
self.assertEqual(False, config_used.get('drop_db_first'))
self.assertEqual(False, config_used.get('paused_mode'))
self.assertEqual(None, config_used.get('log_dir'))
self.assertEqual(None, config_used.get('label_version'))
self.assertEqual(False, config_used.get('force_use_files_on_down'))
self.assertEqual(False, config_used.get('force_execute_old_migrations_versions'))
self.assertEqual(1, config_used.get('log_level'))
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
def test_it_should_get_configuration_exclusively_from_args_if_not_use_configuration_file_using_config_class_and_execute_with_default_configuration(self, main_mock, execute_mock):
simple_db_migrate.run_from_argv(['--db-host', 'host', '--db-port', '4321', '--db-name', 'name', '--db-user', 'user', '--db-password', 'pass', '--db-engine', 'engine', '--db-migrations-dir', '.:/tmp:../migration'])
self.assertEqual(1, execute_mock.call_count)
execute_mock.assert_called_with()
self.assertEqual(1, main_mock.call_count)
config_used = main_mock.call_args[0][0]
self.assertTrue(isinstance(config_used, simple_db_migrate.config.Config))
self.assertEqual('engine', config_used.get('database_engine'))
self.assertEqual('user', config_used.get('database_user'))
self.assertEqual('pass', config_used.get('database_password'))
self.assertEqual('name', config_used.get('database_name'))
self.assertEqual('host', config_used.get('database_host'))
self.assertEqual(4321, config_used.get('database_port'))
self.assertEqual(False, config_used.get('utc_timestamp'))
self.assertEqual('__db_version__', config_used.get('database_version_table'))
self.assertEqual([os.path.abspath('.'), '/tmp', os.path.abspath('../migration')], config_used.get("database_migrations_dir"))
self.assertEqual(None, config_used.get('schema_version'))
self.assertEqual(False, config_used.get('show_sql'))
self.assertEqual(False, config_used.get('show_sql_only'))
self.assertEqual(None, config_used.get('new_migration'))
self.assertEqual(False, config_used.get('drop_db_first'))
self.assertEqual(False, config_used.get('paused_mode'))
self.assertEqual(None, config_used.get('log_dir'))
self.assertEqual(None, config_used.get('label_version'))
self.assertEqual(False, config_used.get('force_use_files_on_down'))
self.assertEqual(False, config_used.get('force_execute_old_migrations_versions'))
self.assertEqual(1, config_used.get('log_level'))
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
@patch.object(simple_db_migrate.helpers.Utils, 'get_variables_from_file', return_value = {'DATABASE_HOST':'host', 'DATABASE_USER': 'root', 'DATABASE_PASSWORD':'', 'DATABASE_NAME':'database', 'DATABASE_MIGRATIONS_DIR':'.'})
def test_it_should_use_log_level_as_specified(self, import_file_mock, main_mock, execute_mock):
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf'), '--log-level', 4])
config_used = main_mock.call_args[0][0]
self.assertEqual(4, config_used.get('log_level'))
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
@patch.object(simple_db_migrate.helpers.Utils, 'get_variables_from_file', return_value = {'DATABASE_HOST':'host', 'DATABASE_USER': 'root', 'DATABASE_PASSWORD':'', 'DATABASE_NAME':'database', 'DATABASE_MIGRATIONS_DIR':'.'})
def test_it_should_use_log_level_as_2_when_in_paused_mode(self, import_file_mock, main_mock, execute_mock):
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf'), '--pause'])
config_used = main_mock.call_args[0][0]
self.assertEqual(2, config_used.get('log_level'))
@patch('simple_db_migrate.getpass', return_value='password_asked')
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
@patch.object(simple_db_migrate.helpers.Utils, 'get_variables_from_file', return_value = {'DATABASE_HOST':'host', 'DATABASE_USER': 'root', 'DATABASE_PASSWORD':'<<ask_me>>', 'DATABASE_NAME':'database', 'DATABASE_MIGRATIONS_DIR':'.'})
def test_it_should_ask_for_password_when_configuration_is_as_ask_me(self, import_file_mock, main_mock, execute_mock, getpass_mock):
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf')])
config_used = main_mock.call_args[0][0]
self.assertEqual('password_asked', config_used.get('database_password'))
self.assertEqual('\nPlease inform password to connect to database "root@host:database"\n', sys.stdout.getvalue())
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
@patch.object(simple_db_migrate.helpers.Utils, 'get_variables_from_file', return_value = {'DATABASE_HOST':'host', 'DATABASE_USER': 'root', 'DATABASE_PASSWORD':'<<ask_me>>', 'DATABASE_NAME':'database', 'DATABASE_MIGRATIONS_DIR':'.'})
def test_it_should_use_password_from_command_line_when_configuration_is_as_ask_me(self, import_file_mock, main_mock, execute_mock):
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf'), '--password', 'xpto_pass'])
config_used = main_mock.call_args[0][0]
self.assertEqual('xpto_pass', config_used.get('database_password'))
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
@patch.object(simple_db_migrate.helpers.Utils, 'get_variables_from_file', return_value = {'force_execute_old_migrations_versions':True, 'label_version':'label', 'DATABASE_HOST':'host', 'DATABASE_USER': 'root', 'DATABASE_PASSWORD':'', 'DATABASE_NAME':'database', 'DATABASE_MIGRATIONS_DIR':'.'})
def test_it_should_use_values_from_config_file_in_replacement_for_command_line(self, import_file_mock, main_mock, execute_mock):
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf')])
config_used = main_mock.call_args[0][0]
self.assertEqual('label', config_used.get('label_version'))
self.assertEqual(True, config_used.get('force_execute_old_migrations_versions'))
@patch.object(simple_db_migrate.main.Main, 'execute')
@patch.object(simple_db_migrate.main.Main, '__init__', return_value=None)
def test_it_should_check_if_has_a_default_configuration_file(self, main_mock, execute_mock):
f = open('simple-db-migrate.conf', 'w')
f.write("DATABASE_HOST = 'host_on_default_configuration_filename'")
f.close()
simple_db_migrate.run_from_argv([])
self.assertEqual(1, main_mock.call_count)
config_used = main_mock.call_args[0][0]
self.assertTrue(isinstance(config_used, simple_db_migrate.config.FileConfig))
self.assertEqual('host_on_default_configuration_filename', config_used.get('database_host'))
main_mock.reset_mock()
f = open('sample.conf', 'w')
f.write("DATABASE_HOST = 'host_on_sample_configuration_filename'")
f.close()
simple_db_migrate.run_from_argv(["-c", os.path.abspath('sample.conf')])
self.assertEqual(1, main_mock.call_count)
config_used = main_mock.call_args[0][0]
self.assertTrue(isinstance(config_used, simple_db_migrate.config.FileConfig))
self.assertEqual('host_on_sample_configuration_filename', config_used.get('database_host'))
@patch.object(simple_db_migrate.main.Main, 'labels', return_value=["v1", "foo", "v3"])
def test_it_should_print_labels_on_database_and_exit(self, labels_mock):
try:
simple_db_migrate.run_from_argv(["--info", "labels", "-c", os.path.abspath('sample.conf')])
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual('v1\nfoo\nv3\n\n', sys.stdout.getvalue())
@patch.object(simple_db_migrate.main.Main, 'labels', return_value=[])
def test_it_should_print_none_when_there_are_no_labels_on_database_and_exit(self, labels_mock):
try:
simple_db_migrate.run_from_argv(["--info", "labels", "-c", os.path.abspath('sample.conf')])
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual('NONE\n\n', sys.stdout.getvalue())
@patch.object(simple_db_migrate.main.Main, 'last_label', return_value="v3")
def test_it_should_print_last_label_on_database_and_exit(self, last_label_mock):
try:
simple_db_migrate.run_from_argv(["--info", "last_label", "-c", os.path.abspath('sample.conf')])
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual('v3\n\n', sys.stdout.getvalue())
@patch.object(simple_db_migrate.main.Main, 'last_label', return_value=None)
def test_it_should_print_none_as_last_label_when_there_are_no_labels_on_database_and_exit(self, last_label_mock):
try:
simple_db_migrate.run_from_argv(["--info", "last_label", "-c", os.path.abspath('sample.conf')])
except SystemExit as e:
self.assertEqual(0, e.code)
self.assertEqual('NONE\n\n', sys.stdout.getvalue())
def test_it_should_print_error_message_and_exit_when_required_info_is_not_valid(self):
try:
simple_db_migrate.run_from_argv(["--info", "not_valid", "-c", os.path.abspath('sample.conf')])
except SystemExit as e:
self.assertEqual(1, e.code)
self.assertEqual("[ERROR] The 'not_valid' is a wrong parameter for info\n\n", sys.stdout.getvalue())
if __name__ == '__main__':
unittest.main()
| johnnymo87/simple-db-migrate | tests/run_test.py | Python | apache-2.0 | 15,634 |
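The tests above capture everything printed to stdout by patching sys.stdout with a StringIO and asserting on .getvalue(). A standalone, Python 3 rendition of that pattern (the original file is Python 2 and uses the mock and StringIO packages):

import sys
from io import StringIO
from unittest.mock import patch

with patch("sys.stdout", new_callable=StringIO) as fake_out:
    print("simple-db-migrate v1.2.3")
    captured = fake_out.getvalue()

sys.stdout.write("captured: %r\n" % captured)  # 'simple-db-migrate v1.2.3\n'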
# Copyright (c) 2015 Hitachi Data Systems.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Helper class for Data Service operations."""
import os
from oslo_config import cfg
from oslo_log import log
from manila.common import constants
from manila import exception
from manila.i18n import _, _LW
from manila.share import rpcapi as share_rpc
from manila import utils
LOG = log.getLogger(__name__)
data_helper_opts = [
cfg.IntOpt(
'data_access_wait_access_rules_timeout',
default=180,
help="Time to wait for access rules to be allowed/denied on backends "
"when migrating a share (seconds)."),
cfg.StrOpt(
'data_node_access_ip',
help="The IP of the node interface connected to the admin network. "
"Used for allowing access to the mounting shares."),
cfg.StrOpt(
'data_node_access_cert',
help="The certificate installed in the data node in order to "
"allow access to certificate authentication-based shares."),
cfg.StrOpt(
'data_node_access_admin_user',
help="The admin user name registered in the security service in order "
"to allow access to user authentication-based shares."),
cfg.DictOpt(
'data_node_mount_options',
default={},
help="Mount options to be included in the mount command for share "
"protocols. Use dictionary format, example: "
"{'nfs': '-o nfsvers=3', 'cifs': '-o user=foo,pass=bar'}"),
]
CONF = cfg.CONF
CONF.register_opts(data_helper_opts)
class DataServiceHelper(object):
def __init__(self, context, db, share):
self.db = db
self.share = share
self.context = context
self.share_rpc = share_rpc.ShareAPI()
self.wait_access_rules_timeout = (
CONF.data_access_wait_access_rules_timeout)
def deny_access_to_data_service(self, access_ref_list, share_instance):
for access_ref in access_ref_list:
self._change_data_access_to_instance(
share_instance, access_ref, allow=False)
# NOTE(ganso): Cleanup methods do not throw exceptions, since the
# exceptions that should be thrown are the ones that call the cleanup
def cleanup_data_access(self, access_ref_list, share_instance_id):
try:
self.deny_access_to_data_service(
access_ref_list, share_instance_id)
except Exception:
LOG.warning(_LW("Could not cleanup access rule of share %s."),
self.share['id'])
def cleanup_temp_folder(self, instance_id, mount_path):
try:
path = os.path.join(mount_path, instance_id)
if os.path.exists(path):
os.rmdir(path)
self._check_dir_not_exists(path)
except Exception:
LOG.warning(_LW("Could not cleanup instance %(instance_id)s "
"temporary folders for data copy of "
"share %(share_id)s."), {
'instance_id': instance_id,
'share_id': self.share['id']})
def cleanup_unmount_temp_folder(self, unmount_template, mount_path,
share_instance_id):
try:
self.unmount_share_instance(unmount_template, mount_path,
share_instance_id)
except Exception:
LOG.warning(_LW("Could not unmount folder of instance"
" %(instance_id)s for data copy of "
"share %(share_id)s."), {
'instance_id': share_instance_id,
'share_id': self.share['id']})
def _change_data_access_to_instance(
self, instance, access_ref, allow=False):
self.db.share_instance_update_access_status(
self.context, instance['id'], constants.STATUS_OUT_OF_SYNC)
if allow:
self.share_rpc.allow_access(self.context, instance, access_ref)
else:
self.share_rpc.deny_access(self.context, instance, access_ref)
utils.wait_for_access_update(
self.context, self.db, instance, self.wait_access_rules_timeout)
def allow_access_to_data_service(
self, share_instance, connection_info_src,
dest_share_instance=None, connection_info_dest=None):
allow_access_to_destination_instance = (dest_share_instance and
connection_info_dest)
# NOTE(ganso): intersect the access type compatible with both instances
if allow_access_to_destination_instance:
access_mapping = {}
for a_type, protocols in (
connection_info_src['access_mapping'].items()):
for proto in protocols:
if (a_type in connection_info_dest['access_mapping'] and
proto in
connection_info_dest['access_mapping'][a_type]):
access_mapping[a_type] = access_mapping.get(a_type, [])
access_mapping[a_type].append(proto)
else:
access_mapping = connection_info_src['access_mapping']
access_list = self._get_access_entries_according_to_mapping(
access_mapping)
access_ref_list = []
for access in access_list:
values = {
'share_id': self.share['id'],
'access_type': access['access_type'],
'access_level': access['access_level'],
'access_to': access['access_to'],
}
old_access_list = self.db.share_access_get_all_by_type_and_access(
self.context, self.share['id'], access['access_type'],
access['access_to'])
for old_access in old_access_list:
self._change_data_access_to_instance(
share_instance, old_access, allow=False)
access_ref = self.db.share_instance_access_create(
self.context, values, share_instance['id'])
self._change_data_access_to_instance(
share_instance, access_ref, allow=True)
if allow_access_to_destination_instance:
access_ref = self.db.share_instance_access_create(
self.context, values, dest_share_instance['id'])
self._change_data_access_to_instance(
dest_share_instance, access_ref, allow=True)
access_ref_list.append(access_ref)
return access_ref_list
def _get_access_entries_according_to_mapping(self, access_mapping):
access_list = []
for access_type, protocols in access_mapping.items():
if access_type.lower() == 'cert':
access_to = CONF.data_node_access_cert
elif access_type.lower() == 'ip':
access_to = CONF.data_node_access_ip
elif access_type.lower() == 'user':
access_to = CONF.data_node_access_admin_user
else:
msg = _("Unsupported access type provided: %s.") % access_type
raise exception.ShareDataCopyFailed(reason=msg)
if not access_to:
msg = _("Configuration for Data node mounting access type %s "
"has not been set.") % access_type
raise exception.ShareDataCopyFailed(reason=msg)
access = {
'access_type': access_type,
'access_level': constants.ACCESS_LEVEL_RW,
'access_to': access_to,
}
access_list.append(access)
return access_list
@utils.retry(exception.NotFound, 0.1, 10, 0.1)
def _check_dir_exists(self, path):
if not os.path.exists(path):
raise exception.NotFound("Folder %s could not be found." % path)
@utils.retry(exception.Found, 0.1, 10, 0.1)
def _check_dir_not_exists(self, path):
if os.path.exists(path):
raise exception.Found("Folder %s was found." % path)
def mount_share_instance(self, mount_template, mount_path,
share_instance):
path = os.path.join(mount_path, share_instance['id'])
options = CONF.data_node_mount_options
options = {k.lower(): v for k, v in options.items()}
proto_options = options.get(share_instance['share_proto'].lower())
if not proto_options:
proto_options = ''
if not os.path.exists(path):
os.makedirs(path)
self._check_dir_exists(path)
mount_command = mount_template % {'path': path,
'options': proto_options}
utils.execute(*(mount_command.split()), run_as_root=True)
def unmount_share_instance(self, unmount_template, mount_path,
share_instance_id):
path = os.path.join(mount_path, share_instance_id)
unmount_command = unmount_template % {'path': path}
utils.execute(*(unmount_command.split()), run_as_root=True)
try:
if os.path.exists(path):
os.rmdir(path)
self._check_dir_not_exists(path)
except Exception:
LOG.warning(_LW("Folder %s could not be removed."), path)
| NetApp/manila | manila/data/helper.py | Python | apache-2.0 | 10,000 |
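A standalone illustration of the access_mapping intersection performed in allow_access_to_data_service() above: only access types and protocols supported by both the source and destination connection info are kept. The mappings below are made up.

src = {"ip": ["nfs", "cifs"], "user": ["cifs"]}
dest = {"ip": ["nfs"], "cert": ["glusterfs"]}

access_mapping = {}
for a_type, protocols in src.items():
    for proto in protocols:
        if a_type in dest and proto in dest[a_type]:
            access_mapping.setdefault(a_type, []).append(proto)

print(access_mapping)  # {'ip': ['nfs']}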
"""Rocket.Chat notification service."""
import logging
from rocketchat_API.APIExceptions.RocketExceptions import (
RocketAuthenticationException,
RocketConnectionException,
)
from rocketchat_API.rocketchat import RocketChat
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import (
CONF_PASSWORD,
CONF_ROOM,
CONF_URL,
CONF_USERNAME,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_URL): vol.Url(),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_ROOM): cv.string,
}
)
def get_service(hass, config, discovery_info=None):
"""Return the notify service."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
url = config.get(CONF_URL)
room = config.get(CONF_ROOM)
try:
return RocketChatNotificationService(url, username, password, room)
except RocketConnectionException:
_LOGGER.warning("Unable to connect to Rocket.Chat server at %s", url)
except RocketAuthenticationException:
_LOGGER.warning("Rocket.Chat authentication failed for user %s", username)
_LOGGER.info("Please check your username/password")
return None
class RocketChatNotificationService(BaseNotificationService):
"""Implement the notification service for Rocket.Chat."""
def __init__(self, url, username, password, room):
"""Initialize the service."""
self._room = room
self._server = RocketChat(username, password, server_url=url)
def send_message(self, message="", **kwargs):
"""Send a message to Rocket.Chat."""
data = kwargs.get(ATTR_DATA) or {}
resp = self._server.chat_post_message(message, channel=self._room, **data)
if resp.status_code == HTTP_OK:
if not resp.json()["success"]:
_LOGGER.error("Unable to post Rocket.Chat message")
else:
_LOGGER.error(
"Incorrect status code when posting message: %d", resp.status_code
)
| lukas-hetzenecker/home-assistant | homeassistant/components/rocketchat/notify.py | Python | apache-2.0 | 2,332 |
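A trimmed-down, Home Assistant-free sketch of the voluptuous validation the PLATFORM_SCHEMA above performs; plain str stands in for cv.string here, and the config values are invented:

import voluptuous as vol

schema = vol.Schema({
    vol.Required("url"): vol.Url(),
    vol.Required("username"): str,
    vol.Required("password"): str,
    vol.Required("room"): str,
})

config = {
    "url": "https://chat.example.org",
    "username": "bot",
    "password": "secret",
    "room": "general",
}
print(schema(config))  # returns the validated dict, or raises vol.Invalid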
#!/usr/bin/env python2.7
import sys
from sqlalchemy import Table, Column, Integer, String, ForeignKey, Sequence
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
Base = declarative_base()
def init_db():
Base.metadata.create_all(engine)
def drop_db():
Base.metadata.drop_all(engine)
'''
Models: DRGlance, DRNova, DRNeutronNet, DRNeutronSubnet,
DRNeutronPort, DRNeutronFloatingip, DRNeutronRouter
'''
class DRGlance(Base):
'''
Model class DRGlance
'''
__tablename__ = "dr_glance"
id = Column(Integer, Sequence('dr_glance_id_seq'), primary_key = True)
primary_uuid = Column(String(50))
secondary_uuid = Column(String(50))
status = Column(String(20))
other = Column(String(50))
def __repr__(self):
return "<DRGlance(primary_uuid = '%s', secondary_uuid = '%s', status = '%s', other = '%s')>" % (self.primary_uuid, self.secondary_uuid, self.status, self.other)
class DRNova(Base):
'''
Model Class DRNova
'''
__tablename__ = "dr_nova"
id = Column(Integer, Sequence('dr_nova_id_seq'), primary_key = True)
primary_instance_uuid = Column(String(50))
secondary_instance_uuid = Column(String(50))
primary_image_uuid = Column(String(50))
secondary_image_uuid = Column(String(50))
primary_node_name = Column(String(50))
secondary_node_name = Column(String(50))
status = Column(String(20))
other = Column(String(50))
ports = relationship("DRNeutronPort", backref='dr_nova', cascade="all, delete-orphan", passive_deletes=True)
def __repr__(self):
return "<DRNova(primary_instance_uuid = '%s', secondary_instance_uuid = '%s',primary_image_uuid = '%s', secondary_image_uuid = '%s,\
primary_node_name = '%s',secondary_node_name ='%s', status = '%s', other = '%s')>" % \
(self.primary_instance_uuid, self.secondary_instance_uuid, self.primary_image_uuid, self.secondary_image_uuid, self.primary_node_name,\
self.secondary_node_name, self.status, self.other)
class DRNeutronNet(Base):
'''
Model class DRNeutronNet
'''
__tablename__ = "dr_neutron_net"
id = Column(Integer, Sequence('dr_neutron_net_id_seq'), primary_key = True)
primary_uuid = Column(String(50))
secondary_uuid = Column(String(50))
status = Column(String(20))
deleted_flag = Column(String(2))
other = Column(String(50))
def __repr__(self):
return "<DRNeutronNet(primary_uuid = '%s', secondary_uuid = '%s', status = '%s', deleted_flag = '%s', other = '%s')>" %\
(self.primary_uuid, self.secondary_uuid, self.status, self.deleted_flag, self.other)
class DRNeutronSubnet(Base):
'''
Model class DRNeutronSubnet.
'''
__tablename__ = "dr_neutron_subnet"
id = Column(Integer, Sequence('dr_neutron_subnet_id_seq'), primary_key = True)
primary_uuid = Column(String(50))
secondary_uuid = Column(String(50))
status = Column(String(20))
deleted_flag = Column(String(2))
# network_id relate to DRNeutron.secondary_uuid
network_id = Column(String(50))
other = Column(String(50))
def __repr__(self):
return "<DRNeutronSubnet(network_id = %s, primary_uuid = '%s', secondary_uuid = '%s', status = '%s',deleted_flag = '%s', other = '%s')>" %\
(self.network_id, self.primary_uuid, self.secondary_uuid, self.status, self.deleted_flag, self.other)
class DRNeutronPort(Base):
'''
Model Class DRNeutronPort.
'''
__tablename__ = "dr_neutron_port"
id = Column(Integer, Sequence('dr_neutron_port_id_seq'), primary_key = True)
primary_uuid = Column(String(50))
secondary_uuid = Column(String(50))
primary_floatingip_uuid = Column(String(50))
secondary_floatingip_uuid = Column(String(50))
primary_floating_ip_address = Column(String(30))
secondary_floating_ip_address = Column(String(30))
deleted_flag = Column(String(2))
other = Column(String(50))
nova_id = Column(Integer, ForeignKey('dr_nova.id', ondelete='CASCADE'))
def __repr__(self):
return "<DRNeutronPort(primary_uuid = '%s', secondary_uuid = '%s', primary_floatingip_uuid = '%s', secondary_floatingip_uuid = '%s', primary_floating_ip_address = '%s', secondary_floating_ip_address = '%s', deleted_flag = '%s', other = '%s')>" %\
(self.primary_uuid, self.secondary_uuid, self.primary_floatingip_uuid, self.secondary_floatingip_uuid, self.primary_floating_ip_address, self.secondary_floating_ip_address, self.deleted_flag, self.other)
class DRNeutronRouter(Base):
'''
Model Class DRNeutronRouter.
'''
__tablename__ = "dr_neutron_router"
id = Column(Integer, Sequence('dr_neutron_router_id_seq'), primary_key = True)
primary_uuid = Column(String(50))
secondary_uuid = Column(String(50))
other = Column(String(50))
def __repr__(self):
return "<DRNeutronRouter(primary_uuid = '%s', secondary_uuid = '%s', other = '%s')>" %\
(self.primary_uuid, self.secondary_uuid,self.other)
class DRNeutronFloatingip(Base):
'''
Model Class DRNeutronFloatingip.
'''
__tablename__ = "dr_neutron_floatingip"
id = Column(Integer, Sequence('dr_neutron_floatingip_id_seq'), primary_key = True)
primary_uuid = Column(String(50))
secondary_uuid = Column(String(50))
other = Column(String(50))
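# A minimal usage sketch, assuming `Base` above is this module's declarative
# base; the in-memory SQLite URL is only a placeholder backend for the sketch,
# the real deployment's engine/session wiring lives elsewhere.
if __name__ == "__main__":
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///:memory:")   # placeholder backend
    Base.metadata.create_all(engine)               # creates the dr_* tables
    session = sessionmaker(bind=engine)()
    session.add(DRNeutronNet(primary_uuid="primary-net-uuid",
                             secondary_uuid="secondary-net-uuid",
                             status="synced", deleted_flag="0"))
    session.commit()
    print(session.query(DRNeutronNet).first())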
|
fs714/drcontroller
|
drcontroller/db/models.py
|
Python
|
apache-2.0
| 5,315
|
#!/usr/bin/env python
from brian import *
duration = 1*second
N_sims = 1
lif_eq = ['dV/dt = (V_rest-V)/tau_mem : volt']
V_rest = 0*mV
V_reset = 0*mV
V_th = 15*mV
t_refr = 2*ms
tau_mem = 10*msecond
N_in = 2000
f_in = 5*Hz
involt_exc = 0.35*mV
involt_inh = -0.2*mV
inp = PoissonGroup(N_in, f_in)
nrns = NeuronGroup(N_sims, lif_eq, threshold=V_th, reset=V_reset,\
refractory=t_refr)
con = Connection(inp, nrns, 'V')
N_exc = int(floor(N_in/2))
N_inh = N_in - N_exc
con[0:N_exc-1,0] = involt_exc
con[N_exc:N_in,0] = involt_inh
nrns.rest()
# monitors #
inp_exc_mon = SpikeMonitor(inp.subgroup(N_exc))
inp_inh_mon = SpikeMonitor(inp.subgroup(N_inh))
mem = StateMonitor(nrns, 'V', record=True)
st = SpikeMonitor(nrns)
###########
run(duration, report='stdout')
for n in range(N_sims):
f_out = len(st.spiketimes[n])/duration
print "Neuron %i firing rate: %s" % (n, f_out)
subplot(2,1,1)
raster_plot(inp_exc_mon, inp_inh_mon, showgrouplines=True,\
spacebetweengroups=0.1)
subplot(2,1,2)
plot(mem.times,mem[0],mem.times,ones(len(mem.times))*V_th)
title('Membrane voltage trace of neuron 0')
xlabel("Time (seconds)")
ylabel("Volts")
show()
|
achilleas-k/brian-scripts
|
lif_ing.py
|
Python
|
apache-2.0
| 1,159
|
from functools import lru_cache
from typing import List, Optional
from .constant import UNICODE_SECONDARY_RANGE_KEYWORD
from .utils import (
is_accentuated,
is_ascii,
is_case_variable,
is_cjk,
is_emoticon,
is_hangul,
is_hiragana,
is_katakana,
is_latin,
is_punctuation,
is_separator,
is_symbol,
is_thai,
remove_accent,
unicode_range,
)
class MessDetectorPlugin:
"""
Base abstract class used for mess detection plugins.
All detectors MUST extend and implement given methods.
"""
def eligible(self, character: str) -> bool:
"""
        Determine if the given character should be fed to this detector.
"""
raise NotImplementedError # pragma: nocover
def feed(self, character: str) -> None:
"""
        The main routine, executed for each character.
        Insert the logic by which the text would be considered chaotic.
"""
raise NotImplementedError # pragma: nocover
def reset(self) -> None:
"""
        Reset the plugin to its initial state.
"""
raise NotImplementedError # pragma: nocover
@property
def ratio(self) -> float:
"""
Compute the chaos ratio based on what your feed() has seen.
        Must NOT be lower than 0.0; there is no upper restriction.
"""
raise NotImplementedError # pragma: nocover
class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
def __init__(self) -> None:
self._punctuation_count = 0 # type: int
self._symbol_count = 0 # type: int
self._character_count = 0 # type: int
self._last_printable_char = None # type: Optional[str]
self._frenzy_symbol_in_word = False # type: bool
def eligible(self, character: str) -> bool:
return character.isprintable()
def feed(self, character: str) -> None:
self._character_count += 1
if character != self._last_printable_char and character not in [
"<",
">",
"=",
":",
"/",
"&",
";",
"{",
"}",
"[",
"]",
",",
"|",
'"',
"-",
]:
if is_punctuation(character):
self._punctuation_count += 1
elif (
character.isdigit() is False
and is_symbol(character)
and is_emoticon(character) is False
):
self._symbol_count += 2
self._last_printable_char = character
def reset(self) -> None:
self._punctuation_count = 0
self._character_count = 0
self._symbol_count = 0
@property
def ratio(self) -> float:
if self._character_count == 0:
return 0.0
ratio_of_punctuation = (
self._punctuation_count + self._symbol_count
) / self._character_count # type: float
return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0
class TooManyAccentuatedPlugin(MessDetectorPlugin):
def __init__(self) -> None:
self._character_count = 0 # type: int
self._accentuated_count = 0 # type: int
def eligible(self, character: str) -> bool:
return character.isalpha()
def feed(self, character: str) -> None:
self._character_count += 1
if is_accentuated(character):
self._accentuated_count += 1
def reset(self) -> None:
self._character_count = 0
self._accentuated_count = 0
@property
def ratio(self) -> float:
if self._character_count == 0:
return 0.0
ratio_of_accentuation = (
self._accentuated_count / self._character_count
) # type: float
return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0
class UnprintablePlugin(MessDetectorPlugin):
def __init__(self) -> None:
self._unprintable_count = 0 # type: int
self._character_count = 0 # type: int
def eligible(self, character: str) -> bool:
return True
def feed(self, character: str) -> None:
if (
character not in {"\n", "\t", "\r", "\v"}
and character.isprintable() is False
and character.isspace() is False
            and ord(character) != 0x1A  # Why? It's the ASCII substitute character.
):
self._unprintable_count += 1
self._character_count += 1
def reset(self) -> None:
self._unprintable_count = 0
@property
def ratio(self) -> float:
if self._character_count == 0:
return 0.0
return (self._unprintable_count * 8) / self._character_count
class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
def __init__(self) -> None:
self._successive_count = 0 # type: int
self._character_count = 0 # type: int
self._last_latin_character = None # type: Optional[str]
def eligible(self, character: str) -> bool:
return character.isalpha() and is_latin(character)
def feed(self, character: str) -> None:
self._character_count += 1
if self._last_latin_character is not None:
if is_accentuated(character) and is_accentuated(self._last_latin_character):
if character.isupper() and self._last_latin_character.isupper():
self._successive_count += 1
                # Worse if it's the same character duplicated with a different accent.
if remove_accent(character) == remove_accent(
self._last_latin_character
):
self._successive_count += 1
self._last_latin_character = character
def reset(self) -> None:
self._successive_count = 0
self._character_count = 0
self._last_latin_character = None
@property
def ratio(self) -> float:
if self._character_count == 0:
return 0.0
return (self._successive_count * 2) / self._character_count
class SuspiciousRange(MessDetectorPlugin):
def __init__(self) -> None:
self._suspicious_successive_range_count = 0 # type: int
self._character_count = 0 # type: int
self._last_printable_seen = None # type: Optional[str]
def eligible(self, character: str) -> bool:
return character.isprintable()
def feed(self, character: str) -> None:
self._character_count += 1
if (
character.isspace()
or is_punctuation(character)
or character
in [
"<",
">",
"=",
":",
"/",
"&",
";",
"{",
"}",
"[",
"]",
",",
"|",
'"',
"-",
]
):
self._last_printable_seen = None
return
if self._last_printable_seen is None:
self._last_printable_seen = character
return
unicode_range_a = unicode_range(
self._last_printable_seen
) # type: Optional[str]
unicode_range_b = unicode_range(character) # type: Optional[str]
if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
self._suspicious_successive_range_count += 1
self._last_printable_seen = character
def reset(self) -> None:
self._character_count = 0
self._suspicious_successive_range_count = 0
self._last_printable_seen = None
@property
def ratio(self) -> float:
if self._character_count == 0:
return 0.0
ratio_of_suspicious_range_usage = (
self._suspicious_successive_range_count * 2
) / self._character_count # type: float
if ratio_of_suspicious_range_usage < 0.1:
return 0.0
return ratio_of_suspicious_range_usage
class SuperWeirdWordPlugin(MessDetectorPlugin):
def __init__(self) -> None:
self._word_count = 0 # type: int
self._bad_word_count = 0 # type: int
self._is_current_word_bad = False # type: bool
self._foreign_long_watch = False # type: bool
self._character_count = 0 # type: int
self._bad_character_count = 0 # type: int
self._buffer = "" # type: str
self._buffer_accent_count = 0 # type: int
def eligible(self, character: str) -> bool:
return True
def feed(self, character: str) -> None:
if character.isalpha():
self._buffer = "".join([self._buffer, character])
if is_accentuated(character):
self._buffer_accent_count += 1
if (
self._foreign_long_watch is False
and is_latin(character) is False
and is_cjk(character) is False
and is_hangul(character) is False
and is_katakana(character) is False
and is_hiragana(character) is False
and is_thai(character) is False
):
self._foreign_long_watch = True
return
if not self._buffer:
return
if (
character.isspace() or is_punctuation(character) or is_separator(character)
) and self._buffer:
self._word_count += 1
buffer_length = len(self._buffer) # type: int
self._character_count += buffer_length
if buffer_length >= 4 and self._buffer_accent_count / buffer_length >= 0.3:
self._is_current_word_bad = True
if buffer_length >= 24 and self._foreign_long_watch:
self._is_current_word_bad = True
if self._is_current_word_bad:
self._bad_word_count += 1
self._bad_character_count += len(self._buffer)
self._is_current_word_bad = False
self._foreign_long_watch = False
self._buffer = ""
self._buffer_accent_count = 0
elif (
character not in {"<", ">", "-", "="}
and character.isdigit() is False
and is_symbol(character)
):
self._is_current_word_bad = True
self._buffer += character
def reset(self) -> None:
self._buffer = ""
self._is_current_word_bad = False
self._foreign_long_watch = False
self._bad_word_count = 0
self._word_count = 0
self._character_count = 0
self._bad_character_count = 0
@property
def ratio(self) -> float:
if self._word_count <= 10:
return 0.0
return self._bad_character_count / self._character_count
class CjkInvalidStopPlugin(MessDetectorPlugin):
"""
    GB (Chinese) based encodings often render the stop incorrectly when the content does not fit,
    and this can be easily detected. Searching for the overuse of '丅' and '丄'.
"""
def __init__(self) -> None:
self._wrong_stop_count = 0 # type: int
self._cjk_character_count = 0 # type: int
def eligible(self, character: str) -> bool:
return True
def feed(self, character: str) -> None:
if character in ["丅", "丄"]:
self._wrong_stop_count += 1
return
if is_cjk(character):
self._cjk_character_count += 1
def reset(self) -> None:
self._wrong_stop_count = 0
self._cjk_character_count = 0
@property
def ratio(self) -> float:
if self._cjk_character_count < 16:
return 0.0
return self._wrong_stop_count / self._cjk_character_count
class ArchaicUpperLowerPlugin(MessDetectorPlugin):
def __init__(self) -> None:
self._buf = False # type: bool
self._character_count_since_last_sep = 0 # type: int
self._successive_upper_lower_count = 0 # type: int
self._successive_upper_lower_count_final = 0 # type: int
self._character_count = 0 # type: int
self._last_alpha_seen = None # type: Optional[str]
self._current_ascii_only = True # type: bool
def eligible(self, character: str) -> bool:
return True
def feed(self, character: str) -> None:
is_concerned = character.isalpha() and is_case_variable(character)
chunk_sep = is_concerned is False
if chunk_sep and self._character_count_since_last_sep > 0:
if (
self._character_count_since_last_sep <= 64
and character.isdigit() is False
and self._current_ascii_only is False
):
self._successive_upper_lower_count_final += (
self._successive_upper_lower_count
)
self._successive_upper_lower_count = 0
self._character_count_since_last_sep = 0
self._last_alpha_seen = None
self._buf = False
self._character_count += 1
self._current_ascii_only = True
return
if self._current_ascii_only is True and is_ascii(character) is False:
self._current_ascii_only = False
if self._last_alpha_seen is not None:
if (character.isupper() and self._last_alpha_seen.islower()) or (
character.islower() and self._last_alpha_seen.isupper()
):
if self._buf is True:
self._successive_upper_lower_count += 2
self._buf = False
else:
self._buf = True
else:
self._buf = False
self._character_count += 1
self._character_count_since_last_sep += 1
self._last_alpha_seen = character
def reset(self) -> None:
self._character_count = 0
self._character_count_since_last_sep = 0
self._successive_upper_lower_count = 0
self._successive_upper_lower_count_final = 0
self._last_alpha_seen = None
self._buf = False
self._current_ascii_only = True
@property
def ratio(self) -> float:
if self._character_count == 0:
return 0.0
return self._successive_upper_lower_count_final / self._character_count
def is_suspiciously_successive_range(
unicode_range_a: Optional[str], unicode_range_b: Optional[str]
) -> bool:
"""
    Determine whether two Unicode ranges seen next to each other can be considered suspicious.
"""
if unicode_range_a is None or unicode_range_b is None:
return True
if unicode_range_a == unicode_range_b:
return False
if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
return False
if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
return False
keywords_range_a, keywords_range_b = unicode_range_a.split(
" "
), unicode_range_b.split(" ")
for el in keywords_range_a:
if el in UNICODE_SECONDARY_RANGE_KEYWORD:
continue
if el in keywords_range_b:
return False
# Japanese Exception
if unicode_range_a in ["Katakana", "Hiragana"] and unicode_range_b in [
"Katakana",
"Hiragana",
]:
return False
if unicode_range_a in ["Katakana", "Hiragana"] or unicode_range_b in [
"Katakana",
"Hiragana",
]:
if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
return False
if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
return False
if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
return False
# Chinese/Japanese use dedicated range for punctuation and/or separators.
if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
unicode_range_a in ["Katakana", "Hiragana"]
and unicode_range_b in ["Katakana", "Hiragana"]
):
if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
return False
if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
return False
return True
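# For example: an adjacent Hiragana/Katakana pair is not flagged (the Japanese
# exception above returns False), whereas "Basic Latin" followed directly by
# "CJK Unified Ideographs" falls through to the final `return True` and is
# treated as a suspicious transition.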
@lru_cache(maxsize=2048)
def mess_ratio(
decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
) -> float:
"""
    Compute a mess ratio given a decoded bytes sequence. The maximum threshold stops the computation early once it is reached.
"""
detectors = [] # type: List[MessDetectorPlugin]
for md_class in MessDetectorPlugin.__subclasses__():
detectors.append(md_class())
length = len(decoded_sequence) # type: int
mean_mess_ratio = 0.0 # type: float
if length < 512:
intermediary_mean_mess_ratio_calc = 32 # type: int
elif length <= 1024:
intermediary_mean_mess_ratio_calc = 64
else:
intermediary_mean_mess_ratio_calc = 128
for character, index in zip(decoded_sequence, range(0, length)):
for detector in detectors:
if detector.eligible(character):
detector.feed(character)
if (
index > 0 and index % intermediary_mean_mess_ratio_calc == 0
) or index == length - 1:
mean_mess_ratio = sum([dt.ratio for dt in detectors])
if mean_mess_ratio >= maximum_threshold:
break
if debug:
for dt in detectors: # pragma: nocover
print(dt.__class__, dt.ratio)
return round(mean_mess_ratio, 3)
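# A minimal usage sketch, assuming the package is importable so the relative
# imports above resolve (run it as a module, e.g. `python -m charset_normalizer.md`);
# the sample strings below are invented for demonstration only.
if __name__ == "__main__":  # pragma: nocover
    for sample in ("A clean English sentence.", "Ã©Ã¨ garbled mojibake â€™"):
        print(sample, "->", mess_ratio(sample, maximum_threshold=0.5, debug=False))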
|
sonntagsgesicht/regtest
|
.aux/venv/lib/python3.9/site-packages/charset_normalizer/md.py
|
Python
|
apache-2.0
| 17,654
|
import random
class World:
MAX_WIDTH = 100
MAX_HEIGHT = 100
    colors = ['#FDFDFD', '#AA00A0']  # second value was missing its leading '#'
    def __init__(self):
        # Build the grid per instance; a class-level list would be shared
        # (and keep growing) across every World() created.
        self.map = []
        for i in range(self.MAX_WIDTH):
            self.map.append([])
            for j in range(self.MAX_HEIGHT):
                self.map[i].append(random.choice(self.colors))
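# A minimal usage sketch: instantiating World fills `map` with a
# MAX_WIDTH x MAX_HEIGHT grid of randomly chosen colour strings.
if __name__ == "__main__":
    world = World()
    print(len(world.map), len(world.map[0]), world.map[0][0])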
|
pugovok/lupland
|
world.py
|
Python
|
apache-2.0
| 324
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import attr
import numpy as np
import math
import torch
import torchvision
import topi
import topi.testing
import tvm
from tvm import relay
from tvm.contrib import graph_runtime
from nnvm.testing.config import ctx_list
import onnx
from onnx import helper, TensorProto
import unittest
def get_tvm_output(graph_def, input_data, target, ctx, output_shape=None, output_dtype='float32'):
""" Generic function to execute and get tvm output"""
target = 'llvm'
if isinstance(input_data, list):
input_names = {}
shape_dict = {}
dtype_dict = {}
for i, _ in enumerate(input_data):
input_names[i] = graph_def.graph.input[i].name
shape_dict[input_names[i]] = input_data[i].shape
dtype_dict[input_names[i]] = input_data[i].dtype
else:
input_names = graph_def.graph.input[0].name
shape_dict = {input_names: input_data.shape}
dtype_dict = {input_names: input_data.dtype}
mod, params = relay.frontend.from_onnx(graph_def, shape_dict)
with relay.build_config(opt_level=1):
graph, lib, params = relay.build(mod,
target,
params=params)
ctx = tvm.cpu(0)
from tvm.contrib import graph_runtime
m = graph_runtime.create(graph, lib, ctx)
# set inputs
if isinstance(input_data, list):
for i, e in enumerate(input_names):
m.set_input(input_names[i], tvm.nd.array(input_data[i].astype(input_data[i].dtype)))
else:
m.set_input(input_names, tvm.nd.array(input_data.astype(input_data.dtype)))
m.set_input(**params)
# execute
m.run()
# get outputs
if isinstance(output_shape, list) and isinstance(output_dtype, list):
tvm_output_list = []
for i, _ in enumerate(output_shape):
tvm_output = m.get_output(i)
tvm_output_list.append(tvm_output.asnumpy())
return tvm_output_list
else:
tvm_output = m.get_output(0)
return tvm_output.asnumpy()
def get_caffe2_output(model, x, dtype='float32'):
import caffe2.python.onnx.backend
prepared_backend = caffe2.python.onnx.backend.prepare(model)
W = {model.graph.input[0].name: x.astype(dtype)}
c2_out = prepared_backend.run(W)[0]
return c2_out
def verify_onnx_forward_impl(graph_file, data_shape, out_shape):
dtype = 'float32'
x = np.random.uniform(size=data_shape)
model = onnx.load_model(graph_file)
c2_out = get_caffe2_output(model, x, dtype)
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, x, target, ctx, out_shape, dtype)
tvm.testing.assert_allclose(c2_out, tvm_out, rtol=1e-5, atol=1e-5)
def verify_super_resolution_example():
verify_onnx_forward_impl(super_resolution, (1, 1, 224, 224), (1, 1, 672, 672))
def verify_squeezenet1_1():
verify_onnx_forward_impl(squeezenet1_1, (1, 3, 224, 224), (1, 1000))
def verify_lenet():
verify_onnx_forward_impl(lenet, (1, 1, 28, 28), (1, 10))
def verify_resnet18():
verify_onnx_forward_impl(resnet18_1_0, (1, 3, 224, 224), (1, 1000))
def test_reshape():
in_shape = (4, 3, 3, 4)
ref_shape = (6, 2, 4, 3)
ref_array = np.array(ref_shape)
ref_node = onnx.helper.make_node('Constant',
inputs=[],
outputs=['ref_in'],
value=onnx.helper.make_tensor(name = 'const_tensor',
data_type = onnx.TensorProto.INT32,
dims = ref_array.shape,
vals = ref_array.flatten().astype(int)))
reshape_node = helper.make_node("Reshape", ["in", "ref_in"], ["out"])
graph = helper.make_graph([ref_node, reshape_node],
"reshape_test",
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(ref_shape))])
model = helper.make_model(graph, producer_name='reshape_test')
for target, ctx in ctx_list():
x = np.random.uniform(size=in_shape).astype('int32')
tvm_out = get_tvm_output(model, x, target, ctx, ref_shape, 'float32')
tvm.testing.assert_allclose(ref_shape, tvm_out.shape)
def test_shape():
in_shape = (4, 3, 3, 4)
ref_shape = (6, 2, 4, 3)
ref_array = np.array(ref_shape)
ref_node = onnx.helper.make_node('Constant',
inputs=[],
outputs=['ref_in'],
value=onnx.helper.make_tensor(name = 'const_tensor',
data_type = onnx.TensorProto.INT32,
dims = ref_array.shape,
vals = ref_array.flatten().astype(int)))
reshape_node = helper.make_node("Reshape", ["in", "ref_in"], ["out"])
shape_node = helper.make_node("Shape", ['out'], ['final_out'])
graph = helper.make_graph([ref_node, reshape_node, shape_node],
"shape_test",
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("final_out",
TensorProto.FLOAT, list(ref_shape))])
model = helper.make_model(graph, producer_name='shape_test')
for target, ctx in ctx_list():
x = np.random.uniform(size=in_shape).astype('int32')
tvm_out = get_tvm_output(model, x, target, ctx, ref_shape, 'int32')
tvm.testing.assert_allclose(ref_shape, tvm_out)
def _test_power_iteration(x_shape, y_shape):
if isinstance(y_shape, int):
y_shape = [y_shape]
x = np.random.uniform(size=x_shape).astype(np.float32)
y = np.random.uniform(size=y_shape).astype(np.float32)
np_res = np.power(x, y).astype(np.float32)
res = helper.make_node("Pow", ['x', 'y'], ['out'])
graph = helper.make_graph([res],
'power_test',
inputs = [helper.make_tensor_value_info("x",
TensorProto.FLOAT, list(x_shape)),
helper.make_tensor_value_info("y",
TensorProto.FLOAT, list(y_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(np_res.shape))])
model = helper.make_model(graph, producer_name='power_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [x, y], target, ctx, np_res.shape)
tvm.testing.assert_allclose(np_res, tvm_out, rtol=1e-5, atol=1e-5)
def test_power():
_test_power_iteration((1, 3), (1))
_test_power_iteration((2, 3), (2, 3))
_test_power_iteration((2, 3), (1, 3))
def test_squeeze():
in_shape = (1, 3, 1, 3, 1, 1)
out_shape = (3, 3)
y = helper.make_node("Squeeze", ['in'], ['out'], axes=[0, 2, 4, 5])
graph = helper.make_graph([y],
'squeeze_test',
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='squeeze_test')
for target, ctx in ctx_list():
x = np.random.uniform(size=in_shape).astype('float32')
tvm_out = get_tvm_output(model, x, target, ctx, out_shape, 'float32')
tvm.testing.assert_allclose(out_shape, tvm_out.shape)
def test_flatten():
in_shape = (1, 3, 4, 4)
axis = 1
ref_shape = (1, 48)
flatten_node = helper.make_node("Flatten", ["in"], ["out"], axis = axis)
graph = helper.make_graph([flatten_node],
"flatten_test",
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(ref_shape))])
model = helper.make_model(graph, producer_name='flatten_test')
for target, ctx in ctx_list():
x = np.random.uniform(size=in_shape).astype('int32')
tvm_out = get_tvm_output(model, x, target, ctx, ref_shape, 'float32')
tvm.testing.assert_allclose(ref_shape, tvm_out.shape)
def test_unsqueeze():
in_shape = (3, 3)
axis = (0, 3, 4)
out_shape = (1, 3, 3, 1, 1)
y = helper.make_node("Unsqueeze", ['in'], ['out'], axes=list(axis))
graph = helper.make_graph([y],
'squeeze_test',
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='squeeze_test')
for target, ctx in ctx_list():
x = np.random.uniform(size=in_shape).astype('float32')
tvm_out = get_tvm_output(model, x, target, ctx, out_shape, 'float32')
tvm.testing.assert_allclose(out_shape, tvm_out.shape)
def verify_gather(in_shape, indices, axis, dtype):
x = np.random.uniform(size=in_shape).astype(dtype)
indices = np.array(indices, dtype="int32")
out_np = np.take(x, indices, axis=axis)
y = helper.make_node("Gather", ['in', 'indices'], ['out'], axis=axis)
graph = helper.make_graph([y],
'gather_test',
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(in_shape)),
helper.make_tensor_value_info("indices",
TensorProto.INT32, list(indices.shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out_np.shape))])
model = helper.make_model(graph, producer_name='gather_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [x, indices], target, ctx, out_np.shape)
tvm.testing.assert_allclose(out_np, tvm_out)
def test_gather():
verify_gather((4,), [1], 0, 'int32')
verify_gather((1,4), [0], 0, 'int32')
verify_gather((4,), [[[1,0],[0,1]]], 0, 'float32')
verify_gather((2,2), [[[1,0],[0,1]]], 1, 'int32')
verify_gather((3,3,3), [[[1,0]]], -1, 'int32')
verify_gather((4,3,5,6), [[2,1,0,0]], 0, 'float32')
def _test_slice_iteration(indata, outdata, starts, ends, axes=None):
if axes:
y = helper.make_node("Slice", ['in'], ['out'], axes=axes, starts=starts, ends=ends)
else:
y = helper.make_node("Slice", ['in'], ['out'], starts=starts, ends=ends)
graph = helper.make_graph([y],
'slice_test',
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(indata.shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(outdata.shape))])
model = helper.make_model(graph, producer_name='slice_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, indata, target, ctx, outdata.shape, 'float32')
tvm.testing.assert_allclose(outdata, tvm_out)
def test_slice():
x = np.random.randn(20, 10, 5).astype(np.float32)
_test_slice_iteration(x, x[0:3, 0:10], (0, 0), (3, 10), (0, 1))
_test_slice_iteration(x, x[:, :, 3:4], (0, 0, 3), (20, 10, 4))
_test_slice_iteration(x, x[:, 1:1000], (1), (1000), (1))
_test_slice_iteration(x, x[:, 0:-1], (0), (-1), (1))
def _test_onnx_op_elementwise(inshape, outfunc, npargs, dtype, opname, kwargs):
indata = np.random.uniform(-1, 1, size=inshape).astype(dtype)
outdata = outfunc(indata, **npargs)
y = helper.make_node(opname, ['in'], ['out'], **kwargs)
graph = helper.make_graph([y],
opname+'_test',
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(indata.shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(outdata.shape))])
model = helper.make_model(graph, producer_name=opname+'_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, indata, target, ctx, outdata.shape, dtype)
tvm.testing.assert_allclose(outdata, tvm_out)
def test_floor():
_test_onnx_op_elementwise((2, 4, 5, 6), np.floor, {}, 'float32', 'Floor', {})
def test_ceil():
_test_onnx_op_elementwise((2, 4, 5, 6), np.ceil, {}, 'float32', 'Ceil', {})
def test_clip():
_test_onnx_op_elementwise((2, 4, 5, 6),
np.clip,
{'a_min': -1.0, 'a_max': 1.0},
'float32',
'Clip',
{'min': -1.0, 'max': 1.0})
def test_matmul():
a_shape = (4, 3)
b_shape = (3, 4)
a_array = np.random.uniform(size=a_shape).astype('float32')
b_array = np.random.uniform(size=b_shape).astype('float32')
out_np = np.matmul(a_array, b_array)
mul_node = helper.make_node("MatMul", ["a", "b"], ["out"])
graph = helper.make_graph([mul_node],
"matmul_test",
inputs = [helper.make_tensor_value_info("a",
TensorProto.FLOAT, list(a_shape)),
helper.make_tensor_value_info("b",
TensorProto.FLOAT, list(b_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out_np.shape))])
model = helper.make_model(graph, producer_name='matmul_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_array, b_array], target, ctx, out_np.shape)
tvm.testing.assert_allclose(out_np, tvm_out, rtol=1e-5, atol=1e-5)
def verify_lrn(shape, nsize, dtype, alpha=None, beta=None, bias=None):
in_array = np.random.uniform(size=shape).astype(dtype)
    if alpha is None and beta is None and bias is None:
alpha = 0.0001
beta = 0.75
bias = 1.0
node = onnx.helper.make_node('LRN', inputs=['in'], outputs=['out'], size=nsize)
else:
node = onnx.helper.make_node('LRN', inputs=['in'], outputs=['out'], alpha=alpha,
beta=beta, bias=bias, size=nsize)
graph = helper.make_graph([node],
"lrn_test",
inputs = [helper.make_tensor_value_info("in", TensorProto.FLOAT, list(shape))],
outputs = [helper.make_tensor_value_info("out", TensorProto.FLOAT, list(shape))])
model = helper.make_model(graph, producer_name='lrn_test')
def _get_python_lrn():
square_sum = np.zeros(shape).astype(dtype)
for n, c, h, w in np.ndindex(in_array.shape):
square_sum[n, c, h, w] = sum(in_array[n,
max(0, c - int(math.floor((nsize - 1) / 2))): \
min(5, c + int(math.ceil((nsize - 1) / 2)) + 1),
h,
w] ** 2)
py_out = in_array / ((bias + (alpha / nsize) * square_sum) ** beta)
return py_out
for target, ctx in ctx_list():
input_name = model.graph.input[0].name
py_out = _get_python_lrn()
tvm_out = get_tvm_output(model, in_array, target, ctx, py_out.shape, 'float32')
tvm.testing.assert_allclose(py_out, tvm_out, rtol=1e-5, atol=1e-5)
def test_lrn():
verify_lrn((5, 5, 5, 5), 3, 'float32')
verify_lrn((5, 5, 5, 5), 3, 'float32', alpha=0.0002, beta=0.5, bias=2.0)
def _test_upsample_nearest():
scale = 2
in_shape = (1, 1, 3, 3)
out_shape = (1, 1, 3*scale, 3*scale)
y = helper.make_node("Upsample", ['in'], ['out'], mode='nearest', scales=[1.0, 1.0, 2.0, 2.0])
in_array = np.random.uniform(size=in_shape).astype(np.float32)
out_array = topi.testing.upsampling_python(in_array, (scale, scale), "NCHW")
graph = helper.make_graph([y],
'upsample_nearest_test',
inputs = [helper.make_tensor_value_info("in", TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out", TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='upsample_nearest_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, in_array, target, ctx, out_shape, 'float32')
tvm.testing.assert_allclose(out_array, tvm_out)
def _test_upsample_bilinear():
scale = 2
in_shape = (1, 1, 3, 3)
out_shape = (1, 1, 3*scale, 3*scale)
y = helper.make_node("Upsample", ['in'], ['out'], mode='linear', scales=[1.0, 1.0, 2.0, 2.0])
in_array = np.random.uniform(size=in_shape).astype(np.float32)
out_array = topi.testing.bilinear_resize_python(in_array, (3*scale, 3*scale), "NCHW")
graph = helper.make_graph([y],
'upsample_bilinear_test',
inputs = [helper.make_tensor_value_info("in", TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out", TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='upsample_bilinear_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, in_array, target, ctx, out_shape, 'float32')
tvm.testing.assert_allclose(out_array, tvm_out, rtol=1e-5, atol=1e-5)
def _test_upsample_bilinear_opset9():
scale = 2
in_shape = (1, 1, 3, 3)
out_shape = (1, 1, 3*scale, 3*scale)
y = helper.make_node("Upsample", ['in','scales'], ['out'], mode='linear')
scales=[1.0, 1.0, 2.0, 2.0]
in_array = np.random.uniform(size=in_shape).astype(np.float32)
out_array = topi.testing.bilinear_resize_python(in_array, (3*scale, 3*scale), "NCHW")
ref_array = np.array(scales)
ref_node = helper.make_node('Constant',
inputs=[],
outputs=['scales'],
value=onnx.helper.make_tensor(name = 'const_tensor',
data_type = TensorProto.FLOAT,
dims = ref_array.shape,
vals = ref_array.flatten().astype(float)))
graph = helper.make_graph([ref_node, y],
'upsample_bilinear_opset9_test',
inputs = [helper.make_tensor_value_info("in", TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out", TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='upsample_bilinear_opset9_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, in_array, target, ctx, out_shape, 'float32')
tvm.testing.assert_allclose(out_array, tvm_out, rtol=1e-5, atol=1e-5)
def test_upsample():
_test_upsample_nearest()
_test_upsample_bilinear()
_test_upsample_bilinear_opset9()
def _test_softmax(inshape, axis):
opname = 'Softmax'
indata = np.random.uniform(size=inshape).astype(np.float32)
outshape = inshape
outdata = topi.testing.softmax_python(indata)
if isinstance(axis, int):
y = helper.make_node(opname, ['in'], ['out'], axis = axis)
elif axis is None:
y = helper.make_node(opname, ['in'], ['out'])
graph = helper.make_graph([y],
opname+'_test',
inputs = [helper.make_tensor_value_info("in",
TensorProto.FLOAT, list(indata.shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(outdata.shape))])
model = helper.make_model(graph, producer_name=opname+'_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, indata, target, ctx, outshape, 'float32')
tvm.testing.assert_allclose(outdata, tvm_out, rtol=1e-5, atol=1e-5)
def test_softmax():
_test_softmax((1, 10), None)
_test_softmax((1, 10), 1)
def verify_min(input_dim):
dtype = 'float32'
a_np1 = np.random.uniform(size=input_dim).astype(dtype)
a_np2 = np.random.uniform(size=input_dim).astype(dtype)
a_np3 = np.random.uniform(size=input_dim).astype(dtype)
b_np = np.min((a_np1, a_np2, a_np3), axis=0)
min_node = helper.make_node("Min", ["a_np1", "a_np2", "a_np3"], ["out"])
graph = helper.make_graph([min_node],
"Min_test",
inputs = [helper.make_tensor_value_info("a_np1",
TensorProto.FLOAT, list(input_dim)),
helper.make_tensor_value_info("a_np2",
TensorProto.FLOAT, list(input_dim)),
helper.make_tensor_value_info("a_np3",
TensorProto.FLOAT, list(input_dim))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(b_np.shape))])
model = helper.make_model(graph, producer_name='Min_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_np1, a_np2, a_np3], target, ctx, b_np.shape)
tvm.testing.assert_allclose(b_np, tvm_out, rtol=1e-5, atol=1e-5)
def test_forward_min():
verify_min((1, 3, 20, 20))
verify_min((20, 20))
def verify_max(input_dim):
dtype = 'float32'
a_np1 = np.random.uniform(size=input_dim).astype(dtype)
a_np2 = np.random.uniform(size=input_dim).astype(dtype)
a_np3 = np.random.uniform(size=input_dim).astype(dtype)
b_np = np.max((a_np1, a_np2, a_np3), axis=0)
max_node = helper.make_node("Max", ["a_np1", "a_np2", "a_np3"], ["out"])
graph = helper.make_graph([max_node],
"Max_test",
inputs = [helper.make_tensor_value_info("a_np1",
TensorProto.FLOAT, list(input_dim)),
helper.make_tensor_value_info("a_np2",
TensorProto.FLOAT, list(input_dim)),
helper.make_tensor_value_info("a_np3",
TensorProto.FLOAT, list(input_dim))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(b_np.shape))])
model = helper.make_model(graph, producer_name='Max_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_np1, a_np2, a_np3], target, ctx, b_np.shape)
tvm.testing.assert_allclose(b_np, tvm_out, rtol=1e-5, atol=1e-5)
def test_forward_max():
verify_max((1, 3, 20, 20))
verify_max((20, 20))
def verify_mean(input_dim):
dtype = 'float32'
a_np1 = np.random.uniform(size=input_dim).astype(dtype)
a_np2 = np.random.uniform(size=input_dim).astype(dtype)
a_np3 = np.random.uniform(size=input_dim).astype(dtype)
b_np = np.mean((a_np1, a_np2, a_np3), axis=0)
mean_node = helper.make_node("Mean", ["a_np1", "a_np2", "a_np3"], ["out"])
graph = helper.make_graph([mean_node],
"Mean_test",
inputs = [helper.make_tensor_value_info("a_np1",
TensorProto.FLOAT, list(input_dim)),
helper.make_tensor_value_info("a_np2",
TensorProto.FLOAT, list(input_dim)),
helper.make_tensor_value_info("a_np3",
TensorProto.FLOAT, list(input_dim))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(b_np.shape))])
model = helper.make_model(graph, producer_name='Mean_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_np1, a_np2, a_np3], target, ctx, b_np.shape)
tvm.testing.assert_allclose(b_np, tvm_out, rtol=1e-5, atol=1e-5)
def test_forward_mean():
verify_mean((1, 3, 20, 20))
verify_mean((20, 20))
def verify_hardsigmoid(input_dim, alpha, beta):
dtype = 'float32'
a_np1 = np.random.uniform(size=input_dim).astype(dtype)
b_np = np.clip(a_np1 * alpha + beta, 0, 1)
hardsigmoid_node = helper.make_node("HardSigmoid", ["a_np1"], ["out"], alpha=alpha, beta=beta)
graph = helper.make_graph([hardsigmoid_node],
"HardSigmoid_test",
inputs = [helper.make_tensor_value_info("a_np1",
TensorProto.FLOAT, list(input_dim))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(b_np.shape))])
model = helper.make_model(graph, producer_name='HardSigmoid_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_np1], target, ctx, b_np.shape)
tvm.testing.assert_allclose(b_np, tvm_out, rtol=1e-5, atol=1e-5)
def test_forward_hardsigmoid():
verify_hardsigmoid((1, 3, 20, 20), 0.5, 0.6)
verify_hardsigmoid((20, 20), 0.3, 0.4)
def verify_argmin(input_dim, axis=None, keepdims=None):
def _argmin_numpy(data, axis=0, keepdims=True):
result = np.argmin(data, axis=axis)
if (keepdims == 1):
result = np.expand_dims(result, axis)
return result.astype(data.dtype)
a_np1 = np.random.uniform(-10, 10, input_dim).astype(np.int32)
if keepdims is None and axis is None:
b_np = _argmin_numpy(a_np1)
node = onnx.helper.make_node('ArgMin',
inputs=['a_np1'],
outputs=['out'])
elif axis is None:
b_np = _argmin_numpy(a_np1, keepdims=keepdims)
node = onnx.helper.make_node('ArgMin',
inputs=['a_np1'],
outputs=['out'],
keepdims=keepdims)
elif keepdims is None:
b_np = _argmin_numpy(a_np1, axis=axis)
node = onnx.helper.make_node('ArgMin',
inputs=['a_np1'],
outputs=['out'],
axis=axis)
else:
b_np = _argmin_numpy(a_np1, axis=axis, keepdims=keepdims)
node = onnx.helper.make_node('ArgMin',
inputs=['a_np1'],
outputs=['out'],
axis=axis,
keepdims=keepdims)
graph = helper.make_graph([node],
"argmin_test",
inputs = [helper.make_tensor_value_info("a_np1",
TensorProto.INT32, list(a_np1.shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.INT32, list(b_np.shape))])
model = helper.make_model(graph, producer_name='argmin_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_np1], target, ctx, b_np.shape, b_np.dtype)
tvm.testing.assert_allclose(b_np, tvm_out, rtol=1e-5, atol=1e-5)
def verify_argmax(input_dim, axis=None, keepdims=None):
def _argmax_numpy(data, axis=0, keepdims=True):
result = np.argmax(data, axis=axis)
if (keepdims == 1):
result = np.expand_dims(result, axis)
return result.astype(data.dtype)
a_np1 = np.random.uniform(-10, 10, input_dim).astype(np.int32)
if keepdims is None and axis is None:
b_np = _argmax_numpy(a_np1)
node = onnx.helper.make_node('ArgMax',
inputs=['a_np1'],
outputs=['out'])
elif axis is None:
b_np = _argmax_numpy(a_np1, keepdims=keepdims)
node = onnx.helper.make_node('ArgMax',
inputs=['a_np1'],
outputs=['out'],
keepdims=keepdims)
elif keepdims is None:
b_np = _argmax_numpy(a_np1, axis=axis)
node = onnx.helper.make_node('ArgMax',
inputs=['a_np1'],
outputs=['out'],
axis=axis)
else:
b_np = _argmax_numpy(a_np1, axis=axis, keepdims=keepdims)
node = onnx.helper.make_node('ArgMax',
inputs=['a_np1'],
outputs=['out'],
axis=axis,
keepdims=keepdims)
graph = helper.make_graph([node],
"argmax_test",
inputs = [helper.make_tensor_value_info("a_np1",
TensorProto.INT32, list(a_np1.shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.INT32, list(b_np.shape))])
model = helper.make_model(graph, producer_name='argmax_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [a_np1], target, ctx, b_np.shape, b_np.dtype)
tvm.testing.assert_allclose(b_np, tvm_out, rtol=1e-5, atol=1e-5)
def test_forward_arg_min_max():
'''Verify argmin and argmax'''
verify_argmin([3,4,4])
verify_argmax([3,4,4])
verify_argmin([3,4,4], axis=1)
verify_argmax([3,4,4], axis=0)
verify_argmin([3,4,4], keepdims=0)
verify_argmax([3,4,4], keepdims=1)
for axis in [None, 0,1,2]:
for keepdims in [None, True,False]:
verify_argmin([3,4,4], axis, keepdims)
verify_argmax([3,4,4], axis, keepdims)
def verify_constantfill(is_shape, input_dim, out_dim, value, dtype, **kwargs):
input_a = np.random.uniform(size=input_dim).astype(dtype)
out = np.empty(shape=out_dim, dtype=dtype)
out.fill(value)
    if is_shape:
fill_node = helper.make_node("ConstantFill", [], ["out"], shape=input_dim, value=value, **kwargs)
else:
fill_node = helper.make_node("ConstantFill", ["input_a"], ["out"], value=value, dtype=dtype, **kwargs)
    if is_shape:
inputs = []
else:
inputs = [helper.make_tensor_value_info("input_a",
TensorProto.FLOAT, list(input_dim))]
graph = helper.make_graph([fill_node],
"fill_test",
inputs,
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out.shape))])
model = helper.make_model(graph, producer_name='fill_test')
for target, ctx in ctx_list():
        if is_shape:
tvm_out = get_tvm_output(model, [], target, ctx, out.shape)
else:
tvm_out = get_tvm_output(model, [input_a], target, ctx, out.shape)
tvm.testing.assert_allclose(out, tvm_out, rtol=1e-5, atol=1e-5)
def test_constantfill():
verify_constantfill(True, (2, 3, 4, 5), (2, 3, 4, 5), 10, 'float32')
verify_constantfill(False, (2, 3, 4, 5), (2, 3, 4, 5), 10, 'float32')
verify_constantfill(True, (2, 3, 4, 5), (2, 3, 4, 5, 4, 5, 6), 10, 'float32', extra_shape=(4, 5, 6))
def verify_pad(indata, pads, value=0.0):
indata = np.array(indata).astype(np.float32)
# numpy expect result
len_dim = len(pads) // 2
np_pads = [(pads[i], pads[i+len_dim]) for i in range(len_dim)]
outdata = np.pad(indata, pad_width=np_pads, mode='constant', constant_values=value)
# onnx graph
node = helper.make_node(
'Pad',
inputs=['input'],
outputs=['output'],
mode='constant',
pads=pads,
value=value
)
graph = helper.make_graph([node],
'pad_test',
inputs = [helper.make_tensor_value_info("input",
TensorProto.FLOAT, list(indata.shape))],
outputs = [helper.make_tensor_value_info("output",
TensorProto.FLOAT, list(outdata.shape))])
model = helper.make_model(graph, producer_name='pad_test')
# tvm result
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, indata, target, ctx, outdata.shape, 'float32')
tvm.testing.assert_allclose(outdata, tvm_out, rtol=1e-5, atol=1e-5)
def test_pad():
verify_pad(np.random.randn(2, 2).astype(np.float32), [0, 1, 0, 0], 0.0)
verify_pad(np.random.randn(2, 3).astype(np.float32), [1, 0, 0, 1], 0.0)
verify_pad(np.random.randn(3, 2).astype(np.float32), [0, 0, 1, 0], 5.0)
def verify_reduce_x(name, indata, axis, keepdims):
indata = np.array(indata).astype(np.float32)
# numpy expect result
if name == 'ReduceMax':
outdata = np.maximum.reduce(indata, axis=axis, keepdims=keepdims == 1)
elif name == 'ReduceMin':
outdata = np.minimum.reduce(indata, axis=axis, keepdims=keepdims == 1)
elif name == 'ReduceSum':
outdata = np.sum(indata, axis=axis, keepdims=keepdims == 1)
elif name == 'ReduceMean':
outdata = np.mean(indata, axis=axis, keepdims=keepdims == 1)
else:
raise Exception('unsupport op: {}'.format(name))
if len(np.asarray(outdata).shape) == 0:
outdata = np.asarray([outdata])
# onnx graph
if axis is None:
node = helper.make_node(name, inputs=['input'], outputs=['output'],
keepdims=keepdims)
else:
node = helper.make_node(name, inputs=['input'], outputs=['output'],
axes=axis, keepdims=keepdims)
graph = helper.make_graph([node],
'{}_test'.format(name),
inputs = [helper.make_tensor_value_info("input",
TensorProto.FLOAT, list(indata.shape))],
outputs = [helper.make_tensor_value_info("output",
TensorProto.FLOAT, list(outdata.shape))])
model = helper.make_model(graph, producer_name='{}_test'.format(name))
# tvm result
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, indata, target, ctx, outdata.shape, 'float32')
tvm.testing.assert_allclose(outdata, tvm_out, rtol=1e-5, atol=1e-5)
def test_reduce_max():
verify_reduce_x("ReduceMax",
np.random.randn(3, 2, 2).astype(np.float32),
axis=None, keepdims=1)
verify_reduce_x("ReduceMax",
np.random.randn(3, 2, 3).astype(np.float32),
axis=None, keepdims=0)
verify_reduce_x("ReduceMax",
np.random.randn(3, 3, 3).astype(np.float32),
axis=(1,), keepdims=1)
def test_reduce_min():
verify_reduce_x("ReduceMin",
np.random.randn(3, 2, 2).astype(np.float32),
axis=None, keepdims=1)
verify_reduce_x("ReduceMin",
np.random.randn(3, 2, 3).astype(np.float32),
axis=None, keepdims=0)
verify_reduce_x("ReduceMin",
np.random.randn(3, 3, 3).astype(np.float32),
axis=(1,), keepdims=1)
def test_reduce_sum():
verify_reduce_x("ReduceSum",
np.random.randn(3, 2, 2).astype(np.float32),
axis=None, keepdims=1)
verify_reduce_x("ReduceSum",
np.random.randn(3, 2, 3).astype(np.float32),
axis=None, keepdims=0)
verify_reduce_x("ReduceSum",
np.random.randn(3, 3, 3).astype(np.float32),
axis=(1,), keepdims=1)
def test_reduce_mean():
verify_reduce_x("ReduceMean",
np.random.randn(3, 2, 2).astype(np.float32),
axis=None, keepdims=1)
verify_reduce_x("ReduceMean",
np.random.randn(3, 2, 3).astype(np.float32),
axis=None, keepdims=0)
verify_reduce_x("ReduceMean",
np.random.randn(3, 3, 3).astype(np.float32),
axis=(1,), keepdims=1)
def verify_split(indata, outdatas, split, axis=0):
indata = np.array(indata).astype(np.float32)
outdatas = [np.array(o).astype(np.float32) for o in outdatas]
node = helper.make_node(
'Split',
inputs=['input'],
outputs=['output_{}'.format(i) for i in range(len(split))],
axis=axis,
split=split
)
graph = helper.make_graph([node],
'split_test',
inputs = [helper.make_tensor_value_info("input",
TensorProto.FLOAT, list(indata.shape))],
outputs = [helper.make_tensor_value_info("output_{}".format(i),
TensorProto.FLOAT, list(outdatas[i].shape))
for i in range(len(split))
])
model = helper.make_model(graph, producer_name='split_test')
for target, ctx in ctx_list():
output_shape = [o.shape for o in outdatas]
output_type = ['float32', 'float32', 'float32']
tvm_out = get_tvm_output(model, indata, target, ctx, output_shape, output_type)
for o, t in zip(outdatas, tvm_out):
tvm.testing.assert_allclose(o, t)
def test_split():
# 1D
verify_split([1., 2., 3., 4., 5., 6.], [[1., 2.], [3., 4.], [5., 6.]], [2, 2, 2], 0)
verify_split([1., 2., 3., 4., 5., 6.], [[1., 2.], [3.], [4., 5., 6.]], [2, 1, 3], 0)
# 2D
verify_split([[1., 2., 3., 4.], [7., 8., 9., 10.]],
[[[1., 2.], [7., 8.]], [[3., 4.], [9., 10.]]], [2, 2], 1)
def test_binary_ops():
in_shape = (1, 2, 3, 3)
dtype = "float32"
out_shape = in_shape
def verify_binary_ops(op, x, y, out_np, broadcast=None):
if broadcast is None:
z = helper.make_node(op, ['in1', 'in2'], ['out'])
else:
z = helper.make_node(op, ['in1', 'in2'], ['out'], broadcast=1)
graph = helper.make_graph([z],
'_test',
inputs = [helper.make_tensor_value_info("in1",
TensorProto.FLOAT, list(in_shape)),
helper.make_tensor_value_info("in2",
TensorProto.FLOAT, list(in_shape))],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [x, y], target, ctx)
tvm.testing.assert_allclose(out_np, tvm_out, rtol=1e-5, atol=1e-5)
x = np.random.uniform(size=in_shape).astype(dtype)
y = np.random.uniform(size=in_shape).astype(dtype)
z = np.random.uniform(size=(3,)).astype(dtype)
verify_binary_ops("Add",x, y, x + y, broadcast=None)
verify_binary_ops("Add", x, z, x + z, broadcast=True)
verify_binary_ops("Sub", x, y, x - y, broadcast=None)
verify_binary_ops("Sub", x, z, x - z, broadcast=True)
verify_binary_ops("Mul",x, y, x * y, broadcast=None)
verify_binary_ops("Mul", x, z, x * z, broadcast=True)
verify_binary_ops("Div", x, y, x / y, broadcast=None)
verify_binary_ops("Div", x, z, x / z, broadcast=True)
verify_binary_ops("Sum", x, y, x + y, broadcast=None)
verify_binary_ops("Greater", x, y, x > y, broadcast=True)
verify_binary_ops("Less", x, y, x < y, broadcast=True)
verify_binary_ops("Equal", x, y, x == y, broadcast=True)
def test_single_ops():
in_shape = (1, 2, 3, 3)
dtype = "float32"
out_shape = in_shape
def verify_single_ops(op, x, out_np, rtol=1e-5, atol=1e-5):
z = helper.make_node(op, ['in1'], ['out'])
graph = helper.make_graph([z],
'_test',
inputs = [helper.make_tensor_value_info("in1",
TensorProto.FLOAT, list(in_shape)),],
outputs = [helper.make_tensor_value_info("out",
TensorProto.FLOAT, list(out_shape))])
model = helper.make_model(graph, producer_name='_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [x], target, ctx)
tvm.testing.assert_allclose(out_np, tvm_out, rtol=rtol, atol=atol)
x = np.random.uniform(size=in_shape).astype(dtype)
verify_single_ops("Neg",x, -x)
verify_single_ops("Abs",x, np.abs(x))
verify_single_ops("Reciprocal",x, 1/x)
verify_single_ops("Sqrt",x, np.sqrt(x))
verify_single_ops("Relu",x, np.maximum(x, 0))
verify_single_ops("Exp",x, np.exp(x))
verify_single_ops("Log",x, np.log(x))
verify_single_ops("Log",x, np.log(x))
verify_single_ops("Tanh",x, np.tanh(x))
verify_single_ops("Sigmoid",x, 1 / (1 + np.exp(-x)))
verify_single_ops("Softsign",x, x / (1 + np.abs(x)))
verify_single_ops("SoftPlus",x, np.log(1 + np.exp(x)))
def test_leaky_relu():
def leaky_relu_x(x, alpha):
return np.where(x >= 0, x, x * alpha)
_test_onnx_op_elementwise((2, 4, 5, 6),
leaky_relu_x,
{'alpha': 0.25},
'float32',
'LeakyRelu',
{'alpha': 0.25})
def test_elu():
def elu_x(x, alpha):
return np.where(x > 0, x, alpha * (np.exp(x) - 1.0))
_test_onnx_op_elementwise((2, 4, 5, 6),
elu_x,
{'alpha': 0.25},
'float32',
'Elu',
{'alpha': 0.25})
def test_selu():
def selu_x(x, alpha, gamma):
return gamma * np.where(x > 0, x, alpha * (np.exp(x) - 1.0))
_test_onnx_op_elementwise((2, 4, 5, 6),
selu_x,
{'alpha': 0.25, 'gamma': 0.3},
'float32',
'Selu',
{'alpha': 0.25, 'gamma': 0.3})
def test_ThresholdedRelu():
def ThresholdedRelu_x(x, alpha):
out_np = np.clip(x, alpha, np.inf)
out_np[out_np == alpha] = 0
return out_np
_test_onnx_op_elementwise((2, 4, 5, 6),
ThresholdedRelu_x,
{'alpha': 0.25},
'float32',
'ThresholdedRelu',
{'alpha': 0.25})
def test_ScaledTanh():
def ScaledTanh_x(x, alpha, beta):
return alpha * np.tanh(beta * x)
_test_onnx_op_elementwise((2, 4, 5, 6),
ScaledTanh_x,
{'alpha': 0.25, 'beta': 0.3},
'float32',
'ScaledTanh',
{'alpha': 0.25, 'beta': 0.3})
def test_ParametricSoftplus():
def ParametricSoftplus_x(x, alpha, beta):
return alpha * np.log(np.exp(beta * x) + 1)
_test_onnx_op_elementwise((2, 4, 5, 6),
ParametricSoftplus_x,
{'alpha': 0.25, 'beta': 0.3},
'float32',
'ParametricSoftplus',
{'alpha': 0.25, 'beta': 0.3})
def test_Scale():
def Scale_x(x, scale):
return scale * x
_test_onnx_op_elementwise((2, 4, 5, 6),
Scale_x,
{'scale': 0.25},
'float32',
'Scale',
{'scale': 0.25})
def test_LogSoftmax():
_test_onnx_op_elementwise((1, 4),
topi.testing.log_softmax_python,
{},
'float32',
'LogSoftmax',
{'axis': 1})
def check_torch_conversion(model, input_size):
dummy_input = torch.randn(*input_size)
file_name = '{}.onnx'.format(model.__name__)
# Set verbose=True for more output
torch.onnx.export(model(), dummy_input, file_name, export_params=True, verbose=False)
onnx_model = onnx.load(file_name)
for target, ctx in ctx_list():
input_data = np.random.uniform(size=input_size).astype('int32')
c2_out = get_caffe2_output(onnx_model, input_data)
tvm_out = get_tvm_output(onnx_model, input_data, target, ctx)
tvm.testing.assert_allclose(c2_out, tvm_out)
def test_resnet():
check_torch_conversion(torchvision.models.resnet18, (1,3,224,224))
# check_torch_conversion(torchvision.models.resnet101, (1,3,224,224))
# def test_alexnet():
# Torch's ONNX export does not support the adaptive pooling used by AlexNet?
# check_torch_conversion(torchvision.models.alexnet, (1,3,224,224))
# Torch's ONNX export does not support the adaptive pooling used by vgg16?
# def test_vgg16():
# check_torch_conversion(torchvision.models.vgg16, (1,3,224,224))
# TODO(@jroesch): Update Torch + ONNX to support this import.
# def test_squeezenet():
# # Torch's ONNX export does not support the max pooling used by Squezenet
# check_torch_conversion(torchvision.models.squeezenet1_0, (1,3,224,224))
def test_densenet():
check_torch_conversion(torchvision.models.densenet161, (1,3,224,224))
def test_inception():
check_torch_conversion(torchvision.models.inception_v3, (1,3,224,224))
# TODO(@jroesch): Update Torch + ONNX to support this import.
# def test_googlenet():
# check_torch_conversion(torchvision.models.googlenet, (1,3,224,224))
# TODO(@jroesch): Update Torch + ONNX to support this import.
# def test_shufflenetv2():
# check_torch_conversion(torchvision.models.shufflenetv2, (1,3,224,224))
def test_sign():
def Sign_x(x):
return np.sign(x)
_test_onnx_op_elementwise((3, 4, 5, 6),
Sign_x,
{},
'float32',
'Sign',
{})
def verify_not(indata, dtype):
x = indata.astype(dtype)
outdata = np.logical_not(x)
node = helper.make_node('Not', inputs=['in'], outputs=['out'],)
graph = helper.make_graph([node],
'not_test',
inputs=[helper.make_tensor_value_info("in", TensorProto.BOOL, list(x.shape))],
outputs=[helper.make_tensor_value_info("out", TensorProto.BOOL, list(outdata.shape))])
model = helper.make_model(graph, producer_name='not_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [x], target, ctx, outdata.shape)
tvm.testing.assert_allclose(outdata, tvm_out)
def test_not():
# 2d
verify_not(indata=(np.random.randn(3, 4) > 0), dtype=bool)
# 3d
verify_not(indata=(np.random.randn(3, 4, 5) > 0), dtype=bool)
# 4d
verify_not(indata=(np.random.randn(3, 4, 5, 6) > 0), dtype=bool)
def verify_and(indata, dtype):
x = indata[0].astype(dtype)
y = indata[1].astype(dtype)
outdata = np.logical_and(x, y)
node = helper.make_node('And', inputs=['in1', 'in2'], outputs=['out'], )
graph = helper.make_graph([node],
'and_test',
inputs=[helper.make_tensor_value_info("in1", TensorProto.BOOL, list(x.shape)),
helper.make_tensor_value_info("in2", TensorProto.BOOL, list(y.shape))],
outputs=[helper.make_tensor_value_info("out", TensorProto.BOOL, list(outdata.shape))])
model = helper.make_model(graph, producer_name='and_test')
for target, ctx in ctx_list():
tvm_out = get_tvm_output(model, [x, y], target, ctx, outdata.shape)
tvm.testing.assert_allclose(outdata, tvm_out)
def test_and():
# 2d
x = (np.random.randn(3, 4) > 0)
y = (np.random.randn(3, 4) > 0)
verify_and(indata=[x, y], dtype=bool)
# 3d
x = (np.random.randn(3, 4, 5) > 0)
y = (np.random.randn(3, 4, 5) > 0)
verify_and(indata=[x, y], dtype=bool)
# 4d
x = (np.random.randn(3, 4, 5, 6) > 0)
y = (np.random.randn(3, 4, 5, 6) > 0)
verify_and(indata=[x, y], dtype=bool)
# 3d vs 1d
x = (np.random.randn(3, 4, 5) > 0)
y = (np.random.randn(5) > 0)
verify_and(indata=[x, y], dtype=bool)
# 3d vs 2d
x = (np.random.randn(3, 4, 5) > 0)
y = (np.random.randn(4, 5) > 0)
verify_and(indata=[x, y], dtype=bool)
if __name__ == '__main__':
test_flatten()
test_reshape()
test_shape()
test_power()
test_squeeze()
test_unsqueeze()
test_slice()
test_floor()
test_ceil()
test_clip()
test_matmul()
test_gather()
test_lrn()
test_upsample()
test_forward_min()
test_forward_max()
test_forward_mean()
test_forward_hardsigmoid()
test_forward_arg_min_max()
test_softmax()
test_constantfill()
test_pad()
test_reduce_max()
test_reduce_min()
test_reduce_sum()
test_reduce_mean()
test_split()
test_binary_ops()
test_single_ops()
test_leaky_relu()
test_elu()
test_selu()
test_ThresholdedRelu()
test_ScaledTanh()
test_ParametricSoftplus()
test_Scale()
test_LogSoftmax()
test_resnet()
test_inception()
test_densenet()
test_sign()
test_not()
test_and()
|
Huyuwei/tvm
|
tests/python/frontend/onnx/test_forward.py
|
Python
|
apache-2.0
| 52,747
|
# Copyright 2015 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from django.conf import settings
from django import http
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables
from cloudkittydashboard.api import cloudkitty as api
from cloudkittydashboard.dashboards.project.rating \
import tables as rating_tables
from cloudkittydashboard.utils import TemplatizableDict
class IndexView(tables.DataTableView):
table_class = rating_tables.SummaryTable
template_name = 'project/rating/index.html'
def get_data(self):
summary = api.cloudkittyclient(self.request).report.get_summary(
tenant_id=self.request.user.tenant_id,
groupby=['tenant_id', 'res_type'])['summary']
summary = api.identify(summary, key='res_type', name=True)
summary.append(TemplatizableDict({
'id': 'ALL',
'res_type': 'TOTAL',
'name': 'ALL',
'rate': sum([float(i['rate']) for i in summary]),
}))
return summary
def quote(request):
pricing = 0.0
if request.is_ajax():
if request.method == 'POST':
json_data = json.loads(request.body)
def __update_quotation_data(element, service):
if isinstance(element, dict):
element['service'] = service
else:
for elem in element:
__update_quotation_data(elem, service)
try:
service = getattr(
settings, 'CLOUDKITTY_QUOTATION_SERVICE', 'instance')
__update_quotation_data(json_data, service)
pricing = float(api.cloudkittyclient(request)
.rating.get_quotation(res_data=json_data))
except Exception as ex:
exceptions.handle(request,
_('Unable to retrieve price: %s') % str(ex))
return http.HttpResponse(json.dumps(pricing),
content_type='application/json')
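# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module; the payload shape is
# an assumption for demonstration only): the nested __update_quotation_data
# helper above tags every dict found in the posted structure with the
# configured service before it is forwarded to rating.get_quotation(), e.g.
#
#   json_data = [[{"desc": {"flavor": "m1.small"}, "volume": 1}]]
#   __update_quotation_data(json_data, "instance")
#   # -> every dict now also carries {"service": "instance", ...}
# ---------------------------------------------------------------------------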
|
stackforge/cloudkitty-dashboard
|
cloudkittydashboard/dashboards/project/rating/views.py
|
Python
|
apache-2.0
| 2,650
|
import pytest
from dcos import jsonitem
from dcos.errors import DCOSException
@pytest.fixture(params=range(6))
def bad_object(request):
return [
'{"key":value}',
'this is a string',
'4.5',
'4',
'true',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(4))
def bad_number(request):
return [
'this is a string',
'true',
'{"key":"value"}',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(5))
def bad_integer(request):
return [
'this is a string',
'true',
'{"key":"value"}',
'45.0',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(5))
def bad_boolean(request):
return [
'this is a string',
'45',
'{"key":"value"}',
'45.0',
'[1,2,3]',
][request.param]
@pytest.fixture(params=range(6))
def bad_array(request):
return [
'this is a string',
'45',
'{"key":"value"}',
'45.0',
'true',
'[1,2,3',
][request.param]
@pytest.fixture(params=[
('string', 'this is a string', 'this is a string'),
('string', 'null', None),
('object', '{"key":"value"}', {'key': 'value'}),
('object', 'null', None),
('number', '4.2', 4.2),
('number', 'null', None),
('integer', '42', 42),
('integer', 'null', None),
('boolean', 'true', True),
('boolean', 'True', True),
('boolean', 'FaLsE', False),
('boolean', 'false', False),
('boolean', 'null', None),
('array', '[1,2,3]', [1, 2, 3]),
('array', 'null', None),
('url', 'http://test.com', 'http://test.com')
])
def jsonitem_tuple(request):
return request.param
@pytest.fixture(params=range(13))
def parse_tuple(request):
return [
('string=null', ('"string"', None)),
('string="this is a string with ="',
('"string"', 'this is a string with =')),
("string='this is a string with ='",
('"string"', 'this is a string with =')),
('object=null', ('"object"', None)),
("""object='{"key":"value"}'""", ('"object"', {'key': 'value'})),
('number=null', ('"number"', None)),
('number=4.2', ('"number"', 4.2)),
('integer=null', ('"integer"', None)),
('integer=42', ('"integer"', 42)),
('boolean=null', ('"boolean"', None)),
('boolean=true', ('"boolean"', True)),
('array=null', ('"array"', None)),
("array='[1,2,3]'", ('"array"', [1, 2, 3])),
][request.param]
@pytest.fixture(params=range(6))
def bad_parse(request):
return [
"====",
"no equals",
"object=[]",
"something=cool",
"integer=",
"integer=45.0",
][request.param]
@pytest.fixture
def schema():
return {
'type': 'object',
'properties': {
'integer': {
'type': 'integer'
},
'number': {
'type': 'number'
},
'string': {
'type': 'string',
},
'object': {
'type': 'object'
},
'array': {
'type': 'array'
},
'boolean': {
'type': 'boolean',
},
'url': {
'type': 'string',
'format': 'url',
}
}
}
def test_parse_string():
string = 'this is a string "'
assert jsonitem._parse_string(string) == string
def test_parse_object():
assert jsonitem._parse_object('{"key": "value"}') == {'key': 'value'}
def test_parse_invalid_objects(bad_object):
with pytest.raises(DCOSException):
jsonitem._parse_object(bad_object)
def test_parse_number():
assert jsonitem._parse_number('45') == 45
assert jsonitem._parse_number('45.0') == 45.0
def test_parse_invalid_numbers(bad_number):
with pytest.raises(DCOSException):
jsonitem._parse_number(bad_number)
def test_parse_integer():
assert jsonitem._parse_integer('45') == 45
def test_parse_invalid_integers(bad_integer):
with pytest.raises(DCOSException):
jsonitem._parse_integer(bad_integer)
def test_parse_boolean():
assert jsonitem._parse_boolean('true') is True
assert jsonitem._parse_boolean('false') is False
def test_parse_invalid_booleans(bad_boolean):
with pytest.raises(DCOSException):
jsonitem._parse_boolean(bad_boolean)
def test_parse_array():
assert jsonitem._parse_array('[1,2,3]') == [1, 2, 3]
def test_parse_invalid_arrays(bad_array):
with pytest.raises(DCOSException):
jsonitem._parse_array(bad_array)
def test_parse_url():
assert jsonitem._parse_url('http://test.com:12') == 'http://test.com:12'
def test_find_parser(schema, jsonitem_tuple):
key, string_value, value = jsonitem_tuple
assert jsonitem.find_parser(key, schema)(string_value) == value
def test_parse_json_item(schema, parse_tuple):
arg, result = parse_tuple
assert jsonitem.parse_json_item(arg, schema) == result
def test_parse_bad_json_item(schema, bad_parse):
with pytest.raises(DCOSException):
jsonitem.parse_json_item(bad_parse, schema)
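# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the test suite): given a schema dict
# shaped like the `schema` fixture above, jsonitem resolves a parser per
# property and splits "key=value" strings into (json-encoded key, parsed
# value) pairs:
#
#   parser = jsonitem.find_parser('integer', some_schema)   # -> callable
#   parser('42')                                            # -> 42
#   jsonitem.parse_json_item('boolean=true', some_schema)   # -> ('"boolean"', True)
#
# `some_schema` stands for a plain dict of that shape; the pytest fixtures
# above are only injected by the test runner.
# ---------------------------------------------------------------------------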
|
mesosphere/dcos-cli
|
tests/test_jsonitem.py
|
Python
|
apache-2.0
| 5,239
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetEvaluationJob
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datalabeling
# [START datalabeling_v1beta1_generated_DataLabelingService_GetEvaluationJob_sync]
from google.cloud import datalabeling_v1beta1
def sample_get_evaluation_job():
# Create a client
client = datalabeling_v1beta1.DataLabelingServiceClient()
# Initialize request argument(s)
request = datalabeling_v1beta1.GetEvaluationJobRequest(
name="name_value",
)
# Make the request
response = client.get_evaluation_job(request=request)
# Handle the response
print(response)
# [END datalabeling_v1beta1_generated_DataLabelingService_GetEvaluationJob_sync]
|
googleapis/python-datalabeling
|
samples/generated_samples/datalabeling_v1beta1_generated_data_labeling_service_get_evaluation_job_sync.py
|
Python
|
apache-2.0
| 1,533
|
# Copyright 2017 CodiLime
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class BaseRegister:
"""
Represents an ISA register. Fields:
- name: a name displayed for this register, should be unique.
- width: width in bits.
"""
def __init__(self, name, width):
self.name = name
self.width = width
def __str__(self):
return "${}".format(self.name)
__repr__ = __str__
class Register(BaseRegister):
"""
Represents a general-purpose, top-level ISA register. Writes to this
register will not be considered interesting on their own, and will be
converted wholly to SSA.
"""
class RegisterPC(BaseRegister):
"""
Represents a program counter register, ie. one that always points to
some place in the current bundle, and thus will return a known value
when read.
"""
def __init__(self, name, width, anchor='start', offset=0):
super().__init__(name, width)
self.anchor = anchor
self.offset = offset
class RegisterSP(BaseRegister):
"""
Represents a hardware stack pointer register.
"""
# XXX: does this warrant a special class? Might be better to handle it
# elsewhere.
# "You know what "special" means, right?" -- mupuf
class RegisterSpecial(BaseRegister):
"""
Represents a special register. Reads and writes of this register will be
considered to be interesting events, and will not be converted to SSA.
"""
class RegisterSplit(BaseRegister):
"""
Represents a split register, ie. one that is really multiple other
registers accessed together as a single entity. ``parts`` is a list
of (start bit, register) tuples. When accessed, the access is converted
to multiple smaller accesses to the parts.
"""
def __init__(self, name, width, parts):
super().__init__(name, width)
self.parts = parts
class SubRegister(BaseRegister):
"""
Represents a subregister of another register. Defined by starting bit
position in the parent and width.
"""
def __init__(self, name, parent, start, width):
super().__init__(name, width)
self.parent = parent
self.start = start
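# ---------------------------------------------------------------------------
# Illustrative sketch (names are assumptions, not part of the original file):
# building a 16-bit register out of two 8-bit halves with the classes above.
#
#   r0l = Register('r0l', 8)
#   r0h = Register('r0h', 8)
#   r0 = RegisterSplit('r0', 16, [(0, r0l), (8, r0h)])
#
# Accesses to r0 would then be decomposed into accesses to r0l and r0h, as
# described in the RegisterSplit docstring.
# ---------------------------------------------------------------------------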
|
codilime/veles
|
python/veles/dis/reg.py
|
Python
|
apache-2.0
| 2,710
|
input = """
1 2 0 0
1 3 0 0
1 4 0 0
1 5 0 0
1 6 0 0
1 7 0 0
1 8 0 0
1 9 0 0
1 10 0 0
1 11 0 0
1 12 0 0
1 13 0 0
1 14 0 0
1 15 0 0
1 16 0 0
1 17 0 0
1 18 0 0
1 19 0 0
1 20 0 0
1 21 0 0
1 22 0 0
1 23 0 0
1 24 0 0
1 25 0 0
1 26 0 0
1 27 0 0
1 28 0 0
1 29 0 0
1 30 0 0
1 31 0 0
1 32 0 0
1 33 0 0
1 34 0 0
1 35 0 0
1 36 0 0
1 37 0 0
1 38 0 0
1 39 0 0
1 40 0 0
1 41 0 0
1 42 0 0
1 43 0 0
1 44 0 0
1 45 0 0
1 46 0 0
1 47 0 0
1 48 0 0
1 49 0 0
1 50 0 0
1 51 0 0
1 52 0 0
1 53 0 0
1 54 0 0
1 55 0 0
1 56 0 0
1 57 0 0
1 58 0 0
1 59 0 0
1 60 0 0
1 61 0 0
1 62 0 0
1 63 0 0
1 64 0 0
1 65 0 0
1 66 0 0
1 67 0 0
1 68 0 0
1 69 0 0
1 70 0 0
1 71 0 0
1 72 0 0
1 73 0 0
1 74 0 0
1 75 0 0
1 76 0 0
1 77 0 0
1 78 0 0
1 79 0 0
1 80 0 0
1 81 0 0
1 82 0 0
1 83 0 0
1 84 0 0
1 85 0 0
1 86 0 0
1 87 0 0
1 88 0 0
1 89 0 0
1 90 0 0
1 91 0 0
1 92 0 0
1 93 0 0
1 94 0 0
1 95 0 0
1 96 0 0
1 97 0 0
1 98 0 0
1 99 0 0
1 100 0 0
1 101 0 0
1 102 0 0
1 103 0 0
1 104 0 0
1 105 0 0
1 106 0 0
1 107 0 0
1 108 0 0
1 109 0 0
1 110 0 0
1 111 0 0
1 112 0 0
1 113 0 0
1 114 0 0
1 115 0 0
1 116 0 0
1 117 0 0
1 118 0 0
1 119 0 0
1 120 0 0
1 121 0 0
1 122 0 0
1 123 0 0
1 124 0 0
1 125 0 0
1 126 0 0
1 127 0 0
1 128 0 0
1 129 0 0
1 130 0 0
1 131 0 0
1 132 0 0
1 133 0 0
1 134 0 0
1 135 0 0
1 136 0 0
1 137 0 0
1 138 0 0
1 139 0 0
1 140 0 0
1 141 0 0
1 142 0 0
1 143 0 0
1 144 0 0
1 145 0 0
1 146 0 0
1 147 0 0
1 148 0 0
1 149 0 0
1 150 0 0
1 151 0 0
1 152 0 0
1 153 0 0
1 154 0 0
1 155 0 0
1 156 0 0
1 157 0 0
1 158 0 0
1 159 0 0
1 160 0 0
1 161 0 0
1 162 0 0
1 163 0 0
1 164 0 0
1 165 0 0
1 166 0 0
1 167 0 0
1 168 0 0
1 169 0 0
1 170 0 0
1 171 0 0
1 172 0 0
1 173 0 0
1 174 0 0
1 175 0 0
1 176 0 0
1 177 0 0
1 178 0 0
1 179 0 0
1 180 0 0
1 181 0 0
1 182 0 0
1 183 0 0
1 184 0 0
1 185 0 0
1 186 0 0
1 187 0 0
1 188 0 0
1 189 0 0
1 190 0 0
1 191 0 0
1 192 0 0
1 193 0 0
1 194 0 0
1 195 0 0
1 196 0 0
1 197 0 0
1 198 0 0
1 199 0 0
1 200 0 0
1 201 0 0
1 202 0 0
1 203 0 0
1 204 0 0
1 205 0 0
1 206 0 0
1 207 0 0
1 208 0 0
1 209 0 0
1 210 0 0
1 211 0 0
1 212 0 0
1 213 0 0
1 214 0 0
1 215 0 0
1 216 0 0
1 217 0 0
1 218 0 0
1 219 0 0
1 220 0 0
1 221 0 0
1 222 0 0
1 223 0 0
1 224 0 0
1 225 0 0
1 226 0 0
1 227 0 0
1 228 0 0
1 229 0 0
1 230 0 0
1 231 0 0
1 232 0 0
1 233 0 0
1 234 0 0
1 235 0 0
1 236 0 0
1 237 0 0
1 238 0 0
1 239 0 0
1 240 0 0
1 241 0 0
1 242 0 0
1 243 0 0
1 244 0 0
1 245 0 0
1 246 0 0
1 247 0 0
1 248 0 0
1 249 0 0
1 250 0 0
1 251 0 0
1 252 0 0
1 253 0 0
1 254 0 0
1 255 0 0
1 256 0 0
1 257 0 0
1 258 0 0
1 259 0 0
1 260 0 0
1 261 0 0
1 262 0 0
1 263 0 0
1 264 0 0
1 265 0 0
1 266 0 0
1 267 0 0
1 268 0 0
1 269 0 0
1 270 0 0
1 271 0 0
1 272 0 0
1 273 0 0
1 274 0 0
1 275 0 0
1 276 0 0
1 277 0 0
1 278 0 0
1 279 0 0
1 280 0 0
1 281 0 0
1 282 0 0
1 283 0 0
1 284 0 0
1 285 0 0
1 286 0 0
1 287 0 0
1 288 0 0
1 289 0 0
1 290 0 0
1 291 0 0
1 292 0 0
1 293 0 0
1 294 0 0
1 295 0 0
1 296 0 0
1 297 0 0
1 298 0 0
1 299 0 0
1 300 0 0
1 301 0 0
1 302 0 0
1 303 0 0
1 304 0 0
1 305 0 0
1 306 0 0
1 307 0 0
1 308 0 0
1 309 0 0
1 310 0 0
1 311 0 0
1 312 0 0
1 313 0 0
1 314 0 0
1 315 0 0
1 316 0 0
1 317 0 0
1 318 0 0
1 319 0 0
1 320 0 0
1 321 0 0
1 322 0 0
1 323 0 0
1 324 0 0
1 325 0 0
1 326 0 0
1 327 0 0
1 328 0 0
1 329 0 0
1 330 0 0
1 331 2 1 332 333
1 332 2 1 331 333
1 333 0 0
1 334 2 1 335 336
1 335 2 1 334 336
1 336 0 0
1 337 2 1 338 339
1 338 2 1 337 339
1 339 0 0
1 340 2 1 341 342
1 341 2 1 340 342
1 342 0 0
1 343 2 1 344 345
1 344 2 1 343 345
1 345 0 0
1 346 2 1 347 348
1 347 2 1 346 348
1 348 0 0
1 349 2 1 350 351
1 350 2 1 349 351
1 351 0 0
1 352 2 1 353 354
1 353 2 1 352 354
1 354 0 0
1 355 2 1 356 357
1 356 2 1 355 357
1 357 0 0
1 358 2 1 359 360
1 359 2 1 358 360
1 360 0 0
1 361 2 1 362 363
1 362 2 1 361 363
1 363 0 0
1 364 2 1 365 366
1 365 2 1 364 366
1 366 0 0
1 367 2 1 368 369
1 368 2 1 367 369
1 369 0 0
1 370 2 1 371 372
1 371 2 1 370 372
1 372 0 0
1 373 2 1 374 375
1 374 2 1 373 375
1 375 0 0
1 376 2 1 377 378
1 377 2 1 376 378
1 378 0 0
1 379 2 1 380 381
1 380 2 1 379 381
1 381 0 0
1 382 2 1 383 384
1 383 2 1 382 384
1 384 0 0
1 385 2 1 386 387
1 386 2 1 385 387
1 387 0 0
1 388 2 1 389 390
1 389 2 1 388 390
1 390 0 0
1 391 2 1 392 393
1 392 2 1 391 393
1 393 0 0
1 394 2 1 395 396
1 395 2 1 394 396
1 396 0 0
1 397 2 1 398 399
1 398 2 1 397 399
1 399 0 0
1 400 2 1 401 402
1 401 2 1 400 402
1 402 0 0
1 403 2 1 404 405
1 404 2 1 403 405
1 405 0 0
1 406 2 1 407 408
1 407 2 1 406 408
1 408 0 0
1 409 2 1 410 411
1 410 2 1 409 411
1 411 0 0
1 412 2 1 413 414
1 413 2 1 412 414
1 414 0 0
1 415 2 1 416 417
1 416 2 1 415 417
1 417 0 0
1 418 2 1 419 420
1 419 2 1 418 420
1 420 0 0
1 421 2 1 422 423
1 422 2 1 421 423
1 423 0 0
1 424 2 1 425 426
1 425 2 1 424 426
1 426 0 0
1 427 2 1 428 429
1 428 2 1 427 429
1 429 0 0
1 430 2 1 431 432
1 431 2 1 430 432
1 432 0 0
1 433 2 1 434 435
1 434 2 1 433 435
1 435 0 0
1 436 2 1 437 438
1 437 2 1 436 438
1 438 0 0
1 439 2 1 440 441
1 440 2 1 439 441
1 441 0 0
1 442 2 1 443 444
1 443 2 1 442 444
1 444 0 0
1 445 2 1 446 447
1 446 2 1 445 447
1 447 0 0
1 448 2 1 449 450
1 449 2 1 448 450
1 450 0 0
1 451 2 1 452 453
1 452 2 1 451 453
1 453 0 0
1 454 2 1 455 456
1 455 2 1 454 456
1 456 0 0
1 457 2 1 458 459
1 458 2 1 457 459
1 459 0 0
1 460 2 1 461 462
1 461 2 1 460 462
1 462 0 0
1 463 2 1 464 465
1 464 2 1 463 465
1 465 0 0
1 466 2 1 467 468
1 467 2 1 466 468
1 468 0 0
1 469 2 1 470 471
1 470 2 1 469 471
1 471 0 0
1 472 2 1 473 474
1 473 2 1 472 474
1 474 0 0
1 475 2 1 476 477
1 476 2 1 475 477
1 477 0 0
1 478 2 1 479 480
1 479 2 1 478 480
1 480 0 0
1 481 2 1 482 483
1 482 2 1 481 483
1 483 0 0
1 484 2 1 485 486
1 485 2 1 484 486
1 486 0 0
1 487 2 1 488 489
1 488 2 1 487 489
1 489 0 0
1 490 2 1 491 492
1 491 2 1 490 492
1 492 0 0
1 493 2 1 494 495
1 494 2 1 493 495
1 495 0 0
1 496 2 1 497 498
1 497 2 1 496 498
1 498 0 0
1 499 2 1 500 501
1 500 2 1 499 501
1 501 0 0
1 502 2 1 503 504
1 503 2 1 502 504
1 504 0 0
1 505 2 1 506 507
1 506 2 1 505 507
1 507 0 0
1 508 2 1 509 510
1 509 2 1 508 510
1 510 0 0
1 511 2 1 512 513
1 512 2 1 511 513
1 513 0 0
1 514 2 1 515 516
1 515 2 1 514 516
1 516 0 0
1 517 2 1 518 519
1 518 2 1 517 519
1 519 0 0
1 520 2 1 521 522
1 521 2 1 520 522
1 522 0 0
1 523 2 1 524 525
1 524 2 1 523 525
1 525 0 0
1 526 2 1 527 528
1 527 2 1 526 528
1 528 0 0
1 529 2 1 530 531
1 530 2 1 529 531
1 531 0 0
1 532 2 1 533 534
1 533 2 1 532 534
1 534 0 0
1 535 2 1 536 537
1 536 2 1 535 537
1 537 0 0
1 538 2 1 539 540
1 539 2 1 538 540
1 540 0 0
1 541 2 1 542 543
1 542 2 1 541 543
1 543 0 0
1 544 2 1 545 546
1 545 2 1 544 546
1 546 0 0
1 547 2 1 548 549
1 548 2 1 547 549
1 549 0 0
1 550 2 1 551 552
1 551 2 1 550 552
1 552 0 0
1 553 2 1 554 555
1 554 2 1 553 555
1 555 0 0
1 556 2 1 557 558
1 557 2 1 556 558
1 558 0 0
1 559 2 1 560 561
1 560 2 1 559 561
1 561 0 0
1 562 2 1 563 564
1 563 2 1 562 564
1 564 0 0
1 565 2 1 566 567
1 566 2 1 565 567
1 567 0 0
1 568 2 1 569 570
1 569 2 1 568 570
1 570 0 0
1 571 2 1 572 573
1 572 2 1 571 573
1 573 0 0
1 574 2 1 575 576
1 575 2 1 574 576
1 576 0 0
1 577 2 1 578 579
1 578 2 1 577 579
1 579 0 0
1 580 2 1 581 582
1 581 2 1 580 582
1 582 0 0
1 583 2 1 584 585
1 584 2 1 583 585
1 585 0 0
1 586 2 1 587 588
1 587 2 1 586 588
1 588 0 0
1 589 2 1 590 591
1 590 2 1 589 591
1 591 0 0
1 592 2 1 593 594
1 593 2 1 592 594
1 594 0 0
1 595 2 1 596 597
1 596 2 1 595 597
1 597 0 0
1 598 2 1 599 600
1 599 2 1 598 600
1 600 0 0
1 601 2 1 602 603
1 602 2 1 601 603
1 603 0 0
1 604 2 1 605 606
1 605 2 1 604 606
1 606 0 0
1 607 2 1 608 609
1 608 2 1 607 609
1 609 0 0
1 610 2 1 611 612
1 611 2 1 610 612
1 612 0 0
1 613 2 1 614 615
1 614 2 1 613 615
1 615 0 0
1 616 2 1 617 618
1 617 2 1 616 618
1 618 0 0
1 619 2 1 620 621
1 620 2 1 619 621
1 621 0 0
1 622 2 1 623 624
1 623 2 1 622 624
1 624 0 0
1 625 2 1 626 627
1 626 2 1 625 627
1 627 0 0
1 628 2 1 629 630
1 629 2 1 628 630
1 630 0 0
1 631 2 1 632 633
1 632 2 1 631 633
1 633 0 0
1 634 2 1 635 636
1 635 2 1 634 636
1 636 0 0
1 637 2 1 638 639
1 638 2 1 637 639
1 639 0 0
1 640 2 1 641 642
1 641 2 1 640 642
1 642 0 0
1 643 2 1 644 645
1 644 2 1 643 645
1 645 0 0
1 646 2 1 647 648
1 647 2 1 646 648
1 648 0 0
1 649 2 1 650 651
1 650 2 1 649 651
1 651 0 0
1 652 2 1 653 654
1 653 2 1 652 654
1 654 0 0
1 655 2 1 656 657
1 656 2 1 655 657
1 657 0 0
1 658 2 1 659 660
1 659 2 1 658 660
1 660 0 0
1 661 2 1 662 663
1 662 2 1 661 663
1 663 0 0
1 664 2 1 665 666
1 665 2 1 664 666
1 666 0 0
1 667 2 1 668 669
1 668 2 1 667 669
1 669 0 0
1 670 2 1 671 672
1 671 2 1 670 672
1 672 0 0
1 673 2 1 674 675
1 674 2 1 673 675
1 675 0 0
1 676 2 1 677 678
1 677 2 1 676 678
1 678 0 0
1 679 2 1 680 681
1 680 2 1 679 681
1 681 0 0
1 682 2 1 683 684
1 683 2 1 682 684
1 684 0 0
1 685 2 1 686 687
1 686 2 1 685 687
1 687 0 0
1 688 2 1 689 690
1 689 2 1 688 690
1 690 0 0
1 691 1 0 688
1 692 1 0 685
1 693 1 0 682
1 694 1 0 679
1 695 1 0 676
1 696 1 0 673
1 697 1 0 670
1 698 1 0 667
1 699 1 0 664
1 700 1 0 661
1 701 1 0 658
1 702 1 0 655
1 703 1 0 652
1 704 1 0 649
1 705 1 0 646
1 706 1 0 643
1 707 1 0 640
1 708 1 0 637
1 709 1 0 634
1 710 1 0 631
1 711 1 0 628
1 712 1 0 625
1 713 1 0 622
1 714 1 0 619
1 715 1 0 616
1 716 1 0 613
1 717 1 0 610
1 718 1 0 607
1 719 1 0 604
1 720 1 0 601
1 721 1 0 598
1 722 1 0 595
1 723 1 0 592
1 724 1 0 589
1 725 1 0 586
1 726 1 0 583
1 727 1 0 580
1 728 1 0 577
1 729 1 0 574
1 730 1 0 571
1 691 1 0 568
1 692 1 0 565
1 693 1 0 562
1 694 1 0 559
1 695 1 0 556
1 696 1 0 553
1 697 1 0 550
1 698 1 0 547
1 699 1 0 544
1 700 1 0 541
1 701 1 0 538
1 702 1 0 535
1 703 1 0 532
1 704 1 0 529
1 705 1 0 526
1 706 1 0 523
1 707 1 0 520
1 708 1 0 517
1 709 1 0 514
1 710 1 0 511
1 711 1 0 508
1 712 1 0 505
1 713 1 0 502
1 714 1 0 499
1 715 1 0 496
1 716 1 0 493
1 717 1 0 490
1 718 1 0 487
1 719 1 0 484
1 720 1 0 481
1 721 1 0 478
1 722 1 0 475
1 723 1 0 472
1 724 1 0 469
1 725 1 0 466
1 726 1 0 463
1 727 1 0 460
1 728 1 0 457
1 729 1 0 454
1 730 1 0 451
1 691 1 0 448
1 692 1 0 445
1 693 1 0 442
1 694 1 0 439
1 695 1 0 436
1 696 1 0 433
1 697 1 0 430
1 698 1 0 427
1 699 1 0 424
1 700 1 0 421
1 701 1 0 418
1 702 1 0 415
1 703 1 0 412
1 704 1 0 409
1 705 1 0 406
1 706 1 0 403
1 707 1 0 400
1 708 1 0 397
1 709 1 0 394
1 710 1 0 391
1 711 1 0 388
1 712 1 0 385
1 713 1 0 382
1 714 1 0 379
1 715 1 0 376
1 716 1 0 373
1 717 1 0 370
1 718 1 0 367
1 719 1 0 364
1 720 1 0 361
1 721 1 0 358
1 722 1 0 355
1 723 1 0 352
1 724 1 0 349
1 725 1 0 346
1 726 1 0 343
1 727 1 0 340
1 728 1 0 337
1 729 1 0 334
1 730 1 0 331
1 1 1 1 730
1 1 1 1 729
1 1 1 1 728
1 1 1 1 727
1 1 1 1 726
1 1 1 1 725
1 1 1 1 724
1 1 1 1 723
1 1 1 1 722
1 1 1 1 721
1 1 1 1 720
1 1 1 1 719
1 1 1 1 718
1 1 1 1 717
1 1 1 1 716
1 1 1 1 715
1 1 1 1 714
1 1 1 1 713
1 1 1 1 712
1 1 1 1 711
1 1 1 1 710
1 1 1 1 709
1 1 1 1 708
1 1 1 1 707
1 1 1 1 706
1 1 1 1 705
1 1 1 1 704
1 1 1 1 703
1 1 1 1 702
1 1 1 1 701
1 1 1 1 700
1 1 1 1 699
1 1 1 1 698
1 1 1 1 697
1 1 1 1 696
1 1 1 1 695
1 1 1 1 694
1 1 1 1 693
1 1 1 1 692
1 1 1 1 691
1 1 2 0 688 568
1 1 2 0 688 448
1 1 2 0 685 565
1 1 2 0 685 445
1 1 2 0 682 562
1 1 2 0 682 442
1 1 2 0 679 559
1 1 2 0 679 439
1 1 2 0 676 556
1 1 2 0 676 436
1 1 2 0 673 553
1 1 2 0 673 433
1 1 2 0 670 550
1 1 2 0 670 430
1 1 2 0 667 547
1 1 2 0 667 427
1 1 2 0 664 544
1 1 2 0 664 424
1 1 2 0 661 541
1 1 2 0 661 421
1 1 2 0 658 538
1 1 2 0 658 418
1 1 2 0 655 535
1 1 2 0 655 415
1 1 2 0 652 532
1 1 2 0 652 412
1 1 2 0 649 529
1 1 2 0 649 409
1 1 2 0 646 526
1 1 2 0 646 406
1 1 2 0 643 523
1 1 2 0 643 403
1 1 2 0 640 520
1 1 2 0 640 400
1 1 2 0 637 517
1 1 2 0 637 397
1 1 2 0 634 514
1 1 2 0 634 394
1 1 2 0 631 511
1 1 2 0 631 391
1 1 2 0 628 508
1 1 2 0 628 388
1 1 2 0 625 505
1 1 2 0 625 385
1 1 2 0 622 502
1 1 2 0 622 382
1 1 2 0 619 499
1 1 2 0 619 379
1 1 2 0 616 496
1 1 2 0 616 376
1 1 2 0 613 493
1 1 2 0 613 373
1 1 2 0 610 490
1 1 2 0 610 370
1 1 2 0 607 487
1 1 2 0 607 367
1 1 2 0 604 484
1 1 2 0 604 364
1 1 2 0 601 481
1 1 2 0 601 361
1 1 2 0 598 478
1 1 2 0 598 358
1 1 2 0 595 475
1 1 2 0 595 355
1 1 2 0 592 472
1 1 2 0 592 352
1 1 2 0 589 469
1 1 2 0 589 349
1 1 2 0 586 466
1 1 2 0 586 346
1 1 2 0 583 463
1 1 2 0 583 343
1 1 2 0 580 460
1 1 2 0 580 340
1 1 2 0 577 457
1 1 2 0 577 337
1 1 2 0 574 454
1 1 2 0 574 334
1 1 2 0 571 451
1 1 2 0 571 331
1 1 2 0 568 688
1 1 2 0 568 448
1 1 2 0 565 685
1 1 2 0 565 445
1 1 2 0 562 682
1 1 2 0 562 442
1 1 2 0 559 679
1 1 2 0 559 439
1 1 2 0 556 676
1 1 2 0 556 436
1 1 2 0 553 673
1 1 2 0 553 433
1 1 2 0 550 670
1 1 2 0 550 430
1 1 2 0 547 667
1 1 2 0 547 427
1 1 2 0 544 664
1 1 2 0 544 424
1 1 2 0 541 661
1 1 2 0 541 421
1 1 2 0 538 658
1 1 2 0 538 418
1 1 2 0 535 655
1 1 2 0 535 415
1 1 2 0 532 652
1 1 2 0 532 412
1 1 2 0 529 649
1 1 2 0 529 409
1 1 2 0 526 646
1 1 2 0 526 406
1 1 2 0 523 643
1 1 2 0 523 403
1 1 2 0 520 640
1 1 2 0 520 400
1 1 2 0 517 637
1 1 2 0 517 397
1 1 2 0 514 634
1 1 2 0 514 394
1 1 2 0 511 631
1 1 2 0 511 391
1 1 2 0 508 628
1 1 2 0 508 388
1 1 2 0 505 625
1 1 2 0 505 385
1 1 2 0 502 622
1 1 2 0 502 382
1 1 2 0 499 619
1 1 2 0 499 379
1 1 2 0 496 616
1 1 2 0 496 376
1 1 2 0 493 613
1 1 2 0 493 373
1 1 2 0 490 610
1 1 2 0 490 370
1 1 2 0 487 607
1 1 2 0 487 367
1 1 2 0 484 604
1 1 2 0 484 364
1 1 2 0 481 601
1 1 2 0 481 361
1 1 2 0 478 598
1 1 2 0 478 358
1 1 2 0 475 595
1 1 2 0 475 355
1 1 2 0 472 592
1 1 2 0 472 352
1 1 2 0 469 589
1 1 2 0 469 349
1 1 2 0 466 586
1 1 2 0 466 346
1 1 2 0 463 583
1 1 2 0 463 343
1 1 2 0 460 580
1 1 2 0 460 340
1 1 2 0 457 577
1 1 2 0 457 337
1 1 2 0 454 574
1 1 2 0 454 334
1 1 2 0 451 571
1 1 2 0 451 331
1 1 2 0 448 688
1 1 2 0 448 568
1 1 2 0 445 685
1 1 2 0 445 565
1 1 2 0 442 682
1 1 2 0 442 562
1 1 2 0 439 679
1 1 2 0 439 559
1 1 2 0 436 676
1 1 2 0 436 556
1 1 2 0 433 673
1 1 2 0 433 553
1 1 2 0 430 670
1 1 2 0 430 550
1 1 2 0 427 667
1 1 2 0 427 547
1 1 2 0 424 664
1 1 2 0 424 544
1 1 2 0 421 661
1 1 2 0 421 541
1 1 2 0 418 658
1 1 2 0 418 538
1 1 2 0 415 655
1 1 2 0 415 535
1 1 2 0 412 652
1 1 2 0 412 532
1 1 2 0 409 649
1 1 2 0 409 529
1 1 2 0 406 646
1 1 2 0 406 526
1 1 2 0 403 643
1 1 2 0 403 523
1 1 2 0 400 640
1 1 2 0 400 520
1 1 2 0 397 637
1 1 2 0 397 517
1 1 2 0 394 634
1 1 2 0 394 514
1 1 2 0 391 631
1 1 2 0 391 511
1 1 2 0 388 628
1 1 2 0 388 508
1 1 2 0 385 625
1 1 2 0 385 505
1 1 2 0 382 622
1 1 2 0 382 502
1 1 2 0 379 619
1 1 2 0 379 499
1 1 2 0 376 616
1 1 2 0 376 496
1 1 2 0 373 613
1 1 2 0 373 493
1 1 2 0 370 610
1 1 2 0 370 490
1 1 2 0 367 607
1 1 2 0 367 487
1 1 2 0 364 604
1 1 2 0 364 484
1 1 2 0 361 601
1 1 2 0 361 481
1 1 2 0 358 598
1 1 2 0 358 478
1 1 2 0 355 595
1 1 2 0 355 475
1 1 2 0 352 592
1 1 2 0 352 472
1 1 2 0 349 589
1 1 2 0 349 469
1 1 2 0 346 586
1 1 2 0 346 466
1 1 2 0 343 583
1 1 2 0 343 463
1 1 2 0 340 580
1 1 2 0 340 460
1 1 2 0 337 577
1 1 2 0 337 457
1 1 2 0 334 574
1 1 2 0 334 454
1 1 2 0 331 571
1 1 2 0 331 451
1 1 2 0 688 685
1 1 2 0 688 658
1 1 2 0 688 646
1 1 2 0 688 643
1 1 2 0 688 589
1 1 2 0 688 574
1 1 2 0 688 571
1 1 2 0 685 670
1 1 2 0 685 652
1 1 2 0 685 646
1 1 2 0 685 643
1 1 2 0 685 622
1 1 2 0 685 619
1 1 2 0 685 574
1 1 2 0 682 679
1 1 2 0 682 670
1 1 2 0 682 640
1 1 2 0 682 625
1 1 2 0 682 604
1 1 2 0 682 601
1 1 2 0 682 580
1 1 2 0 679 676
1 1 2 0 679 673
1 1 2 0 679 655
1 1 2 0 679 634
1 1 2 0 679 610
1 1 2 0 679 577
1 1 2 0 676 664
1 1 2 0 676 658
1 1 2 0 676 652
1 1 2 0 676 613
1 1 2 0 676 610
1 1 2 0 676 595
1 1 2 0 676 592
1 1 2 0 673 670
1 1 2 0 673 649
1 1 2 0 673 643
1 1 2 0 673 607
1 1 2 0 670 655
1 1 2 0 670 649
1 1 2 0 670 637
1 1 2 0 670 631
1 1 2 0 667 652
1 1 2 0 667 649
1 1 2 0 667 646
1 1 2 0 667 643
1 1 2 0 667 607
1 1 2 0 667 601
1 1 2 0 667 577
1 1 2 0 667 571
1 1 2 0 664 643
1 1 2 0 664 640
1 1 2 0 664 625
1 1 2 0 664 589
1 1 2 0 664 586
1 1 2 0 661 655
1 1 2 0 661 631
1 1 2 0 661 628
1 1 2 0 661 625
1 1 2 0 661 613
1 1 2 0 661 574
1 1 2 0 658 652
1 1 2 0 658 646
1 1 2 0 658 613
1 1 2 0 658 610
1 1 2 0 658 601
1 1 2 0 655 652
1 1 2 0 655 637
1 1 2 0 655 634
1 1 2 0 655 613
1 1 2 0 655 601
1 1 2 0 655 595
1 1 2 0 652 649
1 1 2 0 652 625
1 1 2 0 652 622
1 1 2 0 652 616
1 1 2 0 652 613
1 1 2 0 652 604
1 1 2 0 649 637
1 1 2 0 649 601
1 1 2 0 649 583
1 1 2 0 649 574
1 1 2 0 646 607
1 1 2 0 646 574
1 1 2 0 646 571
1 1 2 0 643 637
1 1 2 0 643 616
1 1 2 0 643 598
1 1 2 0 643 595
1 1 2 0 643 589
1 1 2 0 640 637
1 1 2 0 637 631
1 1 2 0 637 619
1 1 2 0 637 613
1 1 2 0 637 598
1 1 2 0 637 592
1 1 2 0 637 583
1 1 2 0 637 574
1 1 2 0 634 583
1 1 2 0 634 571
1 1 2 0 631 628
1 1 2 0 631 625
1 1 2 0 631 619
1 1 2 0 631 598
1 1 2 0 631 583
1 1 2 0 628 625
1 1 2 0 628 604
1 1 2 0 628 598
1 1 2 0 628 577
1 1 2 0 625 592
1 1 2 0 625 577
1 1 2 0 625 574
1 1 2 0 622 604
1 1 2 0 622 598
1 1 2 0 622 571
1 1 2 0 619 616
1 1 2 0 619 613
1 1 2 0 619 610
1 1 2 0 619 604
1 1 2 0 616 610
1 1 2 0 616 598
1 1 2 0 616 583
1 1 2 0 613 610
1 1 2 0 613 604
1 1 2 0 613 601
1 1 2 0 613 574
1 1 2 0 610 607
1 1 2 0 610 577
1 1 2 0 607 601
1 1 2 0 604 601
1 1 2 0 604 595
1 1 2 0 604 586
1 1 2 0 604 577
1 1 2 0 601 589
1 1 2 0 601 574
1 1 2 0 601 571
1 1 2 0 598 595
1 1 2 0 598 577
1 1 2 0 595 574
1 1 2 0 592 574
1 1 2 0 589 580
1 1 2 0 589 571
1 1 2 0 586 577
1 1 2 0 583 580
1 1 2 0 577 574
1 1 2 0 574 571
1 1 2 0 568 565
1 1 2 0 568 538
1 1 2 0 568 526
1 1 2 0 568 523
1 1 2 0 568 469
1 1 2 0 568 454
1 1 2 0 568 451
1 1 2 0 565 550
1 1 2 0 565 532
1 1 2 0 565 526
1 1 2 0 565 523
1 1 2 0 565 502
1 1 2 0 565 499
1 1 2 0 565 454
1 1 2 0 562 559
1 1 2 0 562 550
1 1 2 0 562 520
1 1 2 0 562 505
1 1 2 0 562 484
1 1 2 0 562 481
1 1 2 0 562 460
1 1 2 0 559 556
1 1 2 0 559 553
1 1 2 0 559 535
1 1 2 0 559 514
1 1 2 0 559 490
1 1 2 0 559 457
1 1 2 0 556 544
1 1 2 0 556 538
1 1 2 0 556 532
1 1 2 0 556 493
1 1 2 0 556 490
1 1 2 0 556 475
1 1 2 0 556 472
1 1 2 0 553 550
1 1 2 0 553 529
1 1 2 0 553 523
1 1 2 0 553 487
1 1 2 0 550 535
1 1 2 0 550 529
1 1 2 0 550 517
1 1 2 0 550 511
1 1 2 0 547 532
1 1 2 0 547 529
1 1 2 0 547 526
1 1 2 0 547 523
1 1 2 0 547 487
1 1 2 0 547 481
1 1 2 0 547 457
1 1 2 0 547 451
1 1 2 0 544 523
1 1 2 0 544 520
1 1 2 0 544 505
1 1 2 0 544 469
1 1 2 0 544 466
1 1 2 0 541 535
1 1 2 0 541 511
1 1 2 0 541 508
1 1 2 0 541 505
1 1 2 0 541 493
1 1 2 0 541 454
1 1 2 0 538 532
1 1 2 0 538 526
1 1 2 0 538 493
1 1 2 0 538 490
1 1 2 0 538 481
1 1 2 0 535 532
1 1 2 0 535 517
1 1 2 0 535 514
1 1 2 0 535 493
1 1 2 0 535 481
1 1 2 0 535 475
1 1 2 0 532 529
1 1 2 0 532 505
1 1 2 0 532 502
1 1 2 0 532 496
1 1 2 0 532 493
1 1 2 0 532 484
1 1 2 0 529 517
1 1 2 0 529 481
1 1 2 0 529 463
1 1 2 0 529 454
1 1 2 0 526 487
1 1 2 0 526 454
1 1 2 0 526 451
1 1 2 0 523 517
1 1 2 0 523 496
1 1 2 0 523 478
1 1 2 0 523 475
1 1 2 0 523 469
1 1 2 0 520 517
1 1 2 0 517 511
1 1 2 0 517 499
1 1 2 0 517 493
1 1 2 0 517 478
1 1 2 0 517 472
1 1 2 0 517 463
1 1 2 0 517 454
1 1 2 0 514 463
1 1 2 0 514 451
1 1 2 0 511 508
1 1 2 0 511 505
1 1 2 0 511 499
1 1 2 0 511 478
1 1 2 0 511 463
1 1 2 0 508 505
1 1 2 0 508 484
1 1 2 0 508 478
1 1 2 0 508 457
1 1 2 0 505 472
1 1 2 0 505 457
1 1 2 0 505 454
1 1 2 0 502 484
1 1 2 0 502 478
1 1 2 0 502 451
1 1 2 0 499 496
1 1 2 0 499 493
1 1 2 0 499 490
1 1 2 0 499 484
1 1 2 0 496 490
1 1 2 0 496 478
1 1 2 0 496 463
1 1 2 0 493 490
1 1 2 0 493 484
1 1 2 0 493 481
1 1 2 0 493 454
1 1 2 0 490 487
1 1 2 0 490 457
1 1 2 0 487 481
1 1 2 0 484 481
1 1 2 0 484 475
1 1 2 0 484 466
1 1 2 0 484 457
1 1 2 0 481 469
1 1 2 0 481 454
1 1 2 0 481 451
1 1 2 0 478 475
1 1 2 0 478 457
1 1 2 0 475 454
1 1 2 0 472 454
1 1 2 0 469 460
1 1 2 0 469 451
1 1 2 0 466 457
1 1 2 0 463 460
1 1 2 0 457 454
1 1 2 0 454 451
1 1 2 0 448 445
1 1 2 0 448 418
1 1 2 0 448 406
1 1 2 0 448 403
1 1 2 0 448 349
1 1 2 0 448 334
1 1 2 0 448 331
1 1 2 0 445 430
1 1 2 0 445 412
1 1 2 0 445 406
1 1 2 0 445 403
1 1 2 0 445 382
1 1 2 0 445 379
1 1 2 0 445 334
1 1 2 0 442 439
1 1 2 0 442 430
1 1 2 0 442 400
1 1 2 0 442 385
1 1 2 0 442 364
1 1 2 0 442 361
1 1 2 0 442 340
1 1 2 0 439 436
1 1 2 0 439 433
1 1 2 0 439 415
1 1 2 0 439 394
1 1 2 0 439 370
1 1 2 0 439 337
1 1 2 0 436 424
1 1 2 0 436 418
1 1 2 0 436 412
1 1 2 0 436 373
1 1 2 0 436 370
1 1 2 0 436 355
1 1 2 0 436 352
1 1 2 0 433 430
1 1 2 0 433 409
1 1 2 0 433 403
1 1 2 0 433 367
1 1 2 0 430 415
1 1 2 0 430 409
1 1 2 0 430 397
1 1 2 0 430 391
1 1 2 0 427 412
1 1 2 0 427 409
1 1 2 0 427 406
1 1 2 0 427 403
1 1 2 0 427 367
1 1 2 0 427 361
1 1 2 0 427 337
1 1 2 0 427 331
1 1 2 0 424 403
1 1 2 0 424 400
1 1 2 0 424 385
1 1 2 0 424 349
1 1 2 0 424 346
1 1 2 0 421 415
1 1 2 0 421 391
1 1 2 0 421 388
1 1 2 0 421 385
1 1 2 0 421 373
1 1 2 0 421 334
1 1 2 0 418 412
1 1 2 0 418 406
1 1 2 0 418 373
1 1 2 0 418 370
1 1 2 0 418 361
1 1 2 0 415 412
1 1 2 0 415 397
1 1 2 0 415 394
1 1 2 0 415 373
1 1 2 0 415 361
1 1 2 0 415 355
1 1 2 0 412 409
1 1 2 0 412 385
1 1 2 0 412 382
1 1 2 0 412 376
1 1 2 0 412 373
1 1 2 0 412 364
1 1 2 0 409 397
1 1 2 0 409 361
1 1 2 0 409 343
1 1 2 0 409 334
1 1 2 0 406 367
1 1 2 0 406 334
1 1 2 0 406 331
1 1 2 0 403 397
1 1 2 0 403 376
1 1 2 0 403 358
1 1 2 0 403 355
1 1 2 0 403 349
1 1 2 0 400 397
1 1 2 0 397 391
1 1 2 0 397 379
1 1 2 0 397 373
1 1 2 0 397 358
1 1 2 0 397 352
1 1 2 0 397 343
1 1 2 0 397 334
1 1 2 0 394 343
1 1 2 0 394 331
1 1 2 0 391 388
1 1 2 0 391 385
1 1 2 0 391 379
1 1 2 0 391 358
1 1 2 0 391 343
1 1 2 0 388 385
1 1 2 0 388 364
1 1 2 0 388 358
1 1 2 0 388 337
1 1 2 0 385 352
1 1 2 0 385 337
1 1 2 0 385 334
1 1 2 0 382 364
1 1 2 0 382 358
1 1 2 0 382 331
1 1 2 0 379 376
1 1 2 0 379 373
1 1 2 0 379 370
1 1 2 0 379 364
1 1 2 0 376 370
1 1 2 0 376 358
1 1 2 0 376 343
1 1 2 0 373 370
1 1 2 0 373 364
1 1 2 0 373 361
1 1 2 0 373 334
1 1 2 0 370 367
1 1 2 0 370 337
1 1 2 0 367 361
1 1 2 0 364 361
1 1 2 0 364 355
1 1 2 0 364 346
1 1 2 0 364 337
1 1 2 0 361 349
1 1 2 0 361 334
1 1 2 0 361 331
1 1 2 0 358 355
1 1 2 0 358 337
1 1 2 0 355 334
1 1 2 0 352 334
1 1 2 0 349 340
1 1 2 0 349 331
1 1 2 0 346 337
1 1 2 0 343 340
1 1 2 0 337 334
1 1 2 0 334 331
0
331 col(39,3)
334 col(38,3)
337 col(37,3)
340 col(36,3)
343 col(35,3)
346 col(34,3)
349 col(33,3)
352 col(32,3)
355 col(31,3)
358 col(30,3)
361 col(29,3)
364 col(28,3)
367 col(27,3)
370 col(26,3)
373 col(25,3)
376 col(24,3)
379 col(23,3)
382 col(22,3)
385 col(21,3)
388 col(20,3)
391 col(19,3)
394 col(18,3)
397 col(17,3)
400 col(16,3)
403 col(15,3)
406 col(14,3)
409 col(13,3)
412 col(12,3)
415 col(11,3)
418 col(10,3)
421 col(9,3)
424 col(8,3)
427 col(7,3)
430 col(6,3)
433 col(5,3)
436 col(4,3)
439 col(3,3)
442 col(2,3)
445 col(1,3)
448 col(0,3)
451 col(39,2)
454 col(38,2)
457 col(37,2)
460 col(36,2)
463 col(35,2)
466 col(34,2)
469 col(33,2)
472 col(32,2)
475 col(31,2)
478 col(30,2)
481 col(29,2)
484 col(28,2)
487 col(27,2)
490 col(26,2)
493 col(25,2)
496 col(24,2)
499 col(23,2)
502 col(22,2)
505 col(21,2)
508 col(20,2)
511 col(19,2)
514 col(18,2)
517 col(17,2)
520 col(16,2)
523 col(15,2)
526 col(14,2)
529 col(13,2)
532 col(12,2)
535 col(11,2)
538 col(10,2)
541 col(9,2)
544 col(8,2)
547 col(7,2)
550 col(6,2)
553 col(5,2)
556 col(4,2)
559 col(3,2)
562 col(2,2)
565 col(1,2)
568 col(0,2)
571 col(39,1)
574 col(38,1)
577 col(37,1)
580 col(36,1)
583 col(35,1)
586 col(34,1)
589 col(33,1)
592 col(32,1)
595 col(31,1)
598 col(30,1)
601 col(29,1)
604 col(28,1)
607 col(27,1)
610 col(26,1)
613 col(25,1)
616 col(24,1)
619 col(23,1)
622 col(22,1)
625 col(21,1)
628 col(20,1)
631 col(19,1)
634 col(18,1)
637 col(17,1)
640 col(16,1)
643 col(15,1)
646 col(14,1)
649 col(13,1)
652 col(12,1)
655 col(11,1)
658 col(10,1)
661 col(9,1)
664 col(8,1)
667 col(7,1)
670 col(6,1)
673 col(5,1)
676 col(4,1)
679 col(3,1)
682 col(2,1)
685 col(1,1)
688 col(0,1)
0
B+
0
B-
1
0
1
"""
output = """
INCOHERENT
"""
|
alviano/wasp
|
tests/asp/cautious/graph.colouring.3.40.5_4.asp.cautious.asp.test.py
|
Python
|
apache-2.0
| 23,424
|
import logging
from functools import partial
from multiprocessing.dummy import Pool as ThreadPool
from taskcat._s3_sync import S3Sync
from taskcat.exceptions import TaskCatException
LOG = logging.getLogger(__name__)
class S3APIResponse:
def __init__(self, x):
self._http_code = x["ResponseMetadata"]["HTTPStatusCode"]
@property
def ok(self):
if self._http_code == 200:
return True
return False
class S3BucketCreatorException(TaskCatException):
pass
def stage_in_s3(buckets, project_name, project_root):
distinct_buckets = {}
for test in buckets.values():
for bucket in test.values():
distinct_buckets[f"{bucket.name}-{bucket.partition}"] = bucket
pool = ThreadPool(32)
func = partial(_sync_wrap, project_name=project_name, project_root=project_root)
pool.map(func, distinct_buckets.values())
pool.close()
pool.join()
def _sync_wrap(bucket, project_name, project_root):
S3Sync(bucket.s3_client, bucket.name, project_name, project_root, bucket.object_acl)
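# ---------------------------------------------------------------------------
# Illustrative sketch (shapes are assumptions for demonstration only):
# stage_in_s3() expects ``buckets`` to be a nested mapping of
# test name -> key -> bucket objects exposing .name, .partition, .s3_client
# and .object_acl; it de-duplicates them by "name-partition" and syncs each
# distinct bucket on the 32-thread pool, e.g.
#
#   buckets = {"test-a": {"default": bucket_obj}}   # hypothetical layout
#   stage_in_s3(buckets, "my-project", "/path/to/project")
# ---------------------------------------------------------------------------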
|
aws-quickstart/taskcat
|
taskcat/_s3_stage.py
|
Python
|
apache-2.0
| 1,070
|
from copy import deepcopy
from indy_node.test import waits
from stp_core.loop.eventually import eventually
from plenum.common.constants import VERSION
from indy_common.constants import FORCE
from indy_node.test.upgrade.helper import bumpedVersion, checkUpgradeScheduled, \
check_no_loop, sdk_ensure_upgrade_sent
from indy_node.server.upgrade_log import UpgradeLog
import indy_node
whitelist = ['Failed to upgrade node']
def test_upgrade_does_not_get_into_loop_force(looper, tconf, nodeSet,
validUpgrade, sdk_pool_handle,
sdk_wallet_trustee, monkeypatch):
new_version = bumpedVersion()
upgr1 = deepcopy(validUpgrade)
upgr1[VERSION] = new_version
upgr1[FORCE] = True
    # Schedule an upgrade; it should pass
sdk_ensure_upgrade_sent(looper, sdk_pool_handle, sdk_wallet_trustee, upgr1)
looper.run(
eventually(
checkUpgradeScheduled,
nodeSet,
upgr1[VERSION],
retryWait=1,
timeout=waits.expectedUpgradeScheduled()))
# here we make nodes think they have upgraded successfully
monkeypatch.setattr(indy_node.__metadata__, '__version__', new_version)
check_no_loop(nodeSet, UpgradeLog.SUCCEEDED)
|
spivachuk/sovrin-node
|
indy_node/test/upgrade/test_pool_upgrade_no_loop_force.py
|
Python
|
apache-2.0
| 1,295
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
import tempfile
import unittest
import six
from pathspec import PathSpec
from pathspec.patterns import GitWildMatchPattern
from twitter.common.collections import OrderedSet
from pants.base.build_file import BuildFile
from pants.base.file_system_project_tree import FileSystemProjectTree
from pants.base.project_tree import ProjectTree
from pants.util.dirutil import safe_mkdir, safe_open, touch
class FilesystemBuildFileTest(unittest.TestCase):
def fullpath(self, path):
return os.path.join(self.root_dir, path)
def makedirs(self, path):
safe_mkdir(self.fullpath(path))
def touch(self, path):
touch(self.fullpath(path))
def _create_ignore_spec(self, build_ignore_patterns):
return PathSpec.from_lines(GitWildMatchPattern, build_ignore_patterns or [])
def scan_buildfiles(self, base_relpath, build_ignore_patterns=None):
return BuildFile.scan_build_files(self._project_tree, base_relpath,
build_ignore_patterns=self._create_ignore_spec(build_ignore_patterns))
def create_buildfile(self, relpath):
return BuildFile(self._project_tree, relpath)
def get_build_files_family(self, relpath, build_ignore_patterns=None):
return BuildFile.get_build_files_family(self._project_tree, relpath,
build_ignore_patterns=self._create_ignore_spec(build_ignore_patterns))
def setUp(self):
self.base_dir = tempfile.mkdtemp()
# Seed a BUILD outside the build root that should not be detected
touch(os.path.join(self.base_dir, 'BUILD'))
self.root_dir = os.path.join(self.base_dir, 'root')
self.touch('grandparent/parent/BUILD')
self.touch('grandparent/parent/BUILD.twitter')
# Tricky! This is a directory
self.makedirs('grandparent/parent/BUILD.dir')
self.makedirs('grandparent/BUILD')
self.touch('BUILD')
self.touch('BUILD.twitter')
self.touch('grandparent/parent/child1/BUILD')
self.touch('grandparent/parent/child1/BUILD.twitter')
self.touch('grandparent/parent/child2/child3/BUILD')
self.makedirs('grandparent/parent/child2/BUILD')
self.makedirs('grandparent/parent/child4')
self.touch('grandparent/parent/child5/BUILD')
self.makedirs('path-that-does-exist')
self.touch('path-that-does-exist/BUILD.invalid.suffix')
# This exercises https://github.com/pantsbuild/pants/issues/1742
# Prior to that fix, BUILD directories were handled, but not if there was a valid BUILD file
# sibling.
self.makedirs('issue_1742/BUILD')
self.touch('issue_1742/BUILD.sibling')
self._project_tree = FileSystemProjectTree(self.root_dir)
self.buildfile = self.create_buildfile('grandparent/parent/BUILD')
def tearDown(self):
shutil.rmtree(self.base_dir)
def test_build_files_family_lookup_1(self):
buildfile = self.create_buildfile('grandparent/parent/BUILD.twitter')
self.assertEqual({buildfile, self.buildfile},
set(self.get_build_files_family('grandparent/parent')))
self.assertEqual({buildfile, self.buildfile},
set(self.get_build_files_family('grandparent/parent/')))
self.assertEqual({self.create_buildfile('grandparent/parent/child2/child3/BUILD')},
set(self.get_build_files_family('grandparent/parent/child2/child3')))
def test_build_files_family_lookup_2(self):
self.assertEqual(OrderedSet([
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
]), self.get_build_files_family('grandparent/parent'))
buildfile = self.create_buildfile('grandparent/parent/child2/child3/BUILD')
self.assertEqual(OrderedSet([buildfile]), self.get_build_files_family('grandparent/parent/child2/child3'))
def test_build_files_family_lookup_with_ignore(self):
self.assertEqual(OrderedSet([
self.create_buildfile('grandparent/parent/BUILD'),
]), self.get_build_files_family('grandparent/parent', build_ignore_patterns=['*.twitter']))
def test_build_files_scan(self):
self.assertEqual(OrderedSet([
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child1/BUILD'),
self.create_buildfile('grandparent/parent/child1/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
]), self.scan_buildfiles('grandparent/parent'))
def test_build_files_scan_with_relpath_ignore(self):
buildfiles = self.scan_buildfiles('', build_ignore_patterns=[
'grandparent/parent/child1',
'grandparent/parent/child2'])
self.assertEqual(OrderedSet([
self.create_buildfile('BUILD'),
self.create_buildfile('BUILD.twitter'),
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
self.create_buildfile('issue_1742/BUILD.sibling'),
]), buildfiles)
buildfiles = self.scan_buildfiles('grandparent/parent', build_ignore_patterns=['grandparent/parent/child1'])
self.assertEqual(OrderedSet([
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
]), buildfiles)
def test_build_files_scan_with_abspath_ignore(self):
self.touch('parent/BUILD')
self.assertEqual(OrderedSet([
self.create_buildfile('BUILD'),
self.create_buildfile('BUILD.twitter'),
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child1/BUILD'),
self.create_buildfile('grandparent/parent/child1/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
self.create_buildfile('issue_1742/BUILD.sibling'),
]), self.scan_buildfiles('', build_ignore_patterns=['/parent']))
def test_build_files_scan_with_wildcard_ignore(self):
self.assertEqual(OrderedSet([
self.create_buildfile('BUILD'),
self.create_buildfile('BUILD.twitter'),
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('issue_1742/BUILD.sibling'),
]), self.scan_buildfiles('', build_ignore_patterns=['**/child*']))
def test_build_files_scan_with_ignore_patterns(self):
self.assertEqual(OrderedSet([
self.create_buildfile('BUILD'),
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/child1/BUILD'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
self.create_buildfile('issue_1742/BUILD.sibling'),
]), self.scan_buildfiles('', build_ignore_patterns=['BUILD.twitter']))
def test_subdir_ignore(self):
self.touch('grandparent/child1/BUILD')
self.assertEqual(OrderedSet([
self.create_buildfile('BUILD'),
self.create_buildfile('BUILD.twitter'),
self.create_buildfile('grandparent/child1/BUILD'),
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
self.create_buildfile('issue_1742/BUILD.sibling'),
]), self.scan_buildfiles('', build_ignore_patterns=['**/parent/child1']))
def test_subdir_file_pattern_ignore(self):
self.assertEqual(OrderedSet([
self.create_buildfile('BUILD'),
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/child1/BUILD'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
]), self.scan_buildfiles('', build_ignore_patterns=['BUILD.*']))
def test_build_files_scan_with_non_default_relpath_ignore(self):
self.assertEqual(OrderedSet([
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
]), self.scan_buildfiles('grandparent/parent', build_ignore_patterns=['**/parent/child1']))
def test_must_exist_true(self):
with self.assertRaises(BuildFile.MissingBuildFileError):
self.create_buildfile("path-that-does-not-exist/BUILD")
with self.assertRaises(BuildFile.MissingBuildFileError):
self.create_buildfile("path-that-does-exist/BUILD")
with self.assertRaises(BuildFile.MissingBuildFileError):
self.create_buildfile("path-that-does-exist/BUILD.invalid.suffix")
def test_suffix_only(self):
self.makedirs('suffix-test')
self.touch('suffix-test/BUILD.suffix')
self.touch('suffix-test/BUILD.suffix2')
self.makedirs('suffix-test/child')
self.touch('suffix-test/child/BUILD.suffix3')
buildfile = self.create_buildfile('suffix-test/BUILD.suffix')
self.assertEqual(OrderedSet([buildfile, self.create_buildfile('suffix-test/BUILD.suffix2')]),
OrderedSet(self.get_build_files_family('suffix-test')))
self.assertEqual(OrderedSet([self.create_buildfile('suffix-test/BUILD.suffix'),
self.create_buildfile('suffix-test/BUILD.suffix2')]),
self.get_build_files_family('suffix-test'))
self.assertEqual(OrderedSet([self.create_buildfile('suffix-test/child/BUILD.suffix3')]),
self.scan_buildfiles('suffix-test/child'))
def test_directory_called_build_skipped(self):
# Ensure the buildfiles found do not include grandparent/BUILD since it is a dir.
buildfiles = self.scan_buildfiles('grandparent')
self.assertEqual(OrderedSet([
self.create_buildfile('grandparent/parent/BUILD'),
self.create_buildfile('grandparent/parent/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child1/BUILD'),
self.create_buildfile('grandparent/parent/child1/BUILD.twitter'),
self.create_buildfile('grandparent/parent/child2/child3/BUILD'),
self.create_buildfile('grandparent/parent/child5/BUILD'),
]), buildfiles)
def test_dir_is_primary(self):
self.assertEqual([self.create_buildfile('issue_1742/BUILD.sibling')],
list(self.get_build_files_family('issue_1742')))
def test_invalid_root_dir_error(self):
self.touch('BUILD')
with self.assertRaises(ProjectTree.InvalidBuildRootError):
BuildFile(FileSystemProjectTree('tmp'), 'grandparent/BUILD')
def test_exception_class_hierarchy(self):
"""Exception handling code depends on the fact that all exceptions from BuildFile are
subclassed from the BuildFileError base class.
"""
self.assertIsInstance(BuildFile.MissingBuildFileError(), BuildFile.BuildFileError)
def test_code(self):
with safe_open(self.fullpath('BUILD.code'), 'w') as fp:
fp.write('lib = java_library(name="jake", age=42)')
build_file = self.create_buildfile('BUILD.code')
parsed_locals = {}
six.exec_(build_file.code(), {'java_library': dict}, parsed_locals)
lib = parsed_locals.pop('lib', None)
self.assertEqual(dict(name='jake', age=42), lib)
def test_code_syntax_error(self):
with safe_open(self.fullpath('BUILD.badsyntax'), 'w') as fp:
fp.write('java_library(name=if)')
build_file = self.create_buildfile('BUILD.badsyntax')
with self.assertRaises(SyntaxError) as e:
build_file.code()
self.assertEqual(build_file.full_path, e.exception.filename)
|
twitter/pants
|
tests/python/pants_test/base/test_filesystem_build_file.py
|
Python
|
apache-2.0
| 12,278
|
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import random
import uuid
from oslo_config import cfg
import six
from rally.common import broker
from rally.common.i18n import _
from rally.common import logging
from rally.common import objects
from rally.common import utils as rutils
from rally import consts
from rally import exceptions
from rally import osclients
from rally.plugins.openstack.wrappers import keystone
from rally.plugins.openstack.wrappers import network
from rally.task import context
from rally.task import utils
LOG = logging.getLogger(__name__)
USER_CONTEXT_OPTS = [
cfg.IntOpt("resource_management_workers",
default=30,
help="How many concurrent threads use for serving users "
"context"),
cfg.StrOpt("project_domain",
default="default",
help="ID of domain in which projects will be created."),
cfg.StrOpt("user_domain",
default="default",
help="ID of domain in which users will be created."),
]
CONF = cfg.CONF
CONF.register_opts(USER_CONTEXT_OPTS,
group=cfg.OptGroup(name="users_context",
title="benchmark context options"))
class UserContextMixin(object):
def map_for_scenario(self, context_obj):
"""Pass only context of one user and related to it tenant to scenario.
We are choosing on each iteration one user
"""
scenario_ctx = {}
for key, value in six.iteritems(context_obj):
if key not in ["users", "tenants"]:
scenario_ctx[key] = value
user = random.choice(context_obj["users"])
tenant = context_obj["tenants"][user["tenant_id"]]
scenario_ctx["user"], scenario_ctx["tenant"] = user, tenant
return scenario_ctx
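# ---------------------------------------------------------------------------
# Illustrative sketch (shapes are assumptions for demonstration only):
# map_for_scenario() above expects a context object roughly like
#
#   {"users": [{"id": ..., "credential": ..., "tenant_id": "t1"}, ...],
#    "tenants": {"t1": {"id": "t1", "name": ...}, ...},
#    "other_key": ...}
#
# and returns a per-iteration copy in which "user" is one randomly chosen
# entry from "users" and "tenant" is the matching entry from "tenants".
# ---------------------------------------------------------------------------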
@context.configure(name="users", order=100)
class UserGenerator(UserContextMixin, context.Context):
"""Context class for generating temporary users/tenants for benchmarks."""
CONFIG_SCHEMA = {
"type": "object",
"$schema": consts.JSON_SCHEMA,
"properties": {
"tenants": {
"type": "integer",
"minimum": 1
},
"users_per_tenant": {
"type": "integer",
"minimum": 1
},
"resource_management_workers": {
"type": "integer",
"minimum": 1
},
"project_domain": {
"type": "string",
},
"user_domain": {
"type": "string",
},
},
"additionalProperties": False
}
DEFAULT_CONFIG = {
"tenants": 1,
"users_per_tenant": 1,
"resource_management_workers":
cfg.CONF.users_context.resource_management_workers,
"project_domain": cfg.CONF.users_context.project_domain,
"user_domain": cfg.CONF.users_context.user_domain
}
def __init__(self, context):
super(UserGenerator, self).__init__(context)
self.credential = self.context["admin"]["credential"]
def _remove_default_security_group(self):
"""Delete default security group for tenants."""
clients = osclients.Clients(self.credential)
if consts.Service.NEUTRON not in clients.services().values():
return
use_sg, msg = network.wrap(clients, self).supports_extension(
"security-group")
if not use_sg:
LOG.debug("Security group context is disabled: %s" % msg)
return
for user, tenant_id in rutils.iterate_per_tenants(
self.context["users"]):
with logging.ExceptionLogger(
LOG, _("Unable to delete default security group")):
uclients = osclients.Clients(user["credential"])
sg = uclients.nova().security_groups.find(name="default")
clients.neutron().delete_security_group(sg.id)
def _remove_associated_networks(self):
"""Delete associated Nova networks from tenants."""
# NOTE(rmk): Ugly hack to deal with the fact that Nova Network
# networks can only be disassociated in an admin context. Discussed
# with boris-42 before taking this approach [LP-Bug #1350517].
clients = osclients.Clients(self.credential)
if consts.Service.NOVA not in clients.services().values():
return
nova_admin = clients.nova()
if not utils.check_service_status(nova_admin, "nova-network"):
return
for net in nova_admin.networks.list():
network_tenant_id = nova_admin.networks.get(net).project_id
if network_tenant_id in self.context["tenants"]:
try:
nova_admin.networks.disassociate(net)
except Exception as ex:
LOG.warning("Failed disassociate net: %(tenant_id)s. "
"Exception: %(ex)s" %
{"tenant_id": network_tenant_id, "ex": ex})
def _create_tenants(self):
threads = self.config["resource_management_workers"]
tenants = collections.deque()
def publish(queue):
for i in range(self.config["tenants"]):
args = (self.config["project_domain"], self.task["uuid"], i)
queue.append(args)
def consume(cache, args):
domain, task_id, i = args
if "client" not in cache:
clients = osclients.Clients(self.credential)
cache["client"] = keystone.wrap(clients.keystone())
tenant = cache["client"].create_project(
self.generate_random_name(), domain)
tenant_dict = {"id": tenant.id, "name": tenant.name}
tenants.append(tenant_dict)
# NOTE(msdubov): consume() will fill the tenants list in the closure.
broker.run(publish, consume, threads)
tenants_dict = {}
for t in tenants:
tenants_dict[t["id"]] = t
return tenants_dict
def _create_users(self):
# NOTE(msdubov): This should be called after _create_tenants().
threads = self.config["resource_management_workers"]
users_per_tenant = self.config["users_per_tenant"]
users = collections.deque()
def publish(queue):
for tenant_id in self.context["tenants"]:
for user_id in range(users_per_tenant):
username = self.generate_random_name()
password = str(uuid.uuid4())
args = (username, password, self.config["project_domain"],
self.config["user_domain"], tenant_id)
queue.append(args)
def consume(cache, args):
username, password, project_dom, user_dom, tenant_id = args
if "client" not in cache:
clients = osclients.Clients(self.credential)
cache["client"] = keystone.wrap(clients.keystone())
client = cache["client"]
user = client.create_user(username, password,
"%s@email.me" % username,
tenant_id, user_dom)
user_credential = objects.Credential(
client.auth_url, user.name, password,
self.context["tenants"][tenant_id]["name"],
consts.EndpointPermission.USER, client.region_name,
project_domain_name=project_dom, user_domain_name=user_dom,
endpoint_type=self.credential.endpoint_type,
https_insecure=self.credential.insecure,
https_cacert=self.credential.cacert)
users.append({"id": user.id,
"credential": user_credential,
"tenant_id": tenant_id})
# NOTE(msdubov): consume() will fill the users list in the closure.
broker.run(publish, consume, threads)
return list(users)
def _delete_tenants(self):
threads = self.config["resource_management_workers"]
self._remove_associated_networks()
def publish(queue):
for tenant_id in self.context["tenants"]:
queue.append(tenant_id)
def consume(cache, tenant_id):
if "client" not in cache:
clients = osclients.Clients(self.credential)
cache["client"] = keystone.wrap(clients.keystone())
cache["client"].delete_project(tenant_id)
broker.run(publish, consume, threads)
self.context["tenants"] = {}
def _delete_users(self):
threads = self.config["resource_management_workers"]
def publish(queue):
for user in self.context["users"]:
queue.append(user["id"])
def consume(cache, user_id):
if "client" not in cache:
clients = osclients.Clients(self.credential)
cache["client"] = keystone.wrap(clients.keystone())
cache["client"].delete_user(user_id)
broker.run(publish, consume, threads)
self.context["users"] = []
@logging.log_task_wrapper(LOG.info, _("Enter context: `users`"))
def setup(self):
"""Create tenants and users, using the broker pattern."""
self.context["users"] = []
self.context["tenants"] = {}
threads = self.config["resource_management_workers"]
LOG.debug("Creating %(tenants)d tenants using %(threads)s threads" %
{"tenants": self.config["tenants"], "threads": threads})
self.context["tenants"] = self._create_tenants()
if len(self.context["tenants"]) < self.config["tenants"]:
raise exceptions.ContextSetupFailure(
ctx_name=self.get_name(),
msg=_("Failed to create the requested number of tenants."))
users_num = self.config["users_per_tenant"] * self.config["tenants"]
LOG.debug("Creating %(users)d users using %(threads)s threads" %
{"users": users_num, "threads": threads})
self.context["users"] = self._create_users()
if len(self.context["users"]) < users_num:
raise exceptions.ContextSetupFailure(
ctx_name=self.get_name(),
msg=_("Failed to create the requested number of users."))
@logging.log_task_wrapper(LOG.info, _("Exit context: `users`"))
def cleanup(self):
"""Delete tenants and users, using the broker pattern."""
self._remove_default_security_group()
self._delete_users()
self._delete_tenants()
|
amit0701/rally
|
rally/plugins/openstack/context/keystone/users.py
|
Python
|
apache-2.0
| 11,325
|
from .codes import (
PASS,
FAILED
)
from .utils import (
validate_list
)
class Response:
def __init__(self, items):
if isinstance(items, dict):
self.__dict__.update(items)
def __getitem__(self, item):
return self.__dict__[item]
def __setitem__(self, key, value):
self.__dict__[key] = value
@staticmethod
def list(items):
if items is None:
return items
new_items = []
for item in items:
new_items.append(Response(item))
return new_items
def get_zone(api_client, zone_name=None, zone_id=None):
"""
@name : get_zone
    @Desc : Returns the Zone Information for a given zone id or Zone Name
@Input : zone_name: Name of the Zone
zone_id : Id of the zone
@Output : 1. Zone Information for the passed inputs else first zone
2. FAILED In case the cmd failed
"""
cmd = {}
response = "zone"
if zone_name is not None:
cmd['name'] = zone_name
if zone_id is not None:
cmd['id'] = zone_id
cmd_out = api_client.listZones(**cmd)
if validate_list(cmd_out, response)[0] != PASS:
return FAILED
    '''
    If both zone name and zone id are None,
    return the first element of the listZones response
    '''
return Response(cmd_out[response][0])
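# ---------------------------------------------------------------------------
# Illustrative usage (hypothetical client and field names, not part of the
# original module): get_zone() wraps listZones and returns a Response that
# allows attribute access, or FAILED when the listing is empty/invalid, e.g.
#
#   zone = get_zone(api_client, zone_name="zone-1")
#   if zone != FAILED:
#       print(zone.id, zone.name)
# ---------------------------------------------------------------------------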
def get_domain(api_client, domain_id=None, domain_name=None):
"""
@name : get_domain
@Desc : Returns the Domain Information for a given domain id or domain name
@Input : domain id : Id of the Domain
domain_name : Name of the Domain
@Output : 1. Domain Information for the passed inputs else first Domain
2. FAILED In case the cmd failed
"""
cmd = {}
response = "domain"
if domain_name is not None:
cmd['name'] = domain_name
if domain_id is not None:
cmd['id'] = domain_id
cmd_out = api_client.listDomains(**cmd)
if validate_list(cmd_out, response)[0] != PASS:
return FAILED
return Response(cmd_out[response][0])
def get_template(api_client, zone_id=None, template_filter="featured", template_type='BUILTIN', template_id=None,
template_name=None, account=None, domain_id=None, project_id=None, hypervisor=None):
"""
@Name : get_template
@Desc : Retrieves the template Information based upon inputs provided
Template is retrieved based upon either of the inputs matched
condition
    @Input : Optional filters: zone, template id/name, account, domain, project, hypervisor
@Output : FAILED in case of any failure
template Information matching the inputs
"""
cmd = {}
response = "template"
cmd['templatefilter'] = template_filter
if domain_id is not None:
cmd['domainid'] = domain_id
if zone_id is not None:
cmd['zoneid'] = zone_id
if template_id is not None:
cmd['id'] = template_id
if template_name is not None:
cmd['name'] = template_name
if hypervisor is not None:
cmd['hypervisor'] = hypervisor
if project_id is not None:
cmd['projectid'] = project_id
if account is not None:
cmd['account'] = account
'''
Get the Templates pertaining to the inputs provided
'''
list_templatesout = api_client.listTemplates(**cmd)
if validate_list(list_templatesout, response)[0] != PASS:
return FAILED
for template in list_templatesout[response]:
if template['isready'] and template['templatetype'] == template_type:
return Response(template)
'''
Return default first template, if no template matched
'''
return Response(list_templatesout[response][0])
def list_routers(api_client, **kwargs):
"""List all Routers matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listRouters(**cmd).get('router', []))
def list_zones(api_client, **kwargs):
"""List all Zones matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listZones(**cmd).get('zone', []))
def list_networks(api_client, **kwargs):
"""List all Networks matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return api_client.listNetworks(**cmd)
def list_vpcs(api_client, **kwargs):
"""List all VPCs matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return api_client.listVPCs(**cmd)
def list_ssvms(api_client, **kwargs):
"""List all SSVMs matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listSystemVms(**cmd).get('systemvm', []))
def list_virtual_machines(api_client, **kwargs):
"""List all VMs matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return api_client.listVirtualMachines(**cmd)
def list_hosts(api_client, **kwargs):
"""List all Hosts matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listHosts(**cmd).get('host', []))
def list_configurations(api_client, **kwargs):
"""List configuration with specified name"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listConfigurations(**cmd)['configuration'])
def list_public_ip(api_client, **kwargs):
"""List all Public IPs matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listPublicIpAddresses(**cmd).get('publicipaddress', []))
def list_nat_rules(api_client, **kwargs):
"""List all NAT rules matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return api_client.listPortForwardingRules(**cmd)
def list_lb_rules(api_client, **kwargs):
"""List all Load balancing rules matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listLoadBalancerRules(**cmd)['loadbalancerrule'])
def list_lb_instances(api_client, **kwargs):
"""List all Load balancing instances matching criteria"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listLoadBalancerRuleInstances(**cmd).get('loadbalancerruleinstance', []))
def list_service_offering(api_client, **kwargs):
"""Lists all available service offerings."""
cmd = {}
cmd.update(kwargs)
# [setattr(cmd, k, v) for k, v in kwargs.items()]
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listServiceOfferings(**cmd)['serviceoffering'])
def list_vlan_ipranges(api_client, **kwargs):
"""Lists all VLAN IP ranges."""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listVlanIpRanges(**cmd).get('vlaniprange', []))
def list_network_offerings(api_client, **kwargs):
"""Lists network offerings"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listNetworkOfferings(**cmd)['networkoffering'])
def list_vpngateways(api_client, **kwargs):
""" Lists VPN gateways """
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return api_client.listVpnGateways(**cmd)
def list_vpc_offerings(api_client, **kwargs):
""" Lists VPC offerings """
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listVPCOfferings(**cmd)['vpcoffering'])
def list_network_acl_lists(api_client, **kwargs):
"""List Network ACL lists"""
cmd = {}
cmd.update(kwargs)
if 'account' in kwargs.keys() and 'domainid' in kwargs.keys():
cmd['listall'] = True
return Response.list(api_client.listNetworkACLLists(**cmd)['networkacllist'])
def get_hypervisor_type(api_client):
"""Return the hypervisor type of the hosts in setup"""
cmd = {'type': 'Routing', 'listall': True}
hosts = api_client.listHosts(**cmd)['host']
hosts_list_validation_result = validate_list(hosts, 'host')
assert hosts_list_validation_result[0] == PASS, "host list validation failed"
return hosts_list_validation_result[1].hypervisor
def get_vpc_offering(api_client, name):
offerings = list_vpc_offerings(api_client, name=name)
return find_exact_match_by_name(offerings, name)
def get_default_vpc_offering(api_client):
return get_vpc_offering(api_client, 'Default VPC offering')
def get_default_redundant_vpc_offering(api_client):
return get_vpc_offering(api_client, 'Redundant VPC offering')
def get_network_offering(api_client, name):
offerings = list_network_offerings(api_client, name=name)
return find_exact_match_by_name(offerings, name)
def get_default_network_offering(api_client):
return get_network_offering(api_client, 'DefaultIsolatedNetworkOfferingForVpcNetworks')
def get_default_guest_network_offering(api_client):
return get_network_offering(api_client, 'DefaultIsolatedNetworkOfferingWithSourceNatService')
def get_default_network_offering_no_load_balancer(api_client):
return get_network_offering(api_client, 'DefaultIsolatedNetworkOfferingForVpcNetworksNoLB')
def get_default_isolated_network_offering(api_client):
return get_network_offering(api_client, 'DefaultIsolatedNetworkOffering')
def get_default_isolated_network_offering_with_egress(api_client):
return get_network_offering(api_client, 'DefaultIsolatedNetworkOfferingWithEgress')
def get_default_redundant_isolated_network_offering(api_client):
return get_network_offering(api_client, 'DefaultRedundantIsolatedNetworkOffering')
def get_default_redundant_isolated_network_offering_with_egress(api_client):
return get_network_offering(api_client, 'DefaultRedundantIsolatedNetworkOfferingWithEgress')
def get_default_private_network_offering(api_client):
return get_network_offering(api_client, 'DefaultPrivateGatewayNetworkOffering')
def get_default_virtual_machine_offering(api_client):
return get_virtual_machine_offering(api_client, 'Small Instance')
def get_virtual_machine_offering(api_client, name):
offerings = list_service_offering(api_client, name=name)
return find_exact_match_by_name(offerings, name)
def get_network_acl(api_client, name=None, acl_id=None, vpc=None):
if vpc:
acls = list_network_acl_lists(api_client, name=name, id=acl_id, vpcid=vpc.id, listall=True)
else:
acls = list_network_acl_lists(api_client, name=name, id=acl_id, listall=True)
    return find_exact_match_by_name(acls, name) if name else (acls[0] if acls else None)
def get_default_allow_vpc_acl(api_client, vpc):
    return get_network_acl(api_client, 'default_allow', vpc=vpc)
def get_default_deny_vpc_acl(api_client, vpc):
    return get_network_acl(api_client, 'default_deny', vpc=vpc)
def get_vpc(api_client, name):
vpcs = list_vpcs(api_client, name=name, listall=True)
return find_exact_match_by_name(vpcs, name)
def get_network(api_client, name=None, nw_id=None, vpc=None):
if vpc:
networks = list_networks(api_client, name=name, id=nw_id, vpcid=vpc.id)
else:
networks = list_networks(api_client, name=name, id=nw_id)
return find_exact_match_by_name(networks, name) if name else networks[0]
def get_virtual_machine(api_client, name, network=None):
if network:
virtual_machines = list_virtual_machines(api_client, name=name, networkid=network.id, listall=True)
else:
virtual_machines = list_virtual_machines(api_client, name=name, listall=True)
return find_exact_match_by_name(virtual_machines, name)
def get_vpngateway(api_client, vpc=None):
vpngateways = list_vpngateways(api_client, vpcid=vpc.id, listall=True)
return next(iter(vpngateways or []), None)
def find_exact_match_by_name(items, name):
items = [item for item in items if item.name == name]
return next(iter(items or []), None)
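# Illustrative usage sketch of the helpers above (not part of the original
# test API); api_client is assumed to be an authenticated Cosmic API client
# and "example-zone" is a placeholder name.
def example_environment_lookup(api_client):
    zone = get_zone(api_client, zone_name="example-zone")
    if zone == FAILED:
        return None
    vpc_offering = get_default_vpc_offering(api_client)
    template = get_template(api_client, zone_id=zone.id, template_filter="featured")
    return zone, vpc_offering, template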
|
MissionCriticalCloud/cosmic
|
cosmic-core/test/integration/tests/cosmic/common.py
|
Python
|
apache-2.0
| 13,182
|
import os.path
from setuptools import setup
about = {}
version_path = os.path.join(os.path.dirname(__file__), 'staticconf', 'version.py')
with open(version_path) as f:
exec(f.read(), about)
setup(
name="PyStaticConfiguration",
version=about['version'],
provides=["staticconf"],
author="Daniel Nephin",
author_email="dnephin@gmail.com",
url="https://github.com/dnephin/PyStaticConfiguration",
description='A python library for loading static configuration',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Operating System :: OS Independent",
"License :: OSI Approved :: Apache Software License",
"Intended Audience :: Developers",
"Development Status :: 5 - Production/Stable",
],
extras_require={
'yaml': ['pyyaml'],
},
packages=['staticconf'],
install_requires=['six'],
license='APACHE20',
)
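# Note (illustrative): the exec() above expects staticconf/version.py to define
# a module-level string named "version", e.g. version = '1.2.3'; that value is
# a placeholder, not the project's actual version.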
|
dnephin/PyStaticConfiguration
|
setup.py
|
Python
|
apache-2.0
| 1,197
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Generates unique ids for use in Falken."""
import base64
import uuid
def generate_unique_id():
return str(uuid.uuid4())
def generate_base64_id():
return base64.b64encode(uuid.uuid4().bytes).decode('utf-8')
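# Illustrative note: uuid.uuid4() yields 16 random bytes, so generate_base64_id()
# returns a 24-character base64 string (22 significant characters plus "=="
# padding), while generate_unique_id() returns the 36-character hyphenated form,
# e.g. "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".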
|
google-research/falken
|
service/api/unique_id.py
|
Python
|
apache-2.0
| 813
|
#
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
from mock import call, Mock
import unittest2
from google.cloud.datastore.entity import Entity as GCDEntity
from google.cloud.exceptions import GCloudError
from google.cloud.streaming.exceptions import Error as GCloudStreamingError
from common import config
from common import constants
from common import datastore_schema as ds
from common.eclipse2017_exceptions import CouldNotObtainCredentialsError
from common import util
from app import uploader
from common_tests.stub import KeyStub, Stub
class UploaderTests(unittest2.TestCase):
"""
Tests for the Uploader class.
"""
directory = '/tmp/eclipse_upload_daemon_temp_dir'
state = dict()
@classmethod
def setUpClass(cls):
os.mkdir(cls.directory)
cls.state['storage'] = uploader.storage
cls.state['datastore'] = uploader.datastore
cls.state['datetime'] = uploader.datetime
cls.state['Pool'] = uploader.Pool
cls.state['sa.get_credentials'] = uploader.sa.get_credentials
@classmethod
def tearDownClass(cls):
os.rmdir(cls.directory)
uploader.storage = cls.state['storage']
uploader.datastore = cls.state['datastore']
uploader.datetime = cls.state['datetime']
uploader.Pool = cls.state['Pool']
uploader.sa.get_credentials = cls.state['sa.get_credentials']
def setUp(self):
self.file_not_ready_suffix = '.notready'
self._temp_files = list()
uploader.storage = Mock()
uploader.datastore = Mock()
uploader.datetime = Mock()
uploader.Pool = Mock()
uploader.sa.get_credentials = Mock()
def tearDown(self):
for fpath in self._temp_files:
os.remove(fpath)
self._temp_files = list()
def test_heal_files_failed_to_upload(self):
temp = uploader._record_status_in_datastore
uploader._record_status_in_datastore = Mock()
fnames = self._get_file_names(5)
errors = uploader.UploadErrors()
errors.failed_to_upload = fnames
pool = Mock()
# Don't care about the return value here, just that it is iterable
# so that uploader.upload doesn't complain
pool.map = Mock(return_value=[])
uploader.Pool = Mock(return_value=pool)
# Call under test
uploader.heal(errors)
# Ensure upload was called correctly
pool.map.assert_called_with(uploader._upload_single, fnames)
# Clean up
uploader._record_status_in_datastore = temp
def test_heal_files_failed_to_delete(self):
num_files = 5
self._write_files(num_files, num_not_ready=0)
errors = uploader.UploadErrors()
errors.failed_to_delete = self._temp_files
self.assertEqual(len(os.listdir(self.directory)), num_files)
# Call under test
uploader.heal(errors)
self.assertEqual(len(os.listdir(self.directory)), 0)
self._temp_files = list()
def test_heal_files_failed_to_record_success_in_ds(self):
temp = uploader._record_status_in_datastore
uploader._record_status_in_datastore = Mock()
fnames = self._get_file_names(5)
for success in (True, False):
errors = uploader.UploadErrors()
if success:
errors.datastore_success = fnames
else:
errors.datastore_failure = fnames
# Call under test
uploader.heal(errors)
uploader._record_status_in_datastore.assert_called_with(
fnames, success=success)
# Clean up
uploader._record_status_in_datastore = temp
def test_scan(self):
# Call under test
fpaths = uploader.scan(self.directory, file_ready=self._file_ready)
self.assertEqual(len(fpaths), 0)
num_files = 10
num_not_ready = 2
self._write_files(num_files, num_not_ready)
# Call under test
fpaths = uploader.scan(self.directory, file_ready=self._file_ready)
self.assertEqual(len(fpaths), num_files - num_not_ready)
def test_upload_no_files(self):
# Call under test
ret_val = uploader.upload([])
self.assertEqual(ret_val, uploader.UploadErrors())
# First thing called by _upload_single is _get_client
# Assert that no clients were created
uploader.datastore.Client.assert_not_called()
def test_upload_correct_threads_created(self):
# Mock this out for our test
temp = uploader._record_status_in_datastore
uploader._record_status_in_datastore = Mock()
for adder in [-2, 2]:
num_files = constants.UPLOAD_DAEMON_MAX_PROCESSES + adder
exp_threads = min(num_files, constants.UPLOAD_DAEMON_MAX_PROCESSES)
self._write_files(num_files, num_not_ready=0)
fpaths = self._temp_files
results = [(True, fpath) for fpath in fpaths]
pool = Mock()
pool.map = Mock(return_value=results)
uploader.Pool = Mock(return_value=pool)
# Call under test
ret_val = uploader.upload(fpaths)
uploader.Pool.assert_called_with(exp_threads)
pool.map.assert_called_with(uploader._upload_single,
fpaths)
# These were deleted by the call to upload
self._temp_files = list()
uploader._record_status_in_datastore = temp
def test_upload_correct_calls_to_record_status_in_ds_made(self):
# We will mock this out for our test
temp = uploader._record_status_in_datastore
uploader._record_status_in_datastore = Mock()
# Create files to upload
num_files = constants.UPLOAD_DAEMON_MAX_PROCESSES
self._write_files(num_files, num_not_ready=0)
fpaths = self._temp_files
successful_uploads, failed_uploads = list(), list()
for i in range(len(fpaths)):
            if i % 2 == 0:
successful_uploads.append(fpaths[i])
else:
failed_uploads.append(fpaths[i])
results = [(True, p) for p in successful_uploads]
results.extend([(False, p) for p in failed_uploads])
pool = Mock()
pool.map = Mock(return_value=results)
uploader.Pool = Mock(return_value=pool)
# Call under test
ret_val = uploader.upload(fpaths)
call1 = call(successful_uploads, success=True)
call2 = call(failed_uploads, success=False)
calls = [call1, call2]
uploader._record_status_in_datastore.assert_has_calls(calls)
# These were deleted by the call to upload
self._temp_files = list()
uploader._record_status_in_datastore = temp
def test_upload_record_status_in_ds_ret_vals_saved(self):
# We will mock this out for our test
temp = uploader._record_status_in_datastore
num_files = constants.UPLOAD_DAEMON_MAX_PROCESSES
for ds_failure in (True, False):
def _record_in_datastore_mock(fpaths, success):
return fpaths if ds_failure else []
uploader._record_status_in_datastore = _record_in_datastore_mock
# Create files to upload
self._write_files(num_files, num_not_ready=0)
fpaths = self._temp_files
# Make some files upload successfully while others fail
successful_uploads, failed_uploads = list(), list()
for i in range(len(fpaths)):
                if i % 2 == 0:
successful_uploads.append(fpaths[i])
else:
failed_uploads.append(fpaths[i])
results = [(True, p) for p in successful_uploads]
results.extend([(False, p) for p in failed_uploads])
pool = Mock()
pool.map = Mock(return_value=results)
uploader.Pool = Mock(return_value=pool)
# Call under test
ret_val = uploader.upload(fpaths)
if ds_failure:
record_success_in_ds_failures = successful_uploads
record_failure_in_ds_failures = failed_uploads
else:
record_success_in_ds_failures = []
record_failure_in_ds_failures = []
self.assertEqual(ret_val.datastore_success,
record_success_in_ds_failures)
self.assertEqual(ret_val.datastore_failure,
record_failure_in_ds_failures)
for fpath in failed_uploads:
os.remove(fpath)
self._temp_files = list()
uploader._record_status_in_datastore = temp
def test_upload_files_deleted(self):
# We will mock this out for our test
temp = uploader._record_status_in_datastore
uploader._record_status_in_datastore = Mock()
for success in (True, False):
# Create files to upload
num_files = constants.UPLOAD_DAEMON_MAX_PROCESSES
self._write_files(num_files, num_not_ready=0)
fpaths = self._temp_files
results = [(True, p) for p in fpaths]
pool = Mock()
pool.map = Mock(return_value=results)
uploader.Pool = Mock(return_value=pool)
exp_len = 0
# Force a failure
if not success:
temp_retry_func = uploader.util.retry_func
uploader.util.retry_func = Mock(side_effect=RuntimeError)
exp_len = len(fpaths)
# Call under test
ret_val = uploader.upload(fpaths)
self.assertEqual(len(ret_val.failed_to_delete), exp_len)
self.assertEqual(len(os.listdir(self.directory)), exp_len)
# Clean up
if not success:
for fpath in fpaths:
os.remove(fpath)
# Reset the retry function
uploader.util.retry_func = temp_retry_func
# These were deleted by the call to upload or above code block
self._temp_files = list()
uploader._record_status_in_datastore = temp
def test_delete_all_files(self):
num_files = 10
self._write_files(num_files, num_not_ready=0)
self.assertEqual(num_files, len(os.listdir(self.directory)))
# Call under test
uploader._delete_all_files(self._temp_files)
self.assertEqual(0, len(os.listdir(self.directory)))
# We have deleted them all
self._temp_files = list()
def test_get_client_could_not_obtain_credentials(self):
uploader.sa.get_credentials = Mock(
side_effect=CouldNotObtainCredentialsError)
with self.assertRaises(CouldNotObtainCredentialsError):
# Call under test
ret_val = uploader._get_client(client_type='datastore')
uploader.datastore.Client.assert_not_called()
# def test_get_client_for_datastore(self):
# credentials = 'secret'
# uploader.sa.get_credentials = Mock(return_value=credentials)
# uploader.datastore.Client = Stub('datastore.Client')
# # Call under test
# ret_val = uploader._get_client(client_type='datastore')
# self.assertEqual(
# ret_val, Stub('datastore.Client', config.PROJECT_ID, credentials))
# def test_get_client_for_gcs(self):
# credentials = 'secret'
# uploader.sa.get_credentials = Mock(return_value=credentials)
# uploader.storage.client.Client = Stub('storage.client.Client')
# # Call under test
# ret_val = uploader._get_client(client_type='storage')
# self.assertEqual(ret_val, Stub('storage.client.Client',
# config.PROJECT_ID, credentials))
def test_get_ds_key_for_file(self):
fname = 'to.file'
fpath = '/arbitrary/path/' + fname
uploader.datastore.key.Key = Stub('datastore.key.Key')
# Call under test
ret_val = uploader._get_ds_key_for_file(fpath)
exp_ret_val = Stub('datastore.key.Key', ds.DATASTORE_PHOTO,
fname, project=config.PROJECT_ID)
self.assertEqual(ret_val, exp_ret_val)
def test_insert_missing_entities(self):
now = 'now'
fnames = self._get_file_names(10)
entities = list()
# Create entities for half the files
for i in range(len(fnames)):
if i % 2 != 1:
key = KeyStub(ds.DATASTORE_PHOTO, fnames[i])
entities.append(GCDEntity(key=key))
uploader.datetime.now = Mock(return_value=now)
uploader.datastore.entity.Entity = GCDEntity
uploader.datastore.key.Key = KeyStub
# Insert missing entities should work with file paths or names
prefixes = ('', '/some/arbitrary/path/')
for prefix in prefixes:
fpaths = [prefix + fname for fname in fnames]
# Call under test
ret_val = uploader._insert_missing_entities(entities, fpaths)
self.assertEqual(len(ret_val), len(fnames))
for fname in fnames:
self.assertTrue(
util.in_list(ret_val, fname, key=lambda x: x.key.name))
            # All the new entities (the second half of them) should have the
            # following
for i in range(len(fnames) / 2, len(fnames)):
self.assertNotIn('user', entities[i])
self.assertEqual(entities[i]['uploaded_date'], now)
def test_record_status_in_datastore_could_not_obtain_credentials(self):
fnames = self._get_file_names(5)
uploader.datastore.Client = Mock(
side_effect=CouldNotObtainCredentialsError)
# Call under test
ret_val = uploader._record_status_in_datastore(fnames, success=True)
self.assertEqual(ret_val, fnames)
def test_record_status_in_datastore_error_getting_entities(self):
fnames = self._get_file_names(5)
gcloud_error = GCloudError('')
# Must set the code since __str__ will be called on this instance.
# There is no way to set the code through the __init__ method
gcloud_error.code = 500
client = Mock()
client.get_multi = Mock(side_effect=gcloud_error)
uploader.datastore.Client = Mock(return_value=client)
uploader.datastore.key.Key = KeyStub
# Call under test
ret_val = uploader._record_status_in_datastore(fnames, success=True)
keys = [KeyStub(ds.DATASTORE_PHOTO, f, project=config.PROJECT_ID)
for f in fnames]
self.assertEqual(ret_val, fnames)
client.get_multi.assert_called_with(keys)
client.put_multi.assert_not_called()
def test_record_status_in_datastore_restricted_fields_in_fetched_entities(self):
num_files = 5
fnames = self._get_file_names(num_files)
# user is a restricted field for datastore photo entities - see
# common/constants.py
entities = [{'user': 123456789} for _ in range(num_files)]
client = Mock()
client.get_multi = Mock(return_value=entities)
uploader.datastore.Client = Mock(return_value=client)
uploader.datastore.key.Key = KeyStub
# Call under test
ret_val = uploader._record_status_in_datastore(fnames, success=True)
keys = [KeyStub(ds.DATASTORE_PHOTO, f, project=config.PROJECT_ID)
for f in fnames]
self.assertEqual(ret_val, list())
client.get_multi.assert_called_with(keys)
# Entities should have been put into datastore, despite having
# restricted fields - this is because the validate data function
# is only called on the new data.
client.put_multi.assert_called_with(entities)
def test_record_status_in_datastore_error_saving_to_datastore(self):
num_files = 5
fnames = self._get_file_names(num_files)
gcloud_error = GCloudError('')
# Must set the code since __str__ will be called on this instance.
# There is no way to set the code through the __init__ method
gcloud_error.code = 500
entities = [dict() for _ in range(num_files)]
client = Mock()
client.get_multi = Mock(return_value=entities)
client.put_multi = Mock(side_effect=gcloud_error)
uploader.datastore.Client = Mock(return_value=client)
uploader.datastore.key.Key = KeyStub
# Call under test
ret_val = uploader._record_status_in_datastore(fnames, success=True)
keys = [KeyStub(ds.DATASTORE_PHOTO, f, project=config.PROJECT_ID)
for f in fnames]
self.assertEqual(ret_val, fnames)
client.get_multi.assert_called_with(keys)
client.put_multi.assert_called_with(entities)
# Make sure the entities were updated correctly
for e in entities:
self.assertTrue(e['in_gcs'])
def test_record_status_in_datastore_missing_entities_success(self):
num_files = 5
fnames = self._get_file_names(num_files)
entities = list()
for i in range(num_files / 2):
key = KeyStub(ds.DATASTORE_PHOTO, fnames[i])
entities.append(GCDEntity(key=key))
client = Mock()
client.get_multi = Mock(return_value=entities)
uploader.datastore.entity.Entity = GCDEntity
uploader.datastore.Client = Mock(return_value=client)
uploader.datastore.key.Key = KeyStub
# Call under test
ret_val = uploader._record_status_in_datastore(fnames, success=True)
keys = [KeyStub(ds.DATASTORE_PHOTO, f, project=config.PROJECT_ID)
for f in fnames]
self.assertEqual(ret_val, [])
client.get_multi.assert_called_with(keys)
client.put_multi.assert_called_with(entities)
# Make sure the entities were updated correctly
self.assertEqual(len(entities), len(fnames))
for e in entities:
self.assertTrue(e['in_gcs'])
def test_record_status_in_datastore_success(self):
num_files = 5
fnames = self._get_file_names(num_files)
entities = list()
for i in range(num_files):
key = KeyStub(ds.DATASTORE_PHOTO, fnames[i])
entities.append(GCDEntity(key=key))
client = Mock()
client.get_multi = Mock(return_value=entities)
uploader.datastore.Client = Mock(return_value=client)
uploader.datastore.key.Key = KeyStub
for success in (True, False):
# Call under test
ret_val = uploader._record_status_in_datastore(fnames,
success=success)
keys = [KeyStub(ds.DATASTORE_PHOTO, f, project=config.PROJECT_ID)
for f in fnames]
self.assertEqual(ret_val, [])
client.get_multi.assert_called_with(keys)
client.put_multi.assert_called_with(entities)
# Make sure the entities were updated correctly
for e in entities:
field = 'in_gcs' if success else 'gcs_upload_failed'
self.assertTrue(e[field])
def test_upload_single_could_not_obtain_credentials_error(self):
fpath = '/arbitrary/path/to.file'
uploader.sa.get_credentials = Mock(
side_effect=CouldNotObtainCredentialsError)
# Call under test
ret_success, ret_fpath = uploader._upload_single(fpath)
self.assertFalse(ret_success)
self.assertEqual(ret_fpath, fpath)
uploader.storage.client.Client.assert_not_called()
uploader.storage.Blob.assert_not_called()
def test_upload_single_gcs_upload_error(self):
gcloud_error = GCloudError('')
gcloud_error.code = 500
streaming_error = GCloudStreamingError()
for error in (gcloud_error, streaming_error):
fpath = '/arbitrary/path/to.file'
bucket = Mock()
blob = Mock()
client = Mock()
blob.upload_from_filename = Mock(side_effect=error)
client.bucket = Mock(return_value=bucket)
uploader.storage.Blob = Mock(return_value=blob)
uploader.storage.client.Client = Mock(return_value=client)
# Call under test
ret_success, ret_fpath = uploader._upload_single(fpath)
self.assertFalse(ret_success)
self.assertEqual(ret_fpath, fpath)
def test_upload_single_success(self):
fname = 'to.file'
fpath = '/arbitrary/path/' + fname
bucket = Mock()
blob = Mock()
client = Mock()
client.bucket = Mock(return_value=bucket)
uploader.storage.Blob = Mock(return_value=blob)
uploader.storage.client.Client = Mock(return_value=client)
# Call under test
ret_success, ret_fpath = uploader._upload_single(fpath)
self.assertTrue(ret_success)
self.assertEqual(ret_fpath, fpath)
blob.upload_from_filename.assert_called_with(fpath)
client.bucket.assert_called_with(config.GCS_BUCKET)
uploader.storage.Blob.assert_called_with(fname, bucket)
# Called by sa.get_credentials
uploader.storage.client.Client.assert_called()
def _file_ready(self, fpath):
"""
Function passed to uploader.scan to return whether a given file is ready
to be uploaded or not.
"""
return not fpath.endswith(self.file_not_ready_suffix)
@staticmethod
def _get_file_names(num_files):
return ['file' + str(i) for i in range(num_files)]
def _get_some_data(self, num_bytes=1024):
"""
Return a random string with num_bytes characters.
"""
return ''.join([chr(ord('a') + int(25 * random.random()))
for _ in range(num_bytes)])
def _write_files(self, num_files, num_not_ready):
if num_not_ready > num_files:
raise ValueError('num_not_ready > num_files')
files = self._get_file_names(num_files)
for i in range(num_files):
if i < num_not_ready:
files[i] += self.file_not_ready_suffix
fpath = os.path.join(self.directory, files[i])
with open(fpath, 'w') as f:
f.write(self._get_some_data())
self._temp_files.append(fpath)
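# Illustrative alternative (sketch, not used by this suite) to the manual
# save/restore of module attributes done in setUpClass/tearDownClass and
# setUp/tearDown above: mock.patch.object restores the original attribute
# automatically once the patch is stopped, e.g.
#     from mock import patch
#     patcher = patch.object(uploader, 'datastore')
#     mocked_datastore = patcher.start()
#     self.addCleanup(patcher.stop)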
|
google/eclipse2017
|
upload/daemon/tests/uploader_test.py
|
Python
|
apache-2.0
| 23,064
|
from collections import Counter
import pandas as pd
# Read the CSV and create the data frame
data_frame = pd.read_csv('buscas.csv')
# Read the feature columns
X_df = data_frame[['home', 'busca', 'logado']]
# Read the result (label) column
Y_df = data_frame['comprou']
# Extract the dummies to replace the categorical data
Xdummies_df = pd.get_dummies(X_df).astype(int)
# Extract the arrays of data and results
X = Xdummies_df.values
Y = Y_df.values
# Use 90% of the data for training and 10% for testing
porcentagem_treino = 0.9
tamanho_treino = int(porcentagem_treino * len(Y))
tamanho_teste = len(Y) - tamanho_treino
treino_dados = X[:tamanho_treino]
treino_marcacoes = Y[:tamanho_treino]
teste_dados = X[-tamanho_teste:]
teste_marcacoes = Y[-tamanho_teste:]
# Hit rate of the baseline ("dumb") algorithm
# Brute-force way of counting
# acertos_um = len(teste_marcacoes[teste_marcacoes==1])
# acertos_zero = len(teste_marcacoes[teste_marcacoes==0])
# taxa_acerto_base = 100.0 * max(acertos_um, acertos_zero) / len(teste_marcacoes)
taxa_acerto_base = 100.0 * max(Counter(teste_marcacoes).itervalues()) / len(teste_marcacoes)
# Create the model for training
from sklearn.naive_bayes import MultinomialNB
modelo = MultinomialNB()
modelo.fit(treino_dados, treino_marcacoes)
# Run the prediction
resultado = modelo.predict(teste_dados)
# Compare whether the result matches the expected labels
diferencas = (resultado == teste_marcacoes)
# In Python True == 1, so summing gives the total number of matches
total_acertos = sum(diferencas)
total_elementos = len(teste_dados)
taxa_acerto = 100.0 * total_acertos / total_elementos
print('Total elementos: %d' % total_elementos)
print('Taxa acerto base: %f' % taxa_acerto_base)
print('Taxa acerto: %f' % taxa_acerto)
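# Illustrative equivalent (sketch) of the manual accuracy computation above
# using scikit-learn's accuracy_score, assuming the same teste_marcacoes and
# resultado arrays already built in this script:
#     from sklearn.metrics import accuracy_score
#     taxa_acerto_sklearn = 100.0 * accuracy_score(teste_marcacoes, resultado)
#     print('Accuracy via accuracy_score: %f' % taxa_acerto_sklearn)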
|
wesleyegberto/courses-projects
|
ia/machine-learning-introducao-classificacao/classifica_buscas.py
|
Python
|
apache-2.0
| 1,718
|
#!/usr/bin/env python
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Antonin Bas (antonin@barefootnetworks.com)
#
#
# -*- coding: utf-8 -*-
import p4c_bm
import os
import sys
SETUP_PY_PATH = os.path.dirname(__file__)
SRC_PATH = os.path.relpath(os.path.join(os.path.dirname(__file__), "p4c_bm"))
from setuptools import setup
from setuptools.command.install import install
from setuptools.command.install_scripts import install_scripts
with open(os.path.join(SETUP_PY_PATH, 'README.rst')) as readme_file:
readme = readme_file.read()
with open(os.path.join(SETUP_PY_PATH, 'HISTORY.rst')) as history_file:
history = history_file.read().replace('.. :changelog:', '')
with open(os.path.join(SRC_PATH, "_version_str.py"), 'w') as version_f:
version_f.write("# This file is auto-generated\n")
version_f.write("version_str = '{}'\n".format(p4c_bm.__version__))
requirements = [
# TODO: put package requirements here
'p4-hlir',
'Tenjin'
]
install_lib = None
class CustomInstall(install):
def run(self):
# in this step we simply retrieve the installation path that we need to
# append to the PYTHONPATH dynamically
global install_lib
assert(install_lib is None)
install_lib = os.path.abspath(self.install_lib)
# if a root was specified we remove it from the install path
if self.root is not None:
assert(install_lib.startswith(self.root))
install_lib = install_lib[len(self.root):]
install.run(self)
class CustomInstallScripts(install_scripts):
def run(self):
# in this second step we edit the script in the build directory to
# replace @pythondir@ with the value of install_lib and we rename the
# script; the modified script will be copied to the installation
# directory by setuptools
assert(install_lib is not None)
in_path = os.path.join(self.build_dir, 'p4c-bmv2.in')
out_path = os.path.join(self.build_dir, 'p4c-bmv2')
with open(in_path, "r") as fin:
with open(out_path, "w") as fout:
for line in fin:
# we use the platform-dependent install path computed by
# setuptools
fout.write(line.replace('@pythondir@', install_lib))
os.remove(os.path.join(self.build_dir, 'p4c-bmv2.in'))
install_scripts.run(self)
setup(
name='p4c_bm',
version=p4c_bm.__version__,
description="Generates the JSON configuration for the behavioral-model",
long_description=readme + '\n\n' + history,
author="Antonin Bas",
author_email='antonin@barefootnetworks.com',
url='https://github.com/antoninbas/p4c_bm',
packages=[
'p4c_bm', 'p4c_bm.util',
],
package_dir={'p4c_bm': SRC_PATH},
include_package_data=True,
install_requires=requirements,
# entry_points={
# 'console_scripts': [
# 'p4c-bmv2=p4c_bm.__main__:main',
# ],
# },
# we use the "template" here, because it is better if this script exists
    # (otherwise I need to provide a custom command for the build step as well)
scripts=['p4c-bmv2.in'],
license="Apache",
zip_safe=False,
keywords='p4c_bm',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
cmdclass={'install': CustomInstall,
'install_scripts': CustomInstallScripts},
)
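# Illustration only: the rewrite performed by CustomInstallScripts turns a
# hypothetical template line such as
#     sys.path.append("@pythondir@")
# in p4c-bmv2.in into something like
#     sys.path.append("/usr/local/lib/python2.7/dist-packages")
# where the replacement value is the install_lib path captured by CustomInstall;
# both the template line and the final path shown here are assumptions, not
# contents taken from this repository.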
|
p4lang/p4c-bm
|
setup.py
|
Python
|
apache-2.0
| 4,221
|
#!/usr/bin/env python2.7
# John Vivian
# Fall-2015
"""
Toil pipeline for exome variant analysis
Tree Structure of variant pipeline (per sample)
                        /-----------> 14 -----------\
      0 ------------> 5 ------------> 15 -----------> 17 ---> 18
     / / \ \         / \ \----------> 16 -----------/
    1 2   3 4       6   7
                    |   |
                    8   9
                    |   |
                   10  11
                    |   |
                   12  13
0 = Start node
1 = reference index
2 = reference dict
3 = normal bam index
4 = tumor bam index
5 = pre-processing node / DAG declaration
6,7 = RealignerTargetCreator
8,9 = IndelRealigner
10,11 = BaseRecalibration
12,13 = PrintReads
14 = MuTect
15 = Pindel
16 = MuSe
17 = Consolidate Output
18 = Upload to S3
===================================================================
:Dependencies:
curl - apt-get install curl
docker - https://docs.docker.com/linux/step_one/
Toil - pip install toil
Optional:
S3AM - pip install --pre s3am (requires ~/.boto)
"""
import argparse
import base64
from collections import OrderedDict
from contextlib import closing
import glob
import hashlib
import os
import shutil
import subprocess
import multiprocessing
import errno
import tarfile
from toil.job import Job
def build_parser():
"""
Contains argparse arguments
"""
parser = argparse.ArgumentParser(description=main.__doc__, formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-r', '--reference', required=True, help="Reference Genome URL")
parser.add_argument('-f', '--config', required=True, help="Each line contains (CSV): UUID,Normal_URL,Tumor_URL")
parser.add_argument('-p', '--phase', required=True, help='1000G_phase1.indels.hg19.sites.fixed.vcf URL')
parser.add_argument('-m', '--mills', required=True, help='Mills_and_1000G_gold_standard.indels.hg19.sites.vcf URL')
parser.add_argument('-d', '--dbsnp', required=True, help='dbsnp_132_b37.leftAligned.vcf URL')
parser.add_argument('-c', '--cosmic', required=True, help='b37_cosmic_v54_120711.vcf URL')
parser.add_argument('-o', '--output_dir', default=None, help='Full path to final output dir')
parser.add_argument('-s', '--ssec', help='A key that can be used to fetch encrypted data')
parser.add_argument('-3', '--s3_dir', default=None, help='S3 Directory, starting with bucket name. e.g.: '
'cgl-driver-projects/ckcc/rna-seq-samples/')
parser.add_argument('-u', '--sudo', dest='sudo', action='store_true', help='Docker usually needs sudo to execute '
                                                                               'locally, but not when running Mesos '
'or when a member of a Docker group.')
return parser
# Convenience functions used in the pipeline
def mkdir_p(path):
"""
It is Easier to Ask for Forgiveness than Permission
"""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def flatten(x):
"""
Flattens a nested array into a single list
x: list/tuple The nested list/tuple to be flattened.
"""
result = []
for el in x:
if hasattr(el, "__iter__") and not isinstance(el, basestring):
result.extend(flatten(el))
else:
result.append(el)
return result
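# Illustrative example: flatten([1, [2, [3, 'ab']]]) returns [1, 2, 3, 'ab'];
# strings are appended whole rather than split into characters because of the
# basestring check above.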
def generate_unique_key(master_key_path, url):
"""
master_key_path: str Path to the BD2K Master Key (for S3 Encryption)
url: str S3 URL (e.g. https://s3-us-west-2.amazonaws.com/bucket/file.txt)
Returns: str 32-byte unique key generated for that URL
"""
with open(master_key_path, 'r') as f:
master_key = f.read()
assert len(master_key) == 32, 'Invalid Key! Must be 32 characters. ' \
'Key: {}, Length: {}'.format(master_key, len(master_key))
new_key = hashlib.sha256(master_key + url).digest()
assert len(new_key) == 32, 'New key is invalid and is not 32 characters: {}'.format(new_key)
return new_key
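# Illustrative note: hashlib.sha256(...).digest() always yields 32 bytes, so the
# derived per-URL key matches the 32-byte master key checked above. Sketch (the
# key path and URL are placeholders):
#     key = generate_unique_key('/path/to/master.key',
#                               'https://s3-us-west-2.amazonaws.com/bucket/sample.bam')
#     assert len(key) == 32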
def download_encrypted_file(job, input_args, name):
"""
Downloads encrypted files from S3 via header injection
input_args: dict Input dictionary defined in main()
name: str Symbolic name associated with file
"""
work_dir = job.fileStore.getLocalTempDir()
key_path = input_args['ssec']
file_path = os.path.join(work_dir, name)
url = input_args[name]
with open(key_path, 'r') as f:
key = f.read()
if len(key) != 32:
raise RuntimeError('Invalid Key! Must be 32 bytes: {}'.format(key))
key = generate_unique_key(key_path, url)
encoded_key = base64.b64encode(key)
encoded_key_md5 = base64.b64encode(hashlib.md5(key).digest())
h1 = 'x-amz-server-side-encryption-customer-algorithm:AES256'
h2 = 'x-amz-server-side-encryption-customer-key:{}'.format(encoded_key)
h3 = 'x-amz-server-side-encryption-customer-key-md5:{}'.format(encoded_key_md5)
try:
subprocess.check_call(['curl', '-fs', '--retry', '5', '-H', h1, '-H', h2, '-H', h3, url, '-o', file_path])
except OSError:
raise RuntimeError('Failed to find "curl". Install via "apt-get install curl"')
assert os.path.exists(file_path)
return job.fileStore.writeGlobalFile(file_path)
def copy_to_output_dir(work_dir, output_dir, uuid=None, files=None):
"""
A list of files to move from work_dir to output_dir.
work_dir: str Current working directory
output_dir: str Output directory for files to go
uuid: str UUID to "stamp" onto output files
files: list List of files to iterate through
"""
for fname in files:
if uuid is None:
shutil.copy(os.path.join(work_dir, fname), os.path.join(output_dir, fname))
else:
shutil.copy(os.path.join(work_dir, fname), os.path.join(output_dir, '{}.{}'.format(uuid, fname)))
def download_from_url(job, url, name):
"""
Simple curl request made for a given url
url: str URL to download
name: str Name to give downloaded file
"""
work_dir = job.fileStore.getLocalTempDir()
file_path = os.path.join(work_dir, name)
if not os.path.exists(file_path):
try:
subprocess.check_call(['curl', '-fs', '--retry', '5', '--create-dir', url, '-o', file_path])
except subprocess.CalledProcessError:
raise RuntimeError(
'\nNecessary file could not be acquired: {}. Check input URL'.format(url))
except OSError:
raise RuntimeError('Failed to find "curl". Install via "apt-get install curl"')
assert os.path.exists(file_path)
return job.fileStore.writeGlobalFile(file_path)
def return_input_paths(job, work_dir, ids, *args):
"""
Given one or more strings representing file_names, return the paths to those files. Each item must be unpacked!
work_dir: str Current working directory
ids: dict Dictionary of fileStore IDs
*args: str(s) for every file in *args, place file in work_dir via FileStore
"""
paths = OrderedDict()
for name in args:
if not os.path.exists(os.path.join(work_dir, name)):
file_path = job.fileStore.readGlobalFile(ids[name], os.path.join(work_dir, name))
else:
file_path = name
paths[name] = file_path
if len(args) == 1:
paths = file_path
return paths
def docker_path(file_path):
"""
Returns the path internal to the docker container (for standard reasons, this is always /data)
"""
return os.path.join('/data', os.path.basename(file_path))
def docker_call(work_dir, tool_parameters, tool, java_opts=None, outfile=None, sudo=False):
"""
Makes subprocess call of a command to a docker container.
tool_parameters: list An array of the parameters to be passed to the tool
tool: str Name of the Docker image to be used (e.g. quay.io/ucsc_cgl/samtools)
java_opts: str Optional commands to pass to a java jar execution. (e.g. '-Xmx15G')
outfile: file Filehandle that stderr will be passed to
sudo: bool If the user wants the docker command executed as sudo
"""
base_docker_call = 'docker run --log-driver=none --rm -v {}:/data'.format(work_dir).split()
if sudo:
base_docker_call = ['sudo'] + base_docker_call
if java_opts:
base_docker_call = base_docker_call + ['-e', 'JAVA_OPTS={}'.format(java_opts)]
try:
if outfile:
subprocess.check_call(base_docker_call + [tool] + tool_parameters, stdout=outfile)
else:
subprocess.check_call(base_docker_call + [tool] + tool_parameters)
except subprocess.CalledProcessError, e:
raise RuntimeError('docker command returned a non-zero exit status. {}'.format(e))
except OSError:
raise RuntimeError('docker not found on system. Install on all nodes.')
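# Illustrative example of the command assembled by docker_call() (paths, image
# and parameters are placeholders): with work_dir='/tmp/job',
# tool='quay.io/ucsc_cgl/samtools' and tool_parameters=['faidx', '/data/ref.fasta'],
# the subprocess call is roughly equivalent to:
#     docker run --log-driver=none --rm -v /tmp/job:/data \
#         quay.io/ucsc_cgl/samtools faidx /data/ref.fasta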
def tarball_files(work_dir, tar_name, uuid=None, files=None):
"""
Tars a group of files together into a tarball
work_dir: str Current Working Directory
tar_name: str Name of tarball
uuid: str UUID to stamp files with
files: str(s) List of filenames to place in the tarball from working directory
"""
with tarfile.open(os.path.join(work_dir, tar_name), 'w:gz') as f_out:
for fname in files:
if uuid:
f_out.add(os.path.join(work_dir, fname), arcname=uuid + '.' + fname)
else:
f_out.add(os.path.join(work_dir, fname), arcname=fname)
def getMeanInsertSize(work_dir, bam_path):
cmd = "sudo docker run --log-driver=none --rm -v {}:/data quay.io/ucsc_cgl/samtools " \
"view -f66 {}".format(work_dir, bam_path)
process = subprocess.Popen(args=cmd, shell=True, stdout=subprocess.PIPE)
b_sum = 0L
b_count = 0L
while True:
line = process.stdout.readline()
if not line:
break
tmp = line.split("\t")
if abs(long(tmp[8])) < 10000:
b_sum += abs(long(tmp[8]))
b_count +=1
process.wait()
mean = b_sum / b_count
print "Using insert size: %d" % (mean)
return int(mean)
# Start of Job Functions
def download_shared_files(job, input_args):
"""
Downloads files shared by all samples in the pipeline
input_args: dict Dictionary of input arguments (from main())
"""
shared_ids = {}
for fname in ['ref.fasta', 'phase.vcf', 'mills.vcf', 'dbsnp.vcf', 'cosmic.vcf']:
shared_ids[fname] = job.addChildJobFn(download_from_url, url=input_args[fname], name=fname).rv()
job.addFollowOnJobFn(reference_preprocessing, input_args, shared_ids)
def reference_preprocessing(job, input_args, shared_ids):
"""
Create index and dict file for reference
    input_args: dict Dictionary of input arguments
shared_ids: dict Dictionary of fileStore IDs
"""
ref_id = shared_ids['ref.fasta']
sudo = input_args['sudo']
shared_ids['ref.fasta.fai'] = job.addChildJobFn(create_reference_index, ref_id, sudo).rv()
shared_ids['ref.dict'] = job.addChildJobFn(create_reference_dict, ref_id, sudo).rv()
job.addFollowOnJobFn(spawn_batch_jobs, input_args, shared_ids)
def create_reference_index(job, ref_id, sudo):
"""
Uses Samtools to create reference index file (.fasta.fai)
ref_id: str The fileStore ID of the reference
sudo: bool Boolean item to determine whether to invoke sudo with docker
"""
work_dir = job.fileStore.getLocalTempDir()
# Retrieve file path to reference
ref_path = docker_path(job.fileStore.readGlobalFile(ref_id, os.path.join(work_dir, 'ref.fasta')))
# Call: Samtools
command = ['faidx', ref_path]
docker_call(work_dir=work_dir, tool_parameters=command,
tool='quay.io/ucsc_cgl/samtools:0.1.19--dd5ac549b95eb3e5d166a5e310417ef13651994e', sudo=sudo)
# Write to fileStore
return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'ref.fasta.fai'))
def create_reference_dict(job, ref_id, sudo):
"""
Uses Picardtools to create reference dictionary (.dict) for the sample
ref_id: str The fileStore ID of the reference
sudo: bool Boolean item to determine whether to invoke sudo with docker
"""
work_dir = job.fileStore.getLocalTempDir()
# Retrieve file path
ref_path = job.fileStore.readGlobalFile(ref_id, os.path.join(work_dir, 'ref.fasta'))
# Call: picardtools
output = os.path.splitext(docker_path(ref_path))[0]
command = ['CreateSequenceDictionary', 'R={}'.format(docker_path(ref_path)), 'O={}.dict'.format(output)]
docker_call(work_dir=work_dir, tool_parameters=command,
tool='quay.io/ucsc_cgl/picardtools:1.95--dd5ac549b95eb3e5d166a5e310417ef13651994e', sudo=sudo)
# Write to fileStore
return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'ref.dict'))
def spawn_batch_jobs(job, input_args, shared_ids):
"""
Spawn a pipeline for each sample in the configuration file
    input_args: dict Dictionary of input arguments
shared_ids: dict Dictionary of fileStore IDs
"""
samples = []
config = input_args['config']
with open(config, 'r') as f:
for line in f.readlines():
if not line.isspace():
samples.append(line.strip().split(','))
for sample in samples:
job.addChildJobFn(download_samples, shared_ids, input_args, sample)
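# Illustrative config line parsed above, following the UUID,Normal_URL,Tumor_URL
# layout described in build_parser(); the values are placeholders:
#     sample-uuid-1234,https://example.com/normal.bam,https://example.com/tumor.bam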
def download_samples(job, ids, input_args, sample):
"""
Defines sample variables then downloads the sample.
ids: dict Dictionary of fileStore IDs
input_args: dict Dictionary of input arguments
sample: str Contains uuid, normal url, and tumor url
"""
uuid, normal_url, tumor_url = sample
    # Create a unique per-sample copy of the input arguments
sample_input = dict(input_args)
sample_input['uuid'] = uuid
sample_input['normal.bam'] = normal_url
sample_input['tumor.bam'] = tumor_url
sample_input['cpu_count'] = multiprocessing.cpu_count()
if sample_input['output_dir']:
sample_input['output_dir'] = os.path.join(input_args['output_dir'], uuid)
# Download sample bams and launch pipeline
if input_args['ssec']:
ids['normal.bam'] = job.addChildJobFn(download_encrypted_file, sample_input, 'normal.bam').rv()
ids['tumor.bam'] = job.addChildJobFn(download_encrypted_file, sample_input, 'tumor.bam').rv()
else:
ids['normal.bam'] = job.addChildJobFn(download_from_url, url=sample_input['normal.bam'], name='normal.bam').rv()
ids['tumor.bam'] = job.addChildJobFn(download_from_url, url=sample_input['tumor.bam'], name='tumor.bam').rv()
job_vars = (sample_input, ids)
job.addFollowOnJobFn(pipeline_launchpoint, job_vars)
def pipeline_launchpoint(job, job_vars):
"""
Statically link the rest of the workflow
job_vars: tuple Contains the input_args and ids dictionaries
"""
pre_processing = job.wrapJobFn(index_bams, job_vars).encapsulate()
run_mutect = job.wrapJobFn(mutect, job_vars, pre_processing.rv())
run_pindel = job.wrapJobFn(pindel, job_vars, pre_processing.rv())
run_muse = job.wrapJobFn(muse, job_vars, pre_processing.rv())
consolidate = job.wrapJobFn(consolidate_output, job_vars, run_mutect.rv(), run_pindel.rv(), run_muse.rv())
# Wire up DAG
job.addChild(pre_processing)
pre_processing.addChild(run_mutect)
pre_processing.addChild(run_pindel)
pre_processing.addChild(run_muse)
pre_processing.addFollowOn(consolidate)
def index_bams(job, job_vars):
"""
Create index (.bai) files for each sample bam
job_vars: tuple Contains the input_args and ids dictionaries
"""
normal_ids = job.addChildJobFn(index, job_vars, 'normal', cores=1, memory='1 G', disk='8 G').rv()
tumor_ids = job.addChildJobFn(index, job_vars, 'tumor', cores=1, memory='1 G', disk='8 G').rv()
return (normal_ids, tumor_ids)
def index(job, job_vars, sample):
"""
Runs samtools index to create (.bai) files
job_vars: tuple Contains the input_args and ids dictionaries
"""
# Unpack convenience variables for job
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
sudo = input_args['sudo']
cores = int(input_args['cpu_count'])
# Retrieve file path
bam = '{}.bam'.format(sample)
path = return_input_paths(job, work_dir, ids, bam)
# Call: index the normal.bam
parameters = ['index', '{}'.format(docker_path(path))]
docker_call(work_dir=work_dir, tool_parameters=parameters,
tool='quay.io/ucsc_cgl/samtools:0.1.19--dd5ac549b95eb3e5d166a5e310417ef13651994e', sudo=sudo)
# Write to fileStore
ids[bam + '.bai'] = job.fileStore.writeGlobalFile(os.path.join(work_dir, bam) + '.bai')
return job.addChildJobFn(realigner_target_creator, job_vars, sample, cores=cores, memory='10 G', disk='15 G').rv()
def realigner_target_creator(job, job_vars, sample):
"""
Creates <type>.intervals file needed for indel realignment
job_vars: tuple Contains the input_args and ids dictionaries
sample: str Either "normal" or "tumor" to track which one is which
"""
# Unpack convenience variables for job
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
sudo = input_args['sudo']
cores = int(input_args['cpu_count'])
# Retrieve input file paths
return_input_paths(job, work_dir, ids, 'ref.fasta', '{}.bam'.format(sample), 'ref.fasta.fai', 'ref.dict',
'{}.bam.bai'.format(sample), 'phase.vcf', 'mills.vcf')
# Output file path
output = os.path.join(work_dir, '{}.intervals'.format(sample))
# Call: GATK -- RealignerTargetCreator
parameters = ['-T', 'RealignerTargetCreator',
'-nt', str(cores),
'-R', '/data/ref.fasta',
'-I', '/data/{}.bam'.format(sample),
'-known', '/data/phase.vcf',
'-known', '/data/mills.vcf',
'--downsampling_type', 'NONE',
'-o', docker_path(output)]
docker_call(tool='quay.io/ucsc_cgl/gatk:3.4--dd5ac549b95eb3e5d166a5e310417ef13651994e',
work_dir=work_dir, tool_parameters=parameters, java_opts='-Xmx10g', sudo=sudo)
# Write to fileStore
ids['{}.intervals'.format(sample)] = job.fileStore.writeGlobalFile(output)
return job.addChildJobFn(indel_realignment, job_vars, sample, cores=1, memory='10 G', disk='30 G').rv()
def indel_realignment(job, job_vars, sample):
"""
Creates realigned bams using <sample>.intervals file from previous step
job_vars: tuple Contains the input_args and ids dictionaries
sample: str Either "normal" or "tumor" to track which one is which
"""
# Unpack convenience variables for job
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
sudo = input_args['sudo']
cores = int(input_args['cpu_count'])
# Retrieve input file paths
return_input_paths(job, work_dir, ids, 'ref.fasta', '{}.bam'.format(sample), 'phase.vcf', 'mills.vcf',
'{}.intervals'.format(sample), 'ref.fasta.fai', 'ref.dict', '{}.bam.bai'.format(sample))
# Output file path
output = os.path.join(work_dir, '{}.indel.bam'.format(sample))
# Call: GATK -- IndelRealigner
parameters = ['-T', 'IndelRealigner',
'-R', '/data/ref.fasta',
'-I', '/data/{}.bam'.format(sample),
'-known', '/data/phase.vcf',
'-known', '/data/mills.vcf',
'-targetIntervals', '/data/{}.intervals'.format(sample),
'--downsampling_type', 'NONE',
'-maxReads', str(720000),
'-maxInMemory', str(5400000),
'-o', docker_path(output)]
docker_call(tool='quay.io/ucsc_cgl/gatk:3.4--dd5ac549b95eb3e5d166a5e310417ef13651994e',
work_dir=work_dir, tool_parameters=parameters, java_opts='-Xmx10g', sudo=sudo)
# Write to fileStore
ids['{}.indel.bam'.format(sample)] = job.fileStore.writeGlobalFile(output)
ids['{}.indel.bai'.format(sample)] = job.fileStore.writeGlobalFile(os.path.splitext(output)[0] + '.bai')
return job.addChildJobFn(base_recalibration, job_vars, sample, cores=cores, memory='15 G', disk='15 G').rv()
def base_recalibration(job, job_vars, sample):
"""
Creates recal table to perform Base Quality Score Recalibration
job_vars: tuple Contains the input_args and ids dictionaries
sample: str Either "normal" or "tumor" to track which one is which
"""
# Unpack convenience variables for job
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
sudo = input_args['sudo']
cores = int(input_args['cpu_count'])
# Retrieve input file paths
return_input_paths(job, work_dir, ids, 'ref.fasta', '{}.indel.bam'.format(sample), 'dbsnp.vcf', 'ref.fasta.fai',
'ref.dict', '{}.indel.bai'.format(sample))
# Output file path
output = os.path.join(work_dir, '{}.recal.table'.format(sample))
    # Call: GATK -- BaseRecalibrator
parameters = ['-T', 'BaseRecalibrator',
'-nct', str(cores),
'-R', '/data/ref.fasta',
'-I', '/data/{}.indel.bam'.format(sample),
'-knownSites', '/data/dbsnp.vcf',
'-o', docker_path(output)]
docker_call(tool='quay.io/ucsc_cgl/gatk:3.4--dd5ac549b95eb3e5d166a5e310417ef13651994e',
work_dir=work_dir, tool_parameters=parameters, java_opts='-Xmx15g', sudo=sudo)
# Write to fileStore
ids['{}.recal.table'.format(sample)] = job.fileStore.writeGlobalFile(output)
return job.addChildJobFn(print_reads, job_vars, sample, cores=cores, memory='15 G', disk='40 G').rv()
def print_reads(job, job_vars, sample):
"""
Create bam that has undergone Base Quality Score Recalibration (BQSR)
job_vars: tuple Contains the input_args and ids dictionaries
sample: str Either "normal" or "tumor" to track which one is which
"""
# Unpack convenience variables for job
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
sudo = input_args['sudo']
cores = int(input_args['cpu_count'])
# Retrieve input file paths
return_input_paths(job, work_dir, ids, 'ref.fasta', '{}.indel.bam'.format(sample), 'ref.fasta.fai',
'ref.dict', '{}.indel.bai'.format(sample), '{}.recal.table'.format(sample))
# Output file
output = os.path.join(work_dir, '{}.bqsr.bam'.format(sample))
# Call: GATK -- PrintReads
parameters = ['-T', 'PrintReads',
'-nct', str(cores),
'-R', '/data/ref.fasta',
'--emit_original_quals',
'-I', '/data/{}.indel.bam'.format(sample),
'-BQSR', '/data/{}.recal.table'.format(sample),
'-o', docker_path(output)]
docker_call(tool='quay.io/ucsc_cgl/gatk:3.4--dd5ac549b95eb3e5d166a5e310417ef13651994e',
work_dir=work_dir, tool_parameters=parameters, java_opts='-Xmx15g', sudo=sudo)
# Write to fileStore
bam_id = job.fileStore.writeGlobalFile(output)
bai_id = job.fileStore.writeGlobalFile(os.path.splitext(output)[0] + '.bai')
return (bam_id, bai_id)
def mutect(job, job_vars, bam_ids):
"""
Calls MuTect to perform variant analysis
job_vars: tuple Contains the input_args and ids dictionaries
bam_ids: tuple Contains a tuple of normal/tumor fileStore ids for bams and index files (bai)
"""
# Unpack convenience variables for job
normal_ids, tumor_ids = bam_ids
normal_bam_id, normal_bai_id = normal_ids
tumor_bam_id, tumor_bai_id = tumor_ids
input_args, ids = job_vars
sudo = input_args['sudo']
work_dir = job.fileStore.getLocalTempDir()
# Retrieve input files
job.fileStore.readGlobalFile(normal_bam_id, os.path.join(work_dir, 'normal.bam'))
job.fileStore.readGlobalFile(normal_bai_id, os.path.join(work_dir, 'normal.bai'))
job.fileStore.readGlobalFile(tumor_bam_id, os.path.join(work_dir, 'tumor.bam'))
job.fileStore.readGlobalFile(tumor_bai_id, os.path.join(work_dir, 'tumor.bai'))
return_input_paths(job, work_dir, ids, 'ref.fasta', 'dbsnp.vcf', 'ref.fasta.fai', 'ref.dict', 'cosmic.vcf')
# Output VCF
uuid = input_args['uuid']
output_name = uuid + '.vcf'
mut_out = docker_path(os.path.join(work_dir, uuid + '.out'))
mut_cov = docker_path(os.path.join(work_dir, uuid + '.cov'))
# Call: MuTect
parameters = ['--analysis_type', 'MuTect',
'--reference_sequence', 'ref.fasta',
'--cosmic', 'cosmic.vcf',
'--dbsnp', 'dbsnp.vcf',
'--input_file:normal', 'normal.bam',
'--input_file:tumor', 'tumor.bam',
'--tumor_lod', str(10),
'--initial_tumor_lod', str(4.0),
'--out', mut_out,
'--coverage_file', mut_cov,
'--vcf', docker_path(output_name)]
docker_call(work_dir=work_dir, tool_parameters=parameters,
tool='quay.io/ucsc_cgl/mutect:1.1.7--e8bf09459cf0aecb9f55ee689c2b2d194754cbd3', sudo=sudo)
# Tarball files
tarball_files(work_dir, 'mutect.tar.gz', files=[uuid + '.vcf', uuid + '.cov', uuid + '.out'])
# Return fileStore ID
return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'mutect.tar.gz'))
def pindel(job, job_vars, bam_ids):
"""
    Calls Pindel to detect indels and structural variants
job_vars: tuple Contains the input_args and ids dictionaries
bam_ids: tuple Contains a tuple of normal/tumor fileStore ids for bams and index files (bai)
"""
# Unpack convenience variables for job
normal_ids, tumor_ids = bam_ids
normal_bam_id, normal_bai_id = normal_ids
tumor_bam_id, tumor_bai_id = tumor_ids
input_args, ids = job_vars
sudo = input_args['sudo']
uuid = input_args['uuid']
cores = input_args['cpu_count']
work_dir = job.fileStore.getLocalTempDir()
# Retrieve input files
job.fileStore.readGlobalFile(normal_bam_id, os.path.join(work_dir, 'normal.bam'))
job.fileStore.readGlobalFile(normal_bai_id, os.path.join(work_dir, 'normal.bai'))
job.fileStore.readGlobalFile(tumor_bam_id, os.path.join(work_dir, 'tumor.bam'))
job.fileStore.readGlobalFile(tumor_bai_id, os.path.join(work_dir, 'tumor.bai'))
return_input_paths(job, work_dir, ids, 'ref.fasta')
# Call: Pindel
parameters = ['-f', '/data/ref.fasta',
'-i', '/data/config.txt',
                  '--number_of_threads', str(cores),
'--minimum_support_for_event', '3',
'--report_long_insertions', 'true',
'--report_breakpoints', 'true',
'-o', uuid]
docker_call(tool='quay.io/ucsc_cgl/pindel:0.2.5b6--4e8d1b31d4028f464b3409c6558fb9dfcad73f88',
work_dir=work_dir, tool_parameters=parameters, sudo=sudo)
output_files = [f for f in glob.glob(os.path.join(work_dir, '*')) if '.bam' not in f and '.bai' not in f]
    tarball_files(work_dir, 'pindel.tar.gz', files=output_files)
# Return fileStore ID
return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'pindel.tar.gz'))
def muse(job, job_vars, bam_ids):
"""
    Calls MuSE to find variants
job_vars: tuple Contains the input_args and ids dictionaries
bam_ids: tuple Contains a tuple of normal/tumor fileStore ids for bams and index files (bai)
"""
# Unpack convenience variables for job
normal_ids, tumor_ids = bam_ids
normal_bam_id, normal_bai_id = normal_ids
tumor_bam_id, tumor_bai_id = tumor_ids
input_args, ids = job_vars
sudo = input_args['sudo']
uuid = input_args['uuid']
cores = int(input_args['cpu_count'])
output_name = uuid + '.muse.vcf'
work_dir = job.fileStore.getLocalTempDir()
# Retrieve input files
job.fileStore.readGlobalFile(normal_bam_id, os.path.join(work_dir, 'normal.bam'))
    job.fileStore.readGlobalFile(normal_bai_id, os.path.join(work_dir, 'normal.bam.bai'))
    job.fileStore.readGlobalFile(tumor_bam_id, os.path.join(work_dir, 'tumor.bam'))
    job.fileStore.readGlobalFile(tumor_bai_id, os.path.join(work_dir, 'tumor.bam.bai'))
return_input_paths(job, work_dir, ids, 'ref.fasta', 'dbsnp.vcf', 'ref.fasta.fai', 'ref.dict')
# Call: MuSE
parameters = ['--muse', 'MuSEv1.0rc',
'--mode', 'wxs',
'--dbsnp', 'dbsnp.vcf',
'--fafile', 'ref.fasta',
'--tumor-bam', 'tumor.bam',
'--tumor-bam-index', 'tumor.bam.bai',
'--normal-bam', 'normal.bam',
'--normal-bam-index', 'normal.bam.bai',
'--outfile', output_name,
                  '--cpus', str(cores)]
docker_call(tool='quay.io/ucsc_cgl/muse:1.0--6add9b0a1662d44fd13bbc1f32eac49326e48562',
work_dir=work_dir, tool_parameters=parameters, sudo=sudo)
    tarball_files(work_dir, 'muse.tar.gz', files=[output_name])
    # Return fileStore ID
    return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'muse.tar.gz'))
def consolidate_output(job, job_vars, mutect_id, pindel_id, muse_id):
"""
Combine the contents of separate zipped outputs into one via streaming
job_vars: tuple Tuple of dictionaries: input_args and ids
    mutect_id, pindel_id, muse_id: str fileStore IDs of the tarballs produced by each variant caller
"""
input_args, ids = job_vars
work_dir = job.fileStore.getLocalTempDir()
uuid = input_args['uuid']
# Retrieve output file paths to consolidate
mutect_tar = job.fileStore.readGlobalFile(mutect_id, os.path.join(work_dir, 'mutect.tar.gz'))
pindel_tar = job.fileStore.readGlobalFile(pindel_id, os.path.join(work_dir, 'pindel.tar.gz'))
muse_tar = job.fileStore.readGlobalFile(muse_id, os.path.join(work_dir, 'muse.tar.gz'))
# I/O
out_tar = os.path.join(work_dir, uuid + '.tar.gz')
# Consolidate separate tarballs into one as streams (avoids unnecessary untaring)
with tarfile.open(os.path.join(work_dir, out_tar), 'w:gz') as f_out:
for tar in [mutect_tar, pindel_tar, muse_tar]:
with tarfile.open(tar, 'r') as f_in:
for tarinfo in f_in:
with closing(f_in.extractfile(tarinfo)) as f_in_file:
if tar == mutect_tar:
tarinfo.name = os.path.join(uuid, 'MuTect', os.path.basename(tarinfo.name))
elif tar == pindel_tar:
                            tarinfo.name = os.path.join(uuid, 'Pindel', os.path.basename(tarinfo.name))
else:
tarinfo.name = os.path.join(uuid, 'MuSe', os.path.basename(tarinfo.name))
f_out.addfile(tarinfo, fileobj=f_in_file)
# Move to output directory of selected
if input_args['output_dir']:
output_dir = input_args['output_dir']
mkdir_p(output_dir)
copy_to_output_dir(work_dir, output_dir, uuid=None, files=[uuid + '.tar.gz'])
# Write output file to fileStore
output_tar = job.fileStore.writeGlobalFile(out_tar)
# If S3 bucket argument specified, upload to S3
if input_args['s3_dir']:
        job.addChildJobFn(upload_output_to_s3, input_args, output_tar)
def upload_output_to_s3(job, input_args, output_tar):
"""
    Uploads the consolidated output tarball to S3 using S3AM
    input_args: dict Dictionary of input arguments
    output_tar: str FileStore ID of the consolidated output tarball
"""
work_dir = job.fileStore.getLocalTempDir()
uuid = input_args['uuid']
# Parse s3_dir to get bucket and s3 path
s3_dir = input_args['s3_dir']
bucket_name = s3_dir.lstrip('/').split('/')[0]
bucket_dir = '/'.join(s3_dir.lstrip('/').split('/')[1:])
# Retrieve VCF file
job.fileStore.readGlobalFile(output_tar, os.path.join(work_dir, uuid + '.tar.gz'))
# Upload to S3 via S3AM
s3am_command = ['s3am',
'upload',
'file://{}'.format(os.path.join(work_dir, uuid + '.tar.gz')),
os.path.join('s3://', bucket_name, bucket_dir, uuid + '.tar.gz')]
subprocess.check_call(s3am_command)
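# Illustrative sketch of the assembled command (bucket and prefix are hypothetical
# example values, not taken from the pipeline): for uuid='sample1' and
# s3_dir='my-bucket/exome/results', the call above is equivalent to running
#   s3am upload file:///<work_dir>/sample1.tar.gz s3://my-bucket/exome/results/sample1.tar.gz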
def main():
"""
This is a Toil pipeline used to perform variant analysis (usually on exomes) from Tumor/Normal BAMs.
All samples are co-cleaned (GATK Indel Realignment (IR) and Base Quality Score Recalibration (BQSR))
    before variant analysis is performed by MuTect, Pindel, and MuSE. The final output of this pipeline
    is a consolidated tarball containing the outputs of all three callers.
Please see the associated README.md for an overview and quickstart walkthrough.
"""
# Define Parser object and add to jobTree
argparser = build_parser()
Job.Runner.addToilOptions(argparser)
pargs = argparser.parse_args()
# Variables to pass to initial job
inputs = {'ref.fasta': pargs.reference,
'config': pargs.config,
'phase.vcf': pargs.phase,
'mills.vcf': pargs.mills,
'dbsnp.vcf': pargs.dbsnp,
'cosmic.vcf': pargs.cosmic,
'output_dir': pargs.output_dir,
'ssec': pargs.ssec,
's3_dir': pargs.s3_dir,
'sudo': pargs.sudo,
'uuid': None,
'normal.bam': None,
'tumor.bam': None,
'cpu_count': None}
# Launch Pipeline
Job.Runner.startToil(Job.wrapJobFn(download_shared_files, inputs), pargs)
if __name__ == '__main__':
main()
|
fnothaft/toil-scripts
|
src/toil_scripts/exome_variant_pipeline/exome_variant_pipeline.py
|
Python
|
apache-2.0
| 34,065
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-23 03:15
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('conversations', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Activity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=16)),
('date', models.DateTimeField()),
('location', models.CharField(max_length=30)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='activities', to=settings.AUTH_USER_MODEL, to_field='username')),
('conversation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='activities', to='conversations.Conversation')),
],
),
]
|
toss-app/toss-backend
|
toss/activities/migrations/0001_initial.py
|
Python
|
apache-2.0
| 1,169
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import requests
from cloudevents.http import CloudEvent, to_binary, to_structured
def send_binary_cloud_event(url):
# This data defines a binary cloudevent
attributes = {
"type": "com.example.sampletype1",
"source": "https://example.com/event-producer",
}
data = {"message": "Hello World!"}
event = CloudEvent(attributes, data)
headers, body = to_binary(event)
# send and print event
requests.post(url, headers=headers, data=body)
print(f"Sent {event['id']} from {event['source']} with " f"{event.data}")
def send_structured_cloud_event(url):
    # This data defines a structured cloudevent
attributes = {
"type": "com.example.sampletype2",
"source": "https://example.com/event-producer",
}
data = {"message": "Hello World!"}
event = CloudEvent(attributes, data)
headers, body = to_structured(event)
# send and print event
requests.post(url, headers=headers, data=body)
print(f"Sent {event['id']} from {event['source']} with " f"{event.data}")
if __name__ == "__main__":
# expects a url from command line.
# e.g. python3 client.py http://localhost:3000/
if len(sys.argv) < 2:
        sys.exit(
            "Usage: python3 client.py <CloudEvents controller URL>"
        )
url = sys.argv[1]
send_binary_cloud_event(url)
send_structured_cloud_event(url)
|
cloudevents/sdk-python
|
samples/http-json-cloudevents/client.py
|
Python
|
apache-2.0
| 2,002
|
from tests import testsSetup
testsSetup
from paperThinRest.test.baseRestTestCase import BaseRestTestCase
class Test(BaseRestTestCase):
def testCrud(self):
data = self.expect(status=201).post('/projects/',data={'title':'something'}).json
self.assertNotEqual(data.get('created'), None)
data2 = self.expect(status=200).get('/projects/id1').jsonObject
self.assertEqual(data2.id,'id1')
editedData = self.expect(status=200).post('/projects/id2',data={'title':'something'}).jsonObject
self.assertEqual(editedData.edited.title,'something')
def testValidateSchema(self):
self.expect(status=400).post('/projects/',data={'title2':'something'})
|
viktorasm/paper-thin-rest
|
exampleProject/tests/testProjects.py
|
Python
|
apache-2.0
| 768
|
#!/usr/bin/python
# Copyright 2015 Google.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for encoder_configuration."""
import os
import unittest
import encoder_configuration
class TestConfiguration(unittest.TestCase):
def test_defaults(self):
# These tests verify reading from the current environment.
# They should work no matter what the variables are set to,
# as long as required variables are set.
self.assertEquals(os.environ['CODEC_WORKDIR'],
encoder_configuration.conf.workdir())
self.assertEquals(os.environ['WORKDIR'],
encoder_configuration.conf.sysdir())
self.assertEquals(os.environ['CODEC_TOOLPATH'],
encoder_configuration.conf.tooldir())
self.assertEquals(os.getenv('CODEC_SCOREPATH', ''),
':'.join(encoder_configuration.conf.scorepath()))
def test_scorepath_variants(self):
# This test works by modifying the environment and then creating
# a new configuration object. It does not use the global configuration
# object.
if 'CODEC_SCOREPATH' in os.environ:
del os.environ['CODEC_SCOREPATH']
os.environ['CODEC_WORKDIR'] = 'strange_string'
my_conf = encoder_configuration.Configuration()
self.assertEquals(0, len(my_conf.scorepath()))
os.environ['CODEC_SCOREPATH'] = 'a:b'
my_conf = encoder_configuration.Configuration()
self.assertEquals(['a', 'b'], my_conf.scorepath())
def test_override(self):
encoder_configuration.conf.override_workdir_for_test('new_value')
self.assertEquals('new_value',
encoder_configuration.conf.workdir())
if __name__ == '__main__':
unittest.main()
|
google/compare-codecs
|
lib/encoder_configuration_unittest.py
|
Python
|
apache-2.0
| 2,207
|
#!/usr/bin/env python
#
# Copyright (C) 2012 LeZiZi Studio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ConfigParser
def connector_get_conf_path():
import os
path = os.getcwd()
while len(path)>0:
if os.path.exists(path+"/AFConnector.conf"):
return path
path=path[0:path.rfind("/")]
def connector_import_package(name):
import sys
cf = ConfigParser.ConfigParser()
cf.read(connector_get_conf_path()+"/AFConnector.conf")
ret = cf.get("python_packages", name)
sys.path.append(connector_get_conf_path()+ret)
def connector_get_file(name):
cf = ConfigParser.ConfigParser()
cf.read(connector_get_conf_path()+"/AFConnector.conf")
name = cf.get("local_file", name)
    if '"' in name:
return name.replace('"','')
else:
return connector_get_conf_path()+name
def main():
try:
connector_import_package("base")
except Exception:
raise ValueError("AFConnector.conf not found.")
main()
|
lezizi/A-Framework
|
python/local-source/AFConnector.py
|
Python
|
apache-2.0
| 1,497
|
"""
Synthetic data loaders for testing.
"""
from bcolz import ctable
from numpy import (
arange,
array,
float64,
full,
iinfo,
uint32,
)
from pandas import DataFrame, Timestamp
from six import iteritems
from sqlite3 import connect as sqlite3_connect
from .base import PipelineLoader
from .frame import DataFrameLoader
from .equity_pricing_loader import (
BcolzDailyBarWriter,
SQLiteAdjustmentReader,
SQLiteAdjustmentWriter,
US_EQUITY_PRICING_BCOLZ_COLUMNS,
)
UINT_32_MAX = iinfo(uint32).max
def nanos_to_seconds(nanos):
return nanos / (1000 * 1000 * 1000)
class MultiColumnLoader(PipelineLoader):
"""
PipelineLoader that can delegate to sub-loaders.
Parameters
----------
loaders : dict
Dictionary mapping columns -> loader
"""
def __init__(self, loaders):
self._loaders = loaders
def load_adjusted_array(self, columns, dates, assets, mask):
"""
Load by delegating to sub-loaders.
"""
out = []
for col in columns:
try:
loader = self._loaders[col]
except KeyError:
raise ValueError("Couldn't find loader for %s" % col)
out.extend(loader.load_adjusted_array([col], dates, assets, mask))
return out
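# Delegation sketch (hypothetical column/loader names): MultiColumnLoader({col_a: loader_a,
# col_b: loader_b}) answers load_adjusted_array([col_a, col_b], dates, assets, mask)
# by asking loader_a for col_a and loader_b for col_b and returning the results
# in column order.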
class ConstantLoader(MultiColumnLoader):
"""
Synthetic PipelineLoader that returns a constant value for each column.
Parameters
----------
constants : dict
Map from column to value(s) to use for that column.
Values can be anything that can be passed as the first positional
argument to a DataFrame of the same shape as `mask`.
mask : pandas.DataFrame
Mask indicating when assets existed.
Indices of this frame are used to align input queries.
Notes
-----
Adjustments are unsupported with ConstantLoader.
"""
def __init__(self, constants, dates, assets):
loaders = {}
for column, const in iteritems(constants):
frame = DataFrame(
const,
index=dates,
columns=assets,
dtype=column.dtype,
)
loaders[column] = DataFrameLoader(
column=column,
baseline=frame,
adjustments=None,
)
super(ConstantLoader, self).__init__(loaders)
class SyntheticDailyBarWriter(BcolzDailyBarWriter):
"""
Bcolz writer that creates synthetic data based on asset lifetime metadata.
For a given asset/date/column combination, we generate a corresponding raw
value using the following formula for OHLCV columns:
data(asset, date, column) = (100,000 * asset_id)
+ (10,000 * column_num)
+ (date - Jan 1 2000).days # ~6000 for 2015
where:
column_num('open') = 0
column_num('high') = 1
column_num('low') = 2
column_num('close') = 3
column_num('volume') = 4
We use days since Jan 1, 2000 to guarantee that there are no collisions
    while also keeping the produced values smaller than UINT32_MAX / 1000.
For 'day' and 'id', we use the standard format expected by the base class.
Parameters
----------
asset_info : DataFrame
DataFrame with asset_id as index and 'start_date'/'end_date' columns.
calendar : DatetimeIndex
Calendar to use for constructing asset lifetimes.
"""
OHLCV = ('open', 'high', 'low', 'close', 'volume')
OHLC = ('open', 'high', 'low', 'close')
PSEUDO_EPOCH = Timestamp('2000-01-01', tz='UTC')
def __init__(self, asset_info, calendar):
super(SyntheticDailyBarWriter, self).__init__()
assert (
# Using .value here to avoid having to care about UTC-aware dates.
self.PSEUDO_EPOCH.value <
calendar.min().value <=
asset_info['start_date'].min().value
)
assert (asset_info['start_date'] < asset_info['end_date']).all()
self._asset_info = asset_info
self._calendar = calendar
def _raw_data_for_asset(self, asset_id):
"""
Generate 'raw' data that encodes information about the asset.
See class docstring for a description of the data format.
"""
# Get the dates for which this asset existed according to our asset
# info.
dates = self._calendar[
self._calendar.slice_indexer(
self.asset_start(asset_id), self.asset_end(asset_id)
)
]
data = full(
(len(dates), len(US_EQUITY_PRICING_BCOLZ_COLUMNS)),
asset_id * (100 * 1000),
dtype=uint32,
)
# Add 10,000 * column-index to OHLCV columns
data[:, :5] += arange(5) * (10 * 1000)
        # Add days since Jan 1 2000 for OHLCV columns.
data[:, :5] += (dates - self.PSEUDO_EPOCH).days[:, None]
frame = DataFrame(
data,
index=dates,
columns=US_EQUITY_PRICING_BCOLZ_COLUMNS,
)
frame['day'] = nanos_to_seconds(dates.asi8)
frame['id'] = asset_id
return ctable.fromdataframe(frame)
def asset_start(self, asset):
ret = self._asset_info.loc[asset]['start_date']
if ret.tz is None:
ret = ret.tz_localize('UTC')
assert ret.tzname() == 'UTC', "Unexpected non-UTC timestamp"
return ret
def asset_end(self, asset):
ret = self._asset_info.loc[asset]['end_date']
if ret.tz is None:
ret = ret.tz_localize('UTC')
assert ret.tzname() == 'UTC', "Unexpected non-UTC timestamp"
return ret
@classmethod
def expected_value(cls, asset_id, date, colname):
"""
Check that the raw value for an asset/date/column triple is as
expected.
Used by tests to verify data written by a writer.
"""
from_asset = asset_id * 100 * 1000
from_colname = cls.OHLCV.index(colname) * (10 * 1000)
from_date = (date - cls.PSEUDO_EPOCH).days
return from_asset + from_colname + from_date
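    # Worked example (illustrative only): for asset_id=1, colname='close'
    # (column_num 3) and date=Timestamp('2015-01-02', tz='UTC'), the days since
    # the pseudo-epoch are 5480, so
    #   expected_value(1, date, 'close') == 100000 + 30000 + 5480 == 135480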
def expected_values_2d(self, dates, assets, colname):
"""
        Return a 2D array containing cls.expected_value(asset_id, date,
        colname) for each date/asset pair in the inputs.
        Values before/after an asset's lifetime are filled with 0 for volume and
NaN for price columns.
"""
if colname == 'volume':
dtype = uint32
missing = 0
else:
dtype = float64
missing = float('nan')
data = full((len(dates), len(assets)), missing, dtype=dtype)
for j, asset in enumerate(assets):
start, end = self.asset_start(asset), self.asset_end(asset)
for i, date in enumerate(dates):
# No value expected for dates outside the asset's start/end
# date.
if not (start <= date <= end):
continue
data[i, j] = self.expected_value(asset, date, colname)
return data
# BEGIN SUPERCLASS INTERFACE
def gen_tables(self, assets):
for asset in assets:
yield asset, self._raw_data_for_asset(asset)
def to_uint32(self, array, colname):
if colname in {'open', 'high', 'low', 'close'}:
# Data is stored as 1000 * raw value.
assert array.max() < (UINT_32_MAX / 1000), "Test data overflow!"
return array * 1000
else:
assert colname in ('volume', 'day'), "Unknown column: %s" % colname
return array
# END SUPERCLASS INTERFACE
class NullAdjustmentReader(SQLiteAdjustmentReader):
"""
A SQLiteAdjustmentReader that stores no adjustments and uses in-memory
SQLite.
"""
def __init__(self):
conn = sqlite3_connect(':memory:')
writer = SQLiteAdjustmentWriter(conn)
empty = DataFrame({
'sid': array([], dtype=uint32),
'effective_date': array([], dtype=uint32),
'ratio': array([], dtype=float),
})
writer.write(splits=empty, mergers=empty, dividends=empty)
super(NullAdjustmentReader, self).__init__(conn)
|
otmaneJai/Zipline
|
zipline/pipeline/loaders/synthetic.py
|
Python
|
apache-2.0
| 8,291
|
#!/usr/bin/env python
import argparse
from cStringIO import StringIO
import logging
from math import ceil
from multiprocessing import Pool
import time
import urlparse
import boto
parser = argparse.ArgumentParser(description="Transfer large files to S3",
prog="s3-mp-upload")
parser.add_argument("src", type=file, help="The file to transfer")
parser.add_argument("dest", help="The S3 destination object")
parser.add_argument("-np", "--num-processes", help="Number of processors to use",
type=int, default=2)
parser.add_argument("-f", "--force", help="Overwrite an existing S3 key",
action="store_true")
parser.add_argument("-s", "--split", help="Split size, in Mb", type=int, default=50)
parser.add_argument("-rrs", "--reduced-redundancy", help="Use reduced redundancy storage. Default is standard.", default=False, action="store_true")
parser.add_argument("--insecure", dest='secure', help="Use HTTP for connection",
default=True, action="store_false")
parser.add_argument("-t", "--max-tries", help="Max allowed retries for http timeout", type=int, default=5)
parser.add_argument("-v", "--verbose", help="Be more verbose", default=False, action="store_true")
parser.add_argument("-q", "--quiet", help="Be less verbose (for use in cron jobs)", default=False, action="store_true")
logger = logging.getLogger("s3-mp-upload")
def do_part_upload(args):
"""
Upload a part of a MultiPartUpload
Open the target file and read in a chunk. Since we can't pickle
S3Connection or MultiPartUpload objects, we have to reconnect and lookup
the MPU object with each part upload.
    :type args: tuple of (string, string, string, int, int, int, bool, int, int)
:param args: The actual arguments of this method. Due to lameness of
multiprocessing, we have to extract these outside of the
function definition.
                 The arguments are: S3 Bucket name, MultiPartUpload id, file
                 name, the part number, part offset, part size, secure flag,
                 max retries, and current retry count
"""
# Multiprocessing args lameness
bucket_name, mpu_id, fname, i, start, size, secure, max_tries, current_tries = args
logger.debug("do_part_upload got args: %s" % (args,))
# Connect to S3, get the MultiPartUpload
s3 = boto.connect_s3()
s3.is_secure = secure
bucket = s3.lookup(bucket_name)
mpu = None
for mp in bucket.list_multipart_uploads():
if mp.id == mpu_id:
mpu = mp
break
if mpu is None:
raise Exception("Could not find MultiPartUpload %s" % mpu_id)
# Read the chunk from the file
fp = open(fname, 'rb')
fp.seek(start)
data = fp.read(size)
fp.close()
if not data:
raise Exception("Unexpectedly tried to read an empty chunk")
def progress(x,y):
logger.debug("Part %d: %0.2f%%" % (i+1, 100.*x/y))
try:
# Do the upload
t1 = time.time()
mpu.upload_part_from_file(StringIO(data), i+1, cb=progress)
# Print some timings
t2 = time.time() - t1
s = len(data)/1024./1024.
logger.info("Uploaded part %s (%0.2fM) in %0.2fs at %0.2fMBps" % (i+1, s, t2, s/t2))
except Exception, err:
logger.debug("Retry request %d of max %d times" % (current_tries, max_tries))
if (current_tries > max_tries):
logger.error(err)
else:
time.sleep(3)
current_tries += 1
do_part_upload((bucket_name, mpu_id, fname, i, start, size, secure, max_tries, current_tries))
def main(src, dest, num_processes=2, split=50, force=False, reduced_redundancy=False, verbose=False, quiet=False, secure=True, max_tries=5):
# Check that dest is a valid S3 url
split_rs = urlparse.urlsplit(dest)
if split_rs.scheme != "s3":
raise ValueError("'%s' is not an S3 url" % dest)
s3 = boto.connect_s3()
s3.is_secure = secure
bucket = s3.lookup(split_rs.netloc)
    if bucket is None:
raise ValueError("'%s' is not a valid bucket" % split_rs.netloc)
key = bucket.get_key(split_rs.path)
# See if we're overwriting an existing key
if key is not None:
if not force:
raise ValueError("'%s' already exists. Specify -f to overwrite it" % dest)
# Determine the splits
part_size = max(5*1024*1024, 1024*1024*split)
src.seek(0,2)
size = src.tell()
num_parts = int(ceil(float(size) / part_size))
logger.debug("Part Size: {0}".format(part_size))
logger.debug("File Size: {0}".format(size))
logger.debug("Num Parts: {0}".format(num_parts))
# If file is less than 5M, just upload it directly
if size < 5*1024*1024:
src.seek(0)
t1 = time.time()
k = boto.s3.key.Key(bucket,split_rs.path)
k.set_contents_from_file(src)
t2 = time.time() - t1
s = size/1024./1024.
logger.info("Finished uploading %0.2fM in %0.2fs (%0.2fMBps)" % (s, t2, s/t2))
return
# Create the multi-part upload object
mpu = bucket.initiate_multipart_upload(split_rs.path, reduced_redundancy=reduced_redundancy)
logger.info("Initialized upload: %s" % mpu.id)
# Generate arguments for invocations of do_part_upload
def gen_args(num_parts, fold_last):
for i in range(num_parts):
part_start = part_size*i
if i == (num_parts-1) and fold_last is True:
yield (bucket.name, mpu.id, src.name, i, part_start, part_size*2, secure, max_tries, 0)
break
else:
yield (bucket.name, mpu.id, src.name, i, part_start, part_size, secure, max_tries, 0)
# If the last part is less than 5M, just fold it into the previous part
fold_last = ((size % part_size) < 5*1024*1024)
# Do the thing
try:
# Create a pool of workers
pool = Pool(processes=num_processes)
t1 = time.time()
pool.map_async(do_part_upload, gen_args(num_parts, fold_last)).get(9999999)
# Print out some timings
t2 = time.time() - t1
s = size/1024./1024.
# Finalize
src.close()
mpu.complete_upload()
logger.info("Finished uploading %0.2fM in %0.2fs (%0.2fMBps)" % (s, t2, s/t2))
except KeyboardInterrupt:
logger.warn("Received KeyboardInterrupt, canceling upload")
pool.terminate()
mpu.cancel_upload()
raise
except Exception, err:
logger.error("Encountered an error, canceling upload")
logger.error(err)
mpu.cancel_upload()
raise
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
args = parser.parse_args()
arg_dict = vars(args)
    if arg_dict['quiet']:
logger.setLevel(logging.WARNING)
    if arg_dict['verbose']:
logger.setLevel(logging.DEBUG)
logger.debug("CLI args: %s" % args)
main(**arg_dict)
|
jbrehm/s3-multipart
|
s3-mp-upload.py
|
Python
|
apache-2.0
| 6,867
|
from unittest import mock
from django.conf import settings
from django.test import TestCase, override_settings
from daiquiri.jobs.tests.mixins import SyncTestMixin
from daiquiri.query.models import QueryJob, Example
@override_settings(QUERY_ANONYMOUS=True)
@mock.patch(settings.ADAPTER_DATABASE + '.submit_query', mock.Mock())
@mock.patch(settings.ADAPTER_DATABASE + '.fetch_nrows', mock.Mock(return_value=100))
@mock.patch(settings.ADAPTER_DATABASE + '.fetch_size', mock.Mock(return_value=100))
@mock.patch(settings.ADAPTER_DATABASE + '.count_rows', mock.Mock(return_value=100))
@mock.patch(settings.ADAPTER_DATABASE + '.rename_table', mock.Mock())
@mock.patch(settings.ADAPTER_DATABASE + '.drop_table', mock.Mock())
@mock.patch(settings.ADAPTER_DATABASE + '.create_user_schema_if_not_exists', mock.Mock())
class SyncTestCase(SyncTestMixin, TestCase):
databases = ('default', 'data', 'tap', 'oai')
fixtures = (
'auth.json',
'metadata.json',
'jobs.json',
'queryjobs.json',
'examples.json'
)
users = (
('admin', 'admin'),
('user', 'user'),
('evil', 'evil'),
('anonymous', None),
)
url_names = {
'list': 'tap:sync-list'
}
jobs = QueryJob.objects.filter(owner__username='user')
def get_parameter_for_new_jobs(self, username):
return [{
'LANG': example.query_language,
'QUERY': example.query_string
} for example in Example.objects.filter(access_level='PUBLIC')]
def get_parameter_for_new_jobs_internal(self, username):
return [{
'LANG': example.query_language,
'QUERY': example.query_string
} for example in Example.objects.filter(access_level='INTERNAL')]
|
aipescience/django-daiquiri
|
daiquiri/tap/tests/test_sync.py
|
Python
|
apache-2.0
| 1,765
|
from collections import namedtuple
import pytest
import requests
from awx.api.versioning import reverse
@pytest.mark.django_db
class TestHostInsights:
def test_insights_bad_host(self, get, hosts, user, mocker):
mocker.patch.object(requests.Session, 'get')
host = hosts(host_count=1)[0]
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
assert response.data['error'] == 'This host is not recognized as an Insights host.'
assert response.status_code == 404
def test_insights_host_missing_from_insights(self, get, hosts, insights_credential, user, mocker):
class Response:
status_code = 200
content = "{'results': []}"
def json(self):
return {'results': []}
mocker.patch.object(requests.Session, 'get', return_value=Response())
host = hosts(host_count=1)[0]
host.insights_system_id = '123e4567-e89b-12d3-a456-426655440000'
host.inventory.insights_credential = insights_credential
host.inventory.save()
host.save()
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
assert response.data['error'] == (
'Could not translate Insights system ID 123e4567-e89b-12d3-a456-426655440000'
' into an Insights platform ID.')
assert response.status_code == 404
def test_insights_no_credential(self, get, hosts, user, mocker):
mocker.patch.object(requests.Session, 'get')
host = hosts(host_count=1)[0]
host.insights_system_id = '123e4567-e89b-12d3-a456-426655440000'
host.save()
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
assert response.data['error'] == 'The Insights Credential for "test-inv" was not found.'
assert response.status_code == 404
@pytest.mark.parametrize("status_code, exception, error, message", [
(502, requests.exceptions.SSLError, 'SSLError while trying to connect to https://myexample.com/whocares/me/', None,),
(504, requests.exceptions.Timeout, 'Request to https://myexample.com/whocares/me/ timed out.', None,),
(502, requests.exceptions.RequestException, 'booo!', 'Unknown exception booo! while trying to GET https://myexample.com/whocares/me/'),
])
def test_insights_exception(self, get, hosts, insights_credential, user, mocker, status_code, exception, error, message):
mocker.patch.object(requests.Session, 'get', side_effect=exception(error))
host = hosts(host_count=1)[0]
host.insights_system_id = '123e4567-e89b-12d3-a456-426655440000'
host.inventory.insights_credential = insights_credential
host.inventory.save()
host.save()
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
        assert response.data['error'] == (message or error)
assert response.status_code == status_code
def test_insights_unauthorized(self, get, hosts, insights_credential, user, mocker):
Response = namedtuple('Response', 'status_code content')
mocker.patch.object(requests.Session, 'get', return_value=Response(401, 'mock 401 err msg'))
host = hosts(host_count=1)[0]
host.insights_system_id = '123e4567-e89b-12d3-a456-426655440000'
host.inventory.insights_credential = insights_credential
host.inventory.save()
host.save()
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
assert response.data['error'] == (
"Unauthorized access. Please check your Insights Credential username and password.")
assert response.status_code == 502
def test_insights_bad_status(self, get, hosts, insights_credential, user, mocker):
Response = namedtuple('Response', 'status_code content')
mocker.patch.object(requests.Session, 'get', return_value=Response(500, 'mock 500 err msg'))
host = hosts(host_count=1)[0]
host.insights_system_id = '123e4567-e89b-12d3-a456-426655440000'
host.inventory.insights_credential = insights_credential
host.inventory.save()
host.save()
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
assert response.data['error'].startswith("Failed to access the Insights API at URL")
assert "Server responded with 500 status code and message mock 500 err msg" in response.data['error']
assert response.status_code == 502
def test_insights_bad_json(self, get, hosts, insights_credential, user, mocker):
class Response:
status_code = 200
content = 'booo!'
def json(self):
raise ValueError("we do not care what this is")
mocker.patch.object(requests.Session, 'get', return_value=Response())
host = hosts(host_count=1)[0]
host.insights_system_id = '123e4567-e89b-12d3-a456-426655440000'
host.inventory.insights_credential = insights_credential
host.inventory.save()
host.save()
url = reverse('api:host_insights', kwargs={'pk': host.pk})
response = get(url, user('admin', True))
assert response.data['error'].startswith("Expected JSON response from Insights at URL")
assert 'insights_id=123e4567-e89b-12d3-a456-426655440000' in response.data['error']
assert response.data['error'].endswith("but instead got booo!")
assert response.status_code == 502
|
GoogleCloudPlatform/sap-deployment-automation
|
third_party/github.com/ansible/awx/awx/main/tests/functional/api/test_host_insights.py
|
Python
|
apache-2.0
| 5,733
|
#!/usr/bin/python3 -W all
# prepare-data.py: prepare data of file 2012-nl.csv for experiments
# usage: prepare-data.py < 2012-nl.csv
# notes:
# 1. collapses labels 5 and 6 to 5
# 2. moves labels 7-13 to 6-12
# 3. removes duplicate tweets
# 20170929 erikt(at)xs4all.nl
import csv
import sys
COMMAND = sys.argv[0]
ID = 0
LABEL = 9
def convertLabel(label):
if (label <= 5): return(label)
else: return(label-1)
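# Example mapping (matches notes 1-2 in the header): labels up to 5 are unchanged,
# convertLabel(6) == 5 (collapsed into 5), and convertLabel(7)..convertLabel(13)
# map to 6..12.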
seen = {}
csvreader = csv.reader(sys.stdin,delimiter=',',quotechar='"')
csvwriter = csv.writer(sys.stdout,delimiter=',',quotechar='"')
for row in csvreader:
try: row[LABEL] = str(convertLabel(int(row[LABEL])))
except: sys.exit(COMMAND+": "+row[LABEL]+" is not a number\n")
if not row[ID] in seen: csvwriter.writerow(row)
seen[row[ID]] = True
|
online-behaviour/machine-learning
|
prepare-data.py
|
Python
|
apache-2.0
| 756
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import base
from models import Post, Comment
from google.appengine.api import users, images, memcache
from google.appengine.ext import ndb, blobstore
from google.appengine.ext.webapp import blobstore_handlers
class FeedHandler(base.BaseHandler):
def get(self):
posts = Post.query(Post.published == True).order(-Post.create)
values = {
'posts': posts.fetch(10),
'update': posts.get().create,
}
self.response.headers['Content-Type'] = 'application/xml'
self.generate('bacheca/atom.xml', values)
class HomePage(base.BaseHandler):
def get(self):
cursor = self.request.get('c')
c = ndb.Cursor(urlsafe=cursor)
qry = Post.query(Post.published == True).order(-Post.create)
posts, next_curs, more = qry.fetch_page(10, start_cursor=c)
if more and next_curs:
next_c = next_curs.urlsafe()
else:
next_c = None
values = {'posts': posts, 'c': next_c}
self.generate('bacheca/posts.html', values)
class DraftsPage(base.BaseHandler):
def get(self):
cursor = self.request.get('c')
c = ndb.Cursor(urlsafe=cursor)
qry = Post.query(Post.published == False).order(-Post.create)
posts, next_curs, more = qry.fetch_page(10, start_cursor=c)
if more and next_curs:
next_c = next_curs.urlsafe()
else:
next_c = None
values = {'posts': posts, 'c': next_c}
self.generate('bacheca/posts.html', values)
class PostPage(base.BaseHandler):
def get(self, post):
p = Post.get_by_id(int(post))
if users.is_current_user_admin() or p.published:
self.generate('bacheca/post.html', {'p': p})
else:
self.redirect('/')
class NewPost(base.BaseHandler):
def get(self):
p_key = Post(title='title', content='content').put()
self.redirect('/bacheca/admin/edit/%s' % p_key.id())
class EditPost(base.BaseHandler):
def get(self, post):
p = Post.get_by_id(int(post))
values = {
'p': p,
'upload_url': blobstore.create_upload_url('/bacheca/admin/upload'),
'code': memcache.get('last_img')
}
self.generate('bacheca/edit.html', values)
def post(self, post):
p = Post.get_by_id(int(post))
p.title = self.request.get('title')
p.content = self.request.get('content')
p.put()
self.redirect(self.request.referer)
class AddComment(base.BaseHandler):
def post(self, post):
p = Post.get_by_id(int(post))
Comment(post=p.key,
name=self.request.get('name'),
comment=self.request.get('comment')).put()
self.redirect('/bacheca/post/%s' % p.id)
class PublishPost(base.BaseHandler):
def get(self, post):
p = Post.get_by_id(int(post))
if p.published == False:
import datetime
p.create = datetime.datetime.now()
p.published = True
else:
p.published = False
p.put()
self.redirect('/bacheca/post/%s' % p.id)
class DeletePost(base.BaseHandler):
def get(self, post):
p = Post.get_by_id(int(post))
p.key.delete()
self.redirect('/bacheca/admin/')
class UploadHandler(blobstore_handlers.BlobstoreUploadHandler):
def post(self):
upload_files = self.get_uploads('file')
blob_info = upload_files[0]
img_url = images.get_serving_url(blob_info.key(), size=767, crop=False)
string = '<img src="%s" class="img-polaroid">' % img_url
memcache.set('last_img', string)
self.redirect(self.request.referer)
import webapp2
from webapp2_extras import routes
app = webapp2.WSGIApplication([
routes.RedirectRoute('/bacheca/', HomePage, name='Bacheca', strict_slash=True),
routes.RedirectRoute('/bacheca/admin/', DraftsPage, name='Admin', strict_slash=True),
routes.PathPrefixRoute('/bacheca', [
webapp2.Route('/atom', FeedHandler),
webapp2.Route(r'/post/<post:(.*)>', PostPage),
webapp2.Route(r'/comment/<post:(.*)>', AddComment),
webapp2.Route('/admin/new', NewPost),
webapp2.Route('/admin/upload', UploadHandler),
webapp2.Route(r'/admin/edit/<post:(.*)>', EditPost),
webapp2.Route(r'/admin/publish/<post:(.*)>', PublishPost),
webapp2.Route(r'/admin/delete/<post:(.*)>', DeletePost),
]), ], debug=base.debug)
if __name__ == "__main__":
app.run()
|
presveva/tornei_all_italiana
|
bacheca.py
|
Python
|
apache-2.0
| 4,558
|
import inspect
from typing import Any, Callable, Dict, Optional, Type, Union
import starlette.requests
from fastapi import Depends, FastAPI
from ray._private.utils import import_attr
from ray.ml.checkpoint import Checkpoint
from ray.ml.predictor import Predictor, DataBatchType
from ray.serve.http_util import ASGIHTTPSender
from ray import serve
DEFAULT_INPUT_SCHEMA = "ray.serve.http_adapters.array_to_databatch"
InputSchemaFn = Callable[[Any], DataBatchType]
def _load_input_schema(
input_schema: Optional[Union[str, InputSchemaFn]]
) -> InputSchemaFn:
if input_schema is None:
input_schema = DEFAULT_INPUT_SCHEMA
if isinstance(input_schema, str):
input_schema = import_attr(input_schema)
assert inspect.isfunction(input_schema), "input schema must be a callable function."
return input_schema
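# Illustrative use (the dotted path is hypothetical): _load_input_schema("my_pkg.adapters.json_to_batch")
# imports and returns that function, while _load_input_schema(None) falls back to
# DEFAULT_INPUT_SCHEMA defined above.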
def _load_checkpoint(
checkpoint: Union[Checkpoint, Dict],
) -> Checkpoint:
if isinstance(checkpoint, dict):
user_keys = set(checkpoint.keys())
expected_keys = {"checkpoint_cls", "uri"}
if user_keys != expected_keys:
            raise ValueError(
                "The `checkpoint` dictionary expects keys "
                f"{expected_keys} but got {user_keys}"
)
checkpoint = import_attr(checkpoint["checkpoint_cls"]).from_uri(
checkpoint["uri"]
)
assert isinstance(checkpoint, Checkpoint)
return checkpoint
def _load_predictor_cls(
predictor_cls: Union[str, Type[Predictor]],
) -> Type[Predictor]:
if isinstance(predictor_cls, str):
predictor_cls = import_attr(predictor_cls)
if not issubclass(predictor_cls, Predictor):
raise ValueError(
f"{predictor_cls} class must be a subclass of ray.ml `Predictor`"
)
return predictor_cls
class ModelWrapper:
def __init__(
self,
predictor_cls: Union[str, Type[Predictor]],
checkpoint: Union[Checkpoint, Dict],
input_schema: Optional[Union[str, InputSchemaFn]] = None,
batching_params: Optional[Dict[str, int]] = None,
):
"""Serve any Ray ML predictor from checkpoint.
Args:
predictor_cls(str, Type[Predictor]): The class or path for predictor class.
The type must be a subclass of ray.ml `Predictor`.
            checkpoint(Checkpoint, dict): The checkpoint object or a dictionary describing
the object.
- The checkpoint object must be a subclass of ray.ml `Checkpoint`.
- The dictionary should be in the form of
{"checkpoint_cls": "import.path.MyCheckpoint",
"uri": "uri_to_load_from"}.
Serve will then call `MyCheckpoint.from_uri("uri_to_load_from")` to
instantiate the object.
input_schema(str, InputSchemaFn, None): The FastAPI input conversion
function. By default, Serve will use the `NdArray` schema and convert to
numpy array. You can pass in any FastAPI dependency resolver that returns
an array. When you pass in a string, Serve will import it.
                Please refer to Serve HTTP adapter documentation to learn more.
batching_params(dict, None): override the default parameters to serve.batch.
"""
predictor_cls = _load_predictor_cls(predictor_cls)
checkpoint = _load_checkpoint(checkpoint)
self.model = predictor_cls.from_checkpoint(checkpoint)
self.app = FastAPI()
input_schema = _load_input_schema(input_schema)
batching_params = batching_params or dict()
@self.app.post("/predict")
@serve.batch(**batching_params)
async def handle_request(inp=Depends(input_schema)):
return self.model.predict(inp)
async def __call__(self, request: starlette.requests.Request):
# NOTE(simon): This is now duplicated from ASGIAppWrapper because we need to
# generate FastAPI on the fly, we should find a way to unify the two.
sender = ASGIHTTPSender()
await self.app(request.scope, receive=request.receive, send=sender)
return sender.build_asgi_response()
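# Usage sketch (illustrative only; class and URI names are hypothetical):
#   ModelWrapper(
#       predictor_cls="my_pkg.MyPredictor",
#       checkpoint={"checkpoint_cls": "my_pkg.MyCheckpoint", "uri": "s3://bucket/ckpt"},
#       input_schema=None,  # falls back to DEFAULT_INPUT_SCHEMA
#   )
# mirrors the constructor arguments documented in the docstring above.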
|
ray-project/ray
|
python/ray/serve/model_wrappers.py
|
Python
|
apache-2.0
| 4,194
|
from unittest import mock
import cumtest
class TestCLIEdit(cumtest.CumCLITest):
FOLLOW = {'url': ('https://manga.madokami.al/Manga/M/MO/MOLE/'
'Molester%20Man'),
'alias': 'molester-man', 'name': 'Molester Man'}
def setUp(self):
super().setUp()
series = self.create_mock_series(**TestCLIEdit.FOLLOW)
series.follow()
def test_edit_alias(self):
ALIAS_NEW = 'molester'
MESSAGE = 'Changed alias for molester-man to molester'
args = ('edit', TestCLIEdit.FOLLOW['alias'], 'alias', ALIAS_NEW)
result = self.invoke(*args)
self.assertEqual(result.exit_code, 0)
self.assertIn(MESSAGE, result.output)
series = self.db.session.query(self.db.Series).get(1)
self.assertEqual(series.alias, ALIAS_NEW)
def test_edit_alias_illegal_value_none(self):
ALIAS_NEW = 'none'
MESSAGE = 'Illegal value none'
args = ('edit', TestCLIEdit.FOLLOW['alias'], 'alias', ALIAS_NEW)
result = self.invoke(*args)
args = ('edit', TestCLIEdit.FOLLOW['alias'], 'alias', ALIAS_NEW)
result = self.invoke(*args)
self.assertEqual(result.exit_code, 1)
self.assertIn(MESSAGE, result.output)
series = self.db.session.query(self.db.Series).get(1)
self.assertEqual(series.alias, TestCLIEdit.FOLLOW['alias'])
def test_edit_directory(self):
DIR_NEW = 'molesterdir'
MESSAGE = 'Changed directory for molester-man to molesterdir'
args = ('edit', TestCLIEdit.FOLLOW['alias'], 'directory', DIR_NEW)
result = self.invoke(*args)
self.assertEqual(result.exit_code, 0)
self.assertIn(MESSAGE, result.output)
series = self.db.session.query(self.db.Series).get(1)
self.assertEqual(series.directory, DIR_NEW)
def test_edit_invalid_setting(self):
MESSAGE = 'Invalid setting rating'
args = ('edit', TestCLIEdit.FOLLOW['alias'], 'rating', '10/10')
result = self.invoke(*args)
self.assertEqual(result.exit_code, 1)
self.assertIn(MESSAGE, result.output)
|
Hamuko/cum
|
tests/test_cli_edit.py
|
Python
|
apache-2.0
| 2,130
|
import json
from ct.models import *
from django.contrib.auth.models import User
from django.utils import dateparse, timezone
from datetime import datetime
import codecs
def store_errors(q, concept, parentUL, conceptIDdict=None):
'store error models associated with question'
errorModels = []
courseletsMap = q.get('courseletsMapError', ())
if conceptIDdict is None:
conceptIDdict = {}
for i, e in enumerate(q.get('error', ())):
em = emUL = saveMapping = None
try:
mapID = courseletsMap[i]
except IndexError:
pass
else:
ulID = conceptIDdict.get(mapID, None)
if ulID is None:
saveMapping = True
print 'WARNING: %s not found in conceptIDdict; treating as new error model' % mapID
else:
if isinstance(ulID, int): # just add existing EM to this question
ul = UnitLesson.objects.get(pk=ulID)
                    emUL = UnitLesson.create_from_lesson(ul.lesson, parentUL.unit,
parent=parentUL)
else: # add new EMLesson to existing EM
try:
if not ulID.startswith('fork:'):
raise ValueError
ul = UnitLesson.objects.get(pk=int(ulID[5:]))
except ValueError:
raise ValueError('bad conceptIDdict ID value %s: should be int or "fork:INT"'
% ulID)
em = ul.lesson.concept # link to existing error model
if not ul.lesson.concept or not ul.lesson.concept.isError:
raise ValueError('%s: not a valid error model'
% ul.lesson.title)
if not emUL: # create new error model lesson
emLesson = Lesson(title='(rename this)', addedBy=parentUL.addedBy,
text=e)
emUL = emLesson.save_as_error_model(concept, parentUL, em)
if saveMapping: # allow other questions to fork this EM
conceptIDdict[mapID] = 'fork:%d' % emUL.pk
errorModels.append(emUL)
return errorModels
def get_or_create_user(username, email='unknown'):
'get user object with specified username, create it if necessary'
try:
u = User.objects.get(username=username)
except User.DoesNotExist:
u = User.objects.create_user(username, email, None,
first_name='Student', last_name=username)
u.save()
return u
def store_response_errors(r, errorModels, response, genericErrors,
genericIndex):
'store all student errors associated with a response'
for se in r['errors']:
error_id = se['error_id']
if isinstance(error_id, int):
emUL = errorModels[error_id]
else: # look up generic error model
i = genericIndex[error_id]
emUL = genericErrors[i]
studentError = StudentError(response=response, atime=response.atime,
errorModel=emUL, author=response.author)
studentError.save()
def store_response(r, course, parentUL, errorModels, genericErrors,
genericIndex, tzinfo=timezone.get_default_timezone()):
'load response w/ username, confidence, selfeval, errors'
user = get_or_create_user(r['username'])
confidence = Response.CONF_CHOICES[r['confidence']][0]
atime = dateparse.parse_datetime(r['submit_time'])
atime = timezone.make_aware(atime, tzinfo)
response = Response(unitLesson=parentUL, lesson=parentUL.lesson,
text=r['answer'], course=course, author=user,
confidence=confidence, atime=atime)
if 'selfeval' in r:
response.selfeval = r['selfeval']
response.save()
store_response_errors(r, errorModels, response, genericErrors,
genericIndex)
return response
def add_concept_resource(conceptID, unit, conceptIDdict=()):
'get concept by courseletsConcept:ID or wikipedia ID, add to unit'
if conceptID.startswith('courseletsConcept:'):
ulID = int(conceptID[18:])
elif conceptID in conceptIDdict:
ulID = conceptIDdict[conceptID]
else:
ulID = None
if ulID is not None:
ul = UnitLesson.objects.get(pk=ulID)
lesson = ul.lesson
concept = lesson.concept
if not concept:
raise ValueError('%s does not link to a concept!' % conceptID)
else:
concept, lesson = Concept.get_from_sourceDB(conceptID, unit.addedBy)
if unit.unitlesson_set.filter(lesson__concept=concept).count() <= 0:
UnitLesson.create_from_lesson(lesson, unit) # attach as unit resource
return concept
def store_new_question(q, unit, concept,
tzinfo=timezone.get_default_timezone(),
kind=Lesson.ORCT_QUESTION):
'create new lessons for question, answer, and add to this unit'
lesson = Lesson(title=q['title'], text=q['text'], addedBy=unit.addedBy,
kind=kind)
if 'date_added' in q:
d = dateparse.parse_date(q['date_added'])
atime = datetime(d.year, d.month, d.day)
lesson.atime = timezone.make_aware(atime, tzinfo)
lesson.save_root(concept)
unitLesson = UnitLesson.create_from_lesson(lesson, unit, order='APPEND',
addAnswer=True)
answer = unitLesson._answer.lesson # get auto-created record
answer.title = q['title'] + ' Answer' # update answer text
answer.text = q['answer']
answer.save()
return unitLesson
def store_question(q, course, unit, genericErrors, genericIndex,
conceptIDdict=(), **kwargs):
'store question linked to concept, error models, answer, responses'
conceptID = q['tests'][0] # link to first concept
concept = add_concept_resource(conceptID, unit, conceptIDdict)
unitLesson = store_new_question(q, unit, concept, **kwargs)
errorModels = store_errors(q, concept, unitLesson, conceptIDdict)
for r in q.get('responses', ()):
store_response(r, course, unitLesson, errorModels, genericErrors,
genericIndex)
print 'saved %s: %d error models, %d responses' \
% (unitLesson.lesson.title, len(errorModels),
len(q.get('responses', ())))
return unitLesson
def index_generic_errors(unit):
'extract generic error models and construct phrase index'
genericErrors = unit.get_aborts()
l = []
for i, ul in enumerate(genericErrors):
l.append((i, ul.lesson.title))
genericIndex = PhraseIndex(l)
return genericErrors, genericIndex
def load_orct_data(infile='orctmerge.json', course=None, unit=None,
courseID=None, unitID=None, conceptIDfile=None):
'load ORCT questions, responses etc into this unit'
if course is None:
course = Course.objects.get(pk=courseID)
if unit is None:
unit = Unit.objects.get(pk=unitID)
genericErrors, genericIndex = index_generic_errors(unit)
orctData = load_json(infile)
if conceptIDfile:
conceptIDdict = load_json(conceptIDfile)
else:
conceptIDdict = {}
for q in orctData:
if q.get('kind', 'SKIP') == 'question':
store_question(q, course, unit, genericErrors, genericIndex,
conceptIDdict)
def load_json(infile):
with codecs.open(infile, 'r', encoding='utf-8') as ifile:
data = json.load(ifile)
return data
class PhraseIndex(object):
def __init__(self, t, nword=2):
'construct phrase index for list of entries of the form [(id, text),]'
self.nword = nword
d = {}
self.sizes = {}
for i, text in t:
n, l = self.get_phrases(text)
self.sizes[i] = n # save numbers of phrases
for j in range(n): # index all phrases in this text
phrase = tuple(l[j:j + nword])
try:
d[phrase].append(i)
except KeyError:
d[phrase] = [i]
self.d = d
def get_phrases(self, text):
'split into words, handling case < nword gracefully'
l = text.split()
if len(l) > self.nword:
return len(l) - self.nword + 1, l
else: # handle short phrases gracefully to allow matching
return 1, l
def __getitem__(self, text):
'find entry with highest phrase match fraction'
n, l = self.get_phrases(text)
counts = {}
for j in range(n):
phrase = tuple(l[j:j + self.nword])
for i in self.d.get(phrase, ()):
counts[i] = counts.get(i, 0) + 1
if not counts:
raise KeyError
l = []
for i, c in counts.items(): # compute match fractions
l.append((c / float(self.sizes[i]), i))
l.sort()
return l[-1][1] # return id with highest match fraction
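# Illustrative behaviour (hypothetical strings): with the default nword=2,
#   idx = PhraseIndex([(0, 'correlation implies causation'), (1, 'sampling bias example')])
#   idx['people think correlation implies causation']  ->  0
# because entry 0 shares the largest fraction of its two-word phrases with the query.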
|
derdmitry/socraticqs2
|
mysite/ct/load_json.py
|
Python
|
apache-2.0
| 9,131
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import re
from ._common import *
class Customize(SubCommand):
def __init__(self, *args, **kwargs):
super(Customize, self).__init__(*args, **kwargs)
@classmethod
def addParser(self, cmdLineParser, subparsers):
parser = cmdLineParser.getSubParser(
"customize", subparsers,
help="Customize VMs")
parser.add_argument(
"name",
metavar="name",
            help="VM to customize")
parser.add_argument(
"--csm", type=str,
metavar="customization",
            help="Path to customization file")
parser.add_argument(
"--nic-add", nargs="+",
default=[], type=cmdLineParser.nicAddType,
metavar="<nic-add>",
dest="nicAdd",
help="Customize network interfaces.\n" \
"[mac=,ip=x.x.x.x/mask,gw=]")
parser.set_defaults(cloneArgs=["name", "csm", "nicAdd"])
@export
def customize(self, name=None, csm=None, nicAdd=[]):
self._checkType(name, (str, vim.VirtualMachine))
self._checkType(csm, (str, type(None)))
self._checkType(nicAdd, list)
for nc in nicAdd:
assert "map" in nc
assert "ip" in nc
assert "mask" in nc
assert "gw" in nc
self._checkType(nc["map"], str)
self._checkType(nc["ip"], str)
self._checkType(nc["mask"], int)
self._checkType(nc["gw"], str)
regexps = [re.compile("^{}$".format(re.escape(name)))]
vm = self.getRegisteredVms(regexps=regexps)[0]
customSpec = vim.vm.customization.Specification()
if csm:
customSpec = self.getCSMByName(csm)
for nicDesc in nicAdd:
mac = nicDesc["map"]
ip = nicDesc["ip"]
mask = nicDesc["mask"]
gw = nicDesc["gw"]
adapterMap = vim.vm.customization.AdapterMapping()
if mac:
adapterMap.macAddress = mac
if not ip:
adapterMap.ip = vim.vm.customization.DhcpIpGenerator()
else:
adapterMap.ip = vim.vm.customization.FixedIp(ipAddress=ip)
if mask:
adapterMap.subnetMask = mask
if gw:
adapterMap.gateway = gw
# adapterMap.dnsDomain
            customSpec.nicSettingsMap.append(adapterMap)
# hostname/domain
# ident = vim.vm.customization.LinuxPrep(hostName=hostname, domain=domain)
# customSpec.identity = ident
# windows ???
# dnsServerList, dnsSuffixList
# globalIPSettings = vim.vm.customization.GlobalIPSettings(dnsServerList=dnsServerList,
# dnsSuffixList=dnsSuffixList)
# customSpec.globalIPSettings = globalIPSettings
task = vm.CustomizeVM(customSpec)
vcTask = VcTask(task)
vcTask.waitTaskDone()
if vcTask.isSuccess():
self.logger.info("Success")
else:
msg = vcTask.error()
self.logger.error("Failed {}".format(repr(msg)))
            raise RuntimeError("Customization failed")
return 0
|
dahuebi/vsmomi
|
vsmomi/commands/customize.py
|
Python
|
apache-2.0
| 3,426
|
#! /usr/bin/env python
# Copyright 2016 WebAssembly Community Group participants
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
A bunch of hackish fixups for testing of SpiderMonkey support. We should
get rid of these ASAP.
This is meant to be run using BINARYEN_SCRIPTS in emcc, and not standalone.
'''
import os
import shutil
import subprocess
import sys
import emscripten
binaryen_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
js_target = sys.argv[1]
wast_target = sys.argv[2]
wasm_target = wast_target[:-5] + '.wasm'
base_wast_target = os.path.basename(wast_target)
base_wasm_target = os.path.basename(wasm_target)
def fix(js, before, after):
assert js.count(before) == 1
return js.replace(before, after)
# fix up js
js = open(js_target).read()
# use the wasm, not wast
js = js.replace('"' + base_wast_target + '"', '"' + base_wasm_target + '"')
js = js.replace("'" + base_wast_target + "'", "'" + base_wasm_target + "'")
open(js_target, 'w').write(js)
shutil.copyfile(wast_target + '.mappedGlobals', wasm_target + '.mappedGlobals')
# convert to binary using spidermonkey
'''
using something like
mozjs -e 'os.file.writeTypedArrayToFile("moz.wasm",
new Uint8Array(wasmTextToBinary(os.file.readFile("test/hello_world.wast"))))'
investigate with
>>> map(chr, map(ord, open('moz.wasm').read()))
or
python -c "print str(map(chr,map(ord,
open('a.out.wasm').read()))).replace(',', '\n')"
'''
subprocess.check_call(
emscripten.shared.SPIDERMONKEY_ENGINE +
['-e', 'os.file.writeTypedArrayToFile("' + wasm_target +
'", new Uint8Array(wasmTextToBinary(os.file.readFile("' +
wast_target + '"))))'])
|
ddcc/binaryen
|
scripts/spidermonkify.py
|
Python
|
apache-2.0
| 2,172
|
#for small Solr Cores, use Luke
#for large Solr Cores, use rows=0to50000
from __future__ import print_function
from pprint import pprint
import json, requests, sys, os, argparse
def lukeHandler(solrURL, outF):
lukeURL = "http://" + os.environ["SOLR_SIM_USER"]+":"+os.environ["SOLR_SIM_PASS"]+ "@" + solrURL.split("://")[-1].rstrip('/') + "/admin/luke?fl=id&numTerms=7120000&wt=json"
#print(lukeURL)
try:
lukeResponse = requests.get(lukeURL, verify=False).json()
topTerms = lukeResponse["fields"]["id"]["topTerms"]
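        # topTerms is a flat [term, count, term, count, ...] list, so step by 2 to print only the id values.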
i=0
while i < len(topTerms):
print(topTerms[i], file=outF)
i+=2
except Exception as e:
print(e)
def solrHandler(solrURL, outF, startRow):
solrURL = "http://{0}:{1}@{2}/select?q=*:*&fl=id&start={3}&rows=500000&wt=json".format(os.environ["SOLR_SIM_USER"], os.environ["SOLR_SIM_PASS"], solrURL.split("://")[-1].rstrip('/'), startRow)
#print(solrURL)
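    # Fetch one 500000-row page of ids starting at startRow; run repeatedly with increasing offsets to cover the index.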
solrResponse = requests.get(solrURL, verify=False).json()
if solrResponse['responseHeader']['status'] == 0:
for document in solrResponse['response']['docs']:
print(document["id"], file=outF)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="fetch all document ids efficiently from Solr index")
parser.add_argument('--solrURL', required=True, help="Solr Core URL")
#parser.add_argument('--outFile', required=True, help="text file containing doc IDs") and args.outFile
parser.add_argument('--startRow', required=True, help="start index, 0, 500000, 1000000, etc")
args = parser.parse_args()
if args.solrURL and args.startRow: # http://imagecat.dyndns.org/solr/dhsnewimagecatdev
outStr = "doc_ids_{0}.txt".format(args.startRow)
with open(outStr, 'w') as outF:
solrHandler(args.solrURL, outF, args.startRow)
|
harsham05/new590DR
|
src/fetch_all_solr_doc_ids.py
|
Python
|
apache-2.0
| 1,941
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ctypes
import os
import subprocess
import time
import signal as signal_module
from robot.utils import (ConnectionCache, abspath, cmdline2list,
encode_to_system, decode_output, is_list_like,
is_truthy, secs_to_timestr, timestr_to_secs,
IRONPYTHON, JYTHON)
from robot.version import get_version
from robot.api import logger
class Process(object):
"""Robot Framework test library for running processes.
This library utilizes Python's
[http://docs.python.org/2/library/subprocess.html|subprocess]
module and its
[http://docs.python.org/2/library/subprocess.html#subprocess.Popen|Popen]
class.
    The library has the following main usages:
    - Running processes in the system and waiting for their completion using
      the `Run Process` keyword.
    - Starting processes in the background using `Start Process`.
    - Waiting for started processes to complete using `Wait For Process` or
      stopping them with `Terminate Process` or `Terminate All Processes`.
This library is new in Robot Framework 2.8.
== Table of contents ==
- `Specifying command and arguments`
- `Process configuration`
- `Active process`
- `Result object`
- `Boolean arguments`
- `Example`
- `Shortcuts`
- `Keywords`
= Specifying command and arguments =
Both `Run Process` and `Start Process` accept the command to execute and
all arguments passed to the command as separate arguments. This makes usage
convenient and also allows these keywords to automatically escape possible
spaces and other special characters in commands and arguments. Notice that
if a command accepts options that themselves accept values, these options
and their values must be given as separate arguments.
When `running processes in shell`, it is also possible to give the whole
command to execute as a single string. The command can then contain
multiple commands to be run together. When using this approach, the caller
    is responsible for escaping.
Examples:
| `Run Process` | ${tools}${/}prog.py | argument | second arg with spaces |
| `Run Process` | java | -jar | ${jars}${/}example.jar | --option | value |
| `Run Process` | prog.py "one arg" && tool.sh | shell=yes | cwd=${tools} |
Starting from Robot Framework 2.8.6, possible non-string arguments are
converted to strings automatically.
= Process configuration =
`Run Process` and `Start Process` keywords can be configured using
optional ``**configuration`` keyword arguments. Configuration arguments
must be given after other arguments passed to these keywords and must
use syntax like ``name=value``. Available configuration arguments are
listed below and discussed further in sections afterwards.
| = Name = | = Explanation = |
| shell | Specifies whether to run the command in shell or not. |
| cwd | Specifies the working directory. |
| env | Specifies environment variables given to the process. |
| env:<name> | Overrides the named environment variable(s) only. |
| stdout | Path of a file where to write standard output. |
| stderr | Path of a file where to write standard error. |
| alias | Alias given to the process. |
Note that because ``**configuration`` is passed using ``name=value`` syntax,
possible equal signs in other arguments passed to `Run Process` and
`Start Process` must be escaped with a backslash like ``name\\=value``.
See `Run Process` for an example.
== Running processes in shell ==
The ``shell`` argument specifies whether to run the process in a shell or
not. By default shell is not used, which means that shell specific commands,
like ``copy`` and ``dir`` on Windows, are not available. You can, however,
run shell scripts and batch files without using a shell.
Giving the ``shell`` argument any non-false value, such as ``shell=True``,
changes the program to be executed in a shell. It allows using the shell
capabilities, but can also make the process invocation operating system
dependent. Having a shell between the actually started process and this
library can also interfere communication with the process such as stopping
it and reading its outputs. Because of these problems, it is recommended
to use the shell only when absolutely necessary.
When using a shell it is possible to give the whole command to execute
as a single string. See `Specifying command and arguments` section for
examples and more details in general.
== Current working directory ==
    By default the child process is executed in the same directory
    as the parent process, i.e. the process running the tests. This
can be changed by giving an alternative location using the ``cwd`` argument.
Forward slashes in the given path are automatically converted to
backslashes on Windows.
`Standard output and error streams`, when redirected to files,
are also relative to the current working directory possibly set using
the ``cwd`` argument.
Example:
| `Run Process` | prog.exe | cwd=${ROOT}/directory | stdout=stdout.txt |
== Environment variables ==
By default the child process will get a copy of the parent process's
environment variables. The ``env`` argument can be used to give the
child a custom environment as a Python dictionary. If there is a need
    to specify only certain environment variables, it is possible to use the
    ``env:<name>=<value>`` format to set or override only those named variables.
It is also possible to use these two approaches together.
Examples:
| `Run Process` | program | env=${environ} |
| `Run Process` | program | env:http_proxy=10.144.1.10:8080 | env:PATH=%{PATH}${:}${PROGDIR} |
| `Run Process` | program | env=${environ} | env:EXTRA=value |
== Standard output and error streams ==
By default processes are run so that their standard output and standard
error streams are kept in the memory. This works fine normally,
but if there is a lot of output, the output buffers may get full and
the program can hang. Additionally on Jython, everything written to
these in-memory buffers can be lost if the process is terminated.
To avoid the above mentioned problems, it is possible to use ``stdout``
and ``stderr`` arguments to specify files on the file system where to
redirect the outputs. This can also be useful if other processes or
other keywords need to read or manipulate the outputs somehow.
Given ``stdout`` and ``stderr`` paths are relative to the `current working
directory`. Forward slashes in the given paths are automatically converted
to backslashes on Windows.
As a special feature, it is possible to redirect the standard error to
the standard output by using ``stderr=STDOUT``.
    Regardless of whether the outputs are redirected to files or not, they are accessible
through the `result object` returned when the process ends.
Examples:
| ${result} = | `Run Process` | program | stdout=${TEMPDIR}/stdout.txt | stderr=${TEMPDIR}/stderr.txt |
| `Log Many` | stdout: ${result.stdout} | stderr: ${result.stderr} |
| ${result} = | `Run Process` | program | stderr=STDOUT |
| `Log` | all output: ${result.stdout} |
Note that the created output files are not automatically removed after
the test run. The user is responsible to remove them if needed.
== Alias ==
A custom name given to the process that can be used when selecting the
`active process`.
Examples:
| `Start Process` | program | alias=example |
| `Run Process` | python | -c | print 'hello' | alias=hello |
= Active process =
    The test library keeps a record of which of the started processes is currently
    active. By default it is the latest process started with `Start Process`,
but `Switch Process` can be used to select a different one. Using
`Run Process` does not affect the active process.
The keywords that operate on started processes will use the active process
by default, but it is possible to explicitly select a different process
using the ``handle`` argument. The handle can be the identifier returned by
`Start Process` or an ``alias`` explicitly given to `Start Process` or
`Run Process`.
= Result object =
`Run Process`, `Wait For Process` and `Terminate Process` keywords return a
result object that contains information about the process execution as its
attributes. The same result object, or some of its attributes, can also
    be retrieved using the `Get Process Result` keyword. Attributes available in the
object are documented in the table below.
| = Attribute = | = Explanation = |
| rc | Return code of the process as an integer. |
| stdout | Contents of the standard output stream. |
| stderr | Contents of the standard error stream. |
| stdout_path | Path where stdout was redirected or ``None`` if not redirected. |
| stderr_path | Path where stderr was redirected or ``None`` if not redirected. |
Example:
| ${result} = | `Run Process` | program |
| `Should Be Equal As Integers` | ${result.rc} | 0 |
| `Should Match` | ${result.stdout} | Some t?xt* |
| `Should Be Empty` | ${result.stderr} | |
| ${stdout} = | `Get File` | ${result.stdout_path} |
| `Should Be Equal` | ${stdout} | ${result.stdout} |
| `File Should Be Empty` | ${result.stderr_path} | |
= Boolean arguments =
Some keywords accept arguments that are handled as Boolean values true or
false. If such an argument is given as a string, it is considered false if
it is either empty or case-insensitively equal to ``false`` or ``no``.
Other strings are considered true regardless their value, and other
argument types are tested using same
[http://docs.python.org/2/library/stdtypes.html#truth-value-testing|rules
as in Python].
True examples:
| `Terminate Process` | kill=True | # Strings are generally true. |
| `Terminate Process` | kill=yes | # Same as the above. |
| `Terminate Process` | kill=${TRUE} | # Python ``True`` is true. |
| `Terminate Process` | kill=${42} | # Numbers other than 0 are true. |
False examples:
| `Terminate Process` | kill=False | # String ``false`` is false. |
| `Terminate Process` | kill=no | # Also string ``no`` is false. |
| `Terminate Process` | kill=${EMPTY} | # Empty string is false. |
| `Terminate Process` | kill=${FALSE} | # Python ``False`` is false. |
Note that prior to Robot Framework 2.8 all non-empty strings, including
``false``, were considered true. Additionally, ``no`` is considered false
only in Robot Framework 2.9 and newer.
= Example =
| ***** Settings *****
| Library Process
| Suite Teardown `Terminate All Processes` kill=True
|
| ***** Test Cases *****
| Example
| `Start Process` program arg1 arg2 alias=First
| ${handle} = `Start Process` command.sh arg | command2.sh shell=True cwd=/path
| ${result} = `Run Process` ${CURDIR}/script.py
| `Should Not Contain` ${result.stdout} FAIL
| `Terminate Process` ${handle}
| ${result} = `Wait For Process` First
| `Should Be Equal As Integers` ${result.rc} 0
"""
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
ROBOT_LIBRARY_VERSION = get_version()
TERMINATE_TIMEOUT = 30
KILL_TIMEOUT = 10
def __init__(self):
self._processes = ConnectionCache('No active process.')
self._results = {}
def run_process(self, command, *arguments, **configuration):
"""Runs a process and waits for it to complete.
``command`` and ``*arguments`` specify the command to execute and
arguments passed to it. See `Specifying command and arguments` for
more details.
``**configuration`` contains additional configuration related to
starting processes and waiting for them to finish. See `Process
configuration` for more details about configuration related to starting
processes. Configuration related to waiting for processes consists of
``timeout`` and ``on_timeout`` arguments that have same semantics as
with `Wait For Process` keyword. By default there is no timeout, and
if timeout is defined the default action on timeout is ``terminate``.
Returns a `result object` containing information about the execution.
Note that possible equal signs in ``*arguments`` must be escaped
with a backslash (e.g. ``name\\=value``) to avoid them to be passed in
as ``**configuration``.
Examples:
| ${result} = | Run Process | python | -c | print 'Hello, world!' |
| Should Be Equal | ${result.stdout} | Hello, world! |
| ${result} = | Run Process | ${command} | stderr=STDOUT | timeout=10s |
| ${result} = | Run Process | ${command} | timeout=1min | on_timeout=continue |
| ${result} = | Run Process | java -Dname\\=value Example | shell=True | cwd=${EXAMPLE} |
This keyword does not change the `active process`.
``timeout`` and ``on_timeout`` arguments are new in Robot Framework
2.8.4.
"""
current = self._processes.current
timeout = configuration.pop('timeout', None)
on_timeout = configuration.pop('on_timeout', 'terminate')
try:
handle = self.start_process(command, *arguments, **configuration)
return self.wait_for_process(handle, timeout, on_timeout)
finally:
self._processes.current = current
def start_process(self, command, *arguments, **configuration):
"""Starts a new process on background.
See `Specifying command and arguments` and `Process configuration`
for more information about the arguments, and `Run Process` keyword
for related examples.
        Makes the started process the new `active process`. Returns an identifier
        that can be used as a handle to activate the started process if needed.
Starting from Robot Framework 2.8.5, processes are started so that
they create a new process group. This allows sending signals to and
terminating also possible child processes. This is not supported by
Jython in general nor by Python versions prior to 2.7 on Windows.
"""
config = ProcessConfig(**configuration)
command = self._get_command(command, arguments, config.shell)
self._log_start(command, config)
process = subprocess.Popen(command, **config.full_config)
self._results[process] = ExecutionResult(process,
config.stdout_stream,
config.stderr_stream)
return self._processes.register(process, alias=config.alias)
def _get_command(self, command, args, use_shell):
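        # Encode command and arguments to the system encoding. Without a shell the list is passed
        # to Popen as-is; with a shell, multiple items are joined into a single command line string.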
command = [encode_to_system(item) for item in [command] + list(args)]
if not use_shell:
return command
if args:
return subprocess.list2cmdline(command)
return command[0]
def _log_start(self, command, config):
if is_list_like(command):
command = self.join_command_line(command)
logger.info('Starting process:\n%s' % command)
logger.debug('Process configuration:\n%s' % config)
def is_process_running(self, handle=None):
"""Checks is the process running or not.
If ``handle`` is not given, uses the current `active process`.
Returns ``True`` if the process is still running and ``False`` otherwise.
"""
return self._processes[handle].poll() is None
def process_should_be_running(self, handle=None,
error_message='Process is not running.'):
"""Verifies that the process is running.
If ``handle`` is not given, uses the current `active process`.
Fails if the process has stopped.
"""
if not self.is_process_running(handle):
raise AssertionError(error_message)
def process_should_be_stopped(self, handle=None,
error_message='Process is running.'):
"""Verifies that the process is not running.
If ``handle`` is not given, uses the current `active process`.
Fails if the process is still running.
"""
if self.is_process_running(handle):
raise AssertionError(error_message)
def wait_for_process(self, handle=None, timeout=None, on_timeout='continue'):
"""Waits for the process to complete or to reach the given timeout.
The process to wait for must have been started earlier with
`Start Process`. If ``handle`` is not given, uses the current
`active process`.
``timeout`` defines the maximum time to wait for the process. It can be
given in
[http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#time-format|
various time formats] supported by Robot Framework, for example, ``42``,
``42 s``, or ``1 minute 30 seconds``.
``on_timeout`` defines what to do if the timeout occurs. Possible values
and corresponding actions are explained in the table below. Notice
that reaching the timeout never fails the test.
| = Value = | = Action = |
| continue | The process is left running (default). |
| terminate | The process is gracefully terminated. |
| kill | The process is forcefully stopped. |
See `Terminate Process` keyword for more details how processes are
terminated and killed.
If the process ends before the timeout or it is terminated or killed,
this keyword returns a `result object` containing information about
the execution. If the process is left running, Python ``None`` is
returned instead.
Examples:
| # Process ends cleanly | | |
| ${result} = | Wait For Process | example |
| Process Should Be Stopped | example | |
| Should Be Equal As Integers | ${result.rc} | 0 |
| # Process does not end | | |
| ${result} = | Wait For Process | timeout=42 secs |
| Process Should Be Running | | |
| Should Be Equal | ${result} | ${NONE} |
| # Kill non-ending process | | |
| ${result} = | Wait For Process | timeout=1min 30s | on_timeout=kill |
| Process Should Be Stopped | | |
| Should Be Equal As Integers | ${result.rc} | -9 |
``timeout`` and ``on_timeout`` are new in Robot Framework 2.8.2.
"""
process = self._processes[handle]
logger.info('Waiting for process to complete.')
if timeout:
timeout = timestr_to_secs(timeout)
if not self._process_is_stopped(process, timeout):
logger.info('Process did not complete in %s.'
% secs_to_timestr(timeout))
return self._manage_process_timeout(handle, on_timeout.lower())
return self._wait(process)
def _manage_process_timeout(self, handle, on_timeout):
if on_timeout == 'terminate':
return self.terminate_process(handle)
elif on_timeout == 'kill':
return self.terminate_process(handle, kill=True)
else:
logger.info('Leaving process intact.')
return None
def _wait(self, process):
result = self._results[process]
result.rc = process.wait() or 0
result.close_streams()
logger.info('Process completed.')
return result
def terminate_process(self, handle=None, kill=False):
"""Stops the process gracefully or forcefully.
If ``handle`` is not given, uses the current `active process`.
By default first tries to stop the process gracefully. If the process
        does not stop in 30 seconds, or the ``kill`` argument is given a true value
        (see `Boolean arguments`), kills the process forcefully. Also stops all
        the child processes of the originally started process.
Waits for the process to stop after terminating it. Returns a `result
object` containing information about the execution similarly as `Wait
For Process`.
On Unix-like machines graceful termination is done using ``TERM (15)``
signal and killing using ``KILL (9)``. Use `Send Signal To Process`
instead if you just want to send either of these signals without
waiting for the process to stop.
On Windows graceful termination is done using ``CTRL_BREAK_EVENT``
event and killing using Win32 API function ``TerminateProcess()``.
Examples:
| ${result} = | Terminate Process | |
| Should Be Equal As Integers | ${result.rc} | -15 | # On Unixes |
| Terminate Process | myproc | kill=true |
Limitations:
- Graceful termination is not supported on Windows by Jython nor by
Python versions prior to 2.7. Process is killed instead.
- Stopping the whole process group is not supported by Jython at all
nor by Python versions prior to 2.7 on Windows.
- On Windows forceful kill only stops the main process, not possible
child processes.
Automatically killing the process if termination fails as well as
returning a result object are new features in Robot Framework 2.8.2.
Terminating also possible child processes, including using
``CTRL_BREAK_EVENT`` on Windows, is new in Robot Framework 2.8.5.
"""
process = self._processes[handle]
if not hasattr(process, 'terminate'):
raise RuntimeError('Terminating processes is not supported '
'by this Python version.')
terminator = self._kill if is_truthy(kill) else self._terminate
try:
terminator(process)
except OSError:
if not self._process_is_stopped(process, self.KILL_TIMEOUT):
raise
logger.debug('Ignored OSError because process was stopped.')
return self._wait(process)
def _kill(self, process):
logger.info('Forcefully killing process.')
if hasattr(os, 'killpg'):
os.killpg(process.pid, signal_module.SIGKILL)
else:
process.kill()
if not self._process_is_stopped(process, self.KILL_TIMEOUT):
raise RuntimeError('Failed to kill process.')
def _terminate(self, process):
logger.info('Gracefully terminating process.')
# Sends signal to the whole process group both on POSIX and on Windows
# if supported by the interpreter.
if hasattr(os, 'killpg'):
os.killpg(process.pid, signal_module.SIGTERM)
elif hasattr(signal_module, 'CTRL_BREAK_EVENT'):
if IRONPYTHON:
# https://ironpython.codeplex.com/workitem/35020
ctypes.windll.kernel32.GenerateConsoleCtrlEvent(
signal_module.CTRL_BREAK_EVENT, process.pid)
else:
process.send_signal(signal_module.CTRL_BREAK_EVENT)
else:
process.terminate()
if not self._process_is_stopped(process, self.TERMINATE_TIMEOUT):
logger.info('Graceful termination failed.')
self._kill(process)
def terminate_all_processes(self, kill=False):
"""Terminates all still running processes started by this library.
This keyword can be used in suite teardown or elsewhere to make
        sure that all processes are stopped.
By default tries to terminate processes gracefully, but can be
configured to forcefully kill them immediately. See `Terminate Process`
that this keyword uses internally for more details.
"""
for handle in range(1, len(self._processes) + 1):
if self.is_process_running(handle):
self.terminate_process(handle, kill=kill)
self.__init__()
def send_signal_to_process(self, signal, handle=None, group=False):
"""Sends the given ``signal`` to the specified process.
If ``handle`` is not given, uses the current `active process`.
        Signal can be specified either as an integer or as a signal name. In the
        latter case it is possible to give the name either with or without the ``SIG``
prefix, but names are case-sensitive. For example, all the examples
below send signal ``INT (2)``:
| Send Signal To Process | 2 | | # Send to active process |
| Send Signal To Process | INT | | |
| Send Signal To Process | SIGINT | myproc | # Send to named process |
This keyword is only supported on Unix-like machines, not on Windows.
What signals are supported depends on the system. For a list of
existing signals on your system, see the Unix man pages related to
signal handling (typically ``man signal`` or ``man 7 signal``).
By default sends the signal only to the parent process, not to possible
child processes started by it. Notice that when `running processes in
        shell`, the shell is the parent process and it depends on the system
        whether the shell propagates the signal to the actually started process.
To send the signal to the whole process group, ``group`` argument can
be set to any true value (see `Boolean arguments`). This is not
supported by Jython, however.
New in Robot Framework 2.8.2. Support for ``group`` argument is new
in Robot Framework 2.8.5.
"""
if os.sep == '\\':
raise RuntimeError('This keyword does not work on Windows.')
process = self._processes[handle]
signum = self._get_signal_number(signal)
logger.info('Sending signal %s (%d).' % (signal, signum))
if is_truthy(group) and hasattr(os, 'killpg'):
os.killpg(process.pid, signum)
elif hasattr(process, 'send_signal'):
process.send_signal(signum)
else:
raise RuntimeError('Sending signals is not supported '
'by this Python version.')
def _get_signal_number(self, int_or_name):
try:
return int(int_or_name)
except ValueError:
return self._convert_signal_name_to_number(int_or_name)
def _convert_signal_name_to_number(self, name):
try:
return getattr(signal_module,
name if name.startswith('SIG') else 'SIG' + name)
except AttributeError:
raise RuntimeError("Unsupported signal '%s'." % name)
def get_process_id(self, handle=None):
"""Returns the process ID (pid) of the process as an integer.
If ``handle`` is not given, uses the current `active process`.
Notice that the pid is not the same as the handle returned by
`Start Process` that is used internally by this library.
"""
return self._processes[handle].pid
def get_process_object(self, handle=None):
"""Return the underlying ``subprocess.Popen`` object.
If ``handle`` is not given, uses the current `active process`.
"""
return self._processes[handle]
def get_process_result(self, handle=None, rc=False, stdout=False,
stderr=False, stdout_path=False, stderr_path=False):
"""Returns the specified `result object` or some of its attributes.
The given ``handle`` specifies the process whose results should be
returned. If no ``handle`` is given, results of the current `active
        process` are returned. In either case, the process must have
        finished before this keyword can be used. In practice this means
that processes started with `Start Process` must be finished either
with `Wait For Process` or `Terminate Process` before using this
keyword.
If no other arguments than the optional ``handle`` are given, a whole
`result object` is returned. If one or more of the other arguments
are given any true value, only the specified attributes of the
`result object` are returned. These attributes are always returned
in the same order as arguments are specified in the keyword signature.
See `Boolean arguments` section for more details about true and false
values.
Examples:
| Run Process | python | -c | print 'Hello, world!' | alias=myproc |
| # Get result object | | |
| ${result} = | Get Process Result | myproc |
| Should Be Equal | ${result.rc} | ${0} |
| Should Be Equal | ${result.stdout} | Hello, world! |
| Should Be Empty | ${result.stderr} | |
| # Get one attribute | | |
| ${stdout} = | Get Process Result | myproc | stdout=true |
| Should Be Equal | ${stdout} | Hello, world! |
| # Multiple attributes | | |
| ${stdout} | ${stderr} = | Get Process Result | myproc | stdout=yes | stderr=yes |
| Should Be Equal | ${stdout} | Hello, world! |
| Should Be Empty | ${stderr} | |
Although getting results of a previously executed process can be handy
in general, the main use case for this keyword is returning results
over the remote library interface. The remote interface does not
support returning the whole result object, but individual attributes
can be returned without problems.
New in Robot Framework 2.8.2.
"""
result = self._results[self._processes[handle]]
if result.rc is None:
raise RuntimeError('Getting results of unfinished processes '
'is not supported.')
attributes = self._get_result_attributes(result, rc, stdout, stderr,
stdout_path, stderr_path)
if not attributes:
return result
elif len(attributes) == 1:
return attributes[0]
return attributes
def _get_result_attributes(self, result, *includes):
attributes = (result.rc, result.stdout, result.stderr,
result.stdout_path, result.stderr_path)
includes = (is_truthy(incl) for incl in includes)
return tuple(attr for attr, incl in zip(attributes, includes) if incl)
def switch_process(self, handle):
"""Makes the specified process the current `active process`.
The handle can be an identifier returned by `Start Process` or
the ``alias`` given to it explicitly.
Example:
| Start Process | prog1 | alias=process1 |
| Start Process | prog2 | alias=process2 |
| # currently active process is process2 |
| Switch Process | process1 |
| # now active process is process1 |
"""
self._processes.switch(handle)
def _process_is_stopped(self, process, timeout):
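        # Poll the process in at most 0.1 second slices until it exits or the timeout is reached.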
stopped = lambda: process.poll() is not None
max_time = time.time() + timeout
while time.time() <= max_time and not stopped():
time.sleep(min(0.1, timeout))
return stopped()
def split_command_line(self, args, escaping=False):
"""Splits command line string into a list of arguments.
        The string is split at spaces, but arguments surrounded in quotes may
        contain spaces. If ``escaping`` is given a true value, then
backslash is treated as an escape character. It can escape unquoted
spaces, quotes inside quotes, and so on, but it also requires using
double backslashes when using Windows paths.
Examples:
| @{cmd} = | Split Command Line | --option "value with spaces" |
| Should Be True | $cmd == ['--option', 'value with spaces'] |
New in Robot Framework 2.9.2.
"""
return cmdline2list(args, escaping=escaping)
def join_command_line(self, *args):
"""Joins arguments into one command line string.
        In the resulting command line string, arguments are delimited with a space,
arguments containing spaces are surrounded with quotes, and possible
quotes are escaped with a backslash.
        If this keyword is given only one argument and that is a list-like
object, then the values of that list are joined instead.
Example:
| ${cmd} = | Join Command Line | --option | value with spaces |
| Should Be Equal | ${cmd} | --option "value with spaces" |
New in Robot Framework 2.9.2.
"""
if len(args) == 1 and is_list_like(args[0]):
args = args[0]
return subprocess.list2cmdline(args)
class ExecutionResult(object):
def __init__(self, process, stdout, stderr, rc=None):
self._process = process
self.stdout_path = self._get_path(stdout)
self.stderr_path = self._get_path(stderr)
self.rc = rc
self._stdout = None
self._stderr = None
self._custom_streams = [stream for stream in (stdout, stderr)
if self._is_custom_stream(stream)]
def _get_path(self, stream):
return stream.name if self._is_custom_stream(stream) else None
def _is_custom_stream(self, stream):
return stream not in (subprocess.PIPE, subprocess.STDOUT)
@property
def stdout(self):
if self._stdout is None:
self._read_stdout()
return self._stdout
@property
def stderr(self):
if self._stderr is None:
self._read_stderr()
return self._stderr
def _read_stdout(self):
self._stdout = self._read_stream(self.stdout_path, self._process.stdout)
def _read_stderr(self):
self._stderr = self._read_stream(self.stderr_path, self._process.stderr)
def _read_stream(self, stream_path, stream):
if stream_path:
stream = open(stream_path, 'r')
elif not self._is_open(stream):
return ''
try:
return self._format_output(stream.read())
except IOError: # http://bugs.jython.org/issue2218
return ''
finally:
if stream_path:
stream.close()
def _is_open(self, stream):
return stream and not stream.closed
def _format_output(self, output):
if output.endswith('\n'):
output = output[:-1]
return decode_output(output, force=True)
def close_streams(self):
standard_streams = self._get_and_read_standard_streams(self._process)
for stream in standard_streams + self._custom_streams:
if self._is_open(stream):
stream.close()
def _get_and_read_standard_streams(self, process):
stdin, stdout, stderr = process.stdin, process.stdout, process.stderr
if stdout:
self._read_stdout()
if stderr:
self._read_stderr()
return [stdin, stdout, stderr]
def __str__(self):
return '<result object with rc %d>' % self.rc
class ProcessConfig(object):
def __init__(self, cwd=None, shell=False, stdout=None, stderr=None,
alias=None, env=None, **rest):
self.cwd = self._get_cwd(cwd)
self.stdout_stream = self._new_stream(stdout)
self.stderr_stream = self._get_stderr(stderr, stdout, self.stdout_stream)
self.shell = is_truthy(shell)
self.alias = alias
self.env = self._construct_env(env, rest)
def _get_cwd(self, cwd):
if cwd:
return cwd.replace('/', os.sep)
return abspath('.')
def _new_stream(self, name):
if name:
name = name.replace('/', os.sep)
return open(os.path.join(self.cwd, name), 'w')
return subprocess.PIPE
def _get_stderr(self, stderr, stdout, stdout_stream):
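        # stderr=STDOUT (or the same path as stdout) redirects errors into the stdout stream or file;
        # otherwise a separate stream is opened for stderr.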
if stderr and stderr in ['STDOUT', stdout]:
if stdout_stream != subprocess.PIPE:
return stdout_stream
return subprocess.STDOUT
return self._new_stream(stderr)
def _construct_env(self, env, extra):
if env:
env = dict((encode_to_system(k), encode_to_system(v))
for k, v in env.items())
for key in extra:
if not key.startswith('env:'):
raise RuntimeError("Keyword argument '%s' is not supported by "
"this keyword." % key)
if env is None:
env = os.environ.copy()
env[encode_to_system(key[4:])] = encode_to_system(extra[key])
return env
@property
def full_config(self):
config = {'stdout': self.stdout_stream,
'stderr': self.stderr_stream,
'stdin': subprocess.PIPE,
'shell': self.shell,
'cwd': self.cwd,
'env': self.env,
'universal_newlines': True}
if not JYTHON:
self._add_process_group_config(config)
return config
def _add_process_group_config(self, config):
if hasattr(os, 'setsid'):
config['preexec_fn'] = os.setsid
if hasattr(subprocess, 'CREATE_NEW_PROCESS_GROUP'):
config['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
# FIXME: Convert to __unicode__ or at least remove encode_to_system.
# Also add tests!!
def __str__(self):
return encode_to_system("""\
cwd = %s
stdout_stream = %s
stderr_stream = %s
shell = %r
alias = %s
env = %r""" % (self.cwd, self.stdout_stream, self.stderr_stream,
self.shell, self.alias, self.env))
|
moto-timo/robotframework
|
src/robot/libraries/Process.py
|
Python
|
apache-2.0
| 39,800
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for basic building blocks used in eager mode RevNet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.eager.python.examples.revnet import blocks
def compute_degree(g1, g2, eps=1e-7):
"""Compute the degree between two vectors using their usual inner product."""
def _dot(u, v):
return tf.reduce_sum(u * v)
g1_norm = tf.sqrt(_dot(g1, g1))
g2_norm = tf.sqrt(_dot(g2, g2))
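  # Guard against zero-norm vectors so the cosine below never divides by zero.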
if g1_norm.numpy() == 0 and g2_norm.numpy() == 0:
cosine = 1. - eps
else:
g1_norm = 1. if g1_norm.numpy() == 0 else g1_norm
g2_norm = 1. if g2_norm.numpy() == 0 else g2_norm
cosine = _dot(g1, g2) / g1_norm / g2_norm
# Restrict to arccos range
cosine = tf.minimum(tf.maximum(cosine, eps - 1.), 1. - eps)
degree = tf.acos(cosine) * 180. / 3.141592653589793
return degree
def _validate_block_call_channels_last(block_factory, test):
"""Generic testing function for `channels_last` data format.
  Completes a set of tests varying data format, stride, and batch normalization
  configured for training vs. test time.
Args:
block_factory: constructor of one of blocks.InitBlock, blocks.FinalBlock,
blocks._ResidualInner
test: tf.test.TestCase object
"""
with tf.device("/cpu:0"): # NHWC format
input_shape = (8, 8, 128)
data_shape = (16,) + input_shape
x = tf.random_normal(shape=data_shape)
# Stride 1
block = block_factory(
filters=128,
strides=(1, 1),
input_shape=input_shape,
data_format="channels_last")
y_tr, y_ev = block(x, training=True), block(x, training=False)
test.assertEqual(y_tr.shape, y_ev.shape)
test.assertEqual(y_ev.shape, (16, 8, 8, 128))
test.assertNotAllClose(y_tr, y_ev)
# Stride of 2
block = block_factory(
filters=128,
strides=(2, 2),
input_shape=input_shape,
data_format="channels_last")
y_tr, y_ev = block(x, training=True), block(x, training=False)
test.assertEqual(y_tr.shape, y_ev.shape)
test.assertEqual(y_ev.shape, (16, 4, 4, 128))
test.assertNotAllClose(y_tr, y_ev)
def _validate_block_call_channels_first(block_factory, test):
"""Generic testing function for `channels_first` data format.
  Completes a set of tests varying data format, stride, and batch normalization
  configured for training vs. test time.
Args:
block_factory: constructor of one of blocks.InitBlock, blocks.FinalBlock,
blocks._ResidualInner
test: tf.test.TestCase object
"""
if not tf.test.is_gpu_available():
test.skipTest("GPU not available")
with tf.device("/gpu:0"): # Default NCHW format
input_shape = (128, 8, 8)
data_shape = (16,) + input_shape
x = tf.random_normal(shape=data_shape)
# Stride of 1
block = block_factory(filters=128, strides=(1, 1), input_shape=input_shape)
y_tr, y_ev = block(x, training=True), block(x, training=False)
test.assertEqual(y_tr.shape, y_ev.shape)
test.assertEqual(y_ev.shape, (16, 128, 8, 8))
test.assertNotAllClose(y_tr, y_ev)
# Stride of 2
block = block_factory(filters=128, strides=(2, 2), input_shape=input_shape)
y_tr, y_ev = block(x, training=True), block(x, training=False)
test.assertEqual(y_tr.shape, y_ev.shape)
test.assertEqual(y_ev.shape, (16, 128, 4, 4))
test.assertNotAllClose(y_tr, y_ev)
class RevBlockTest(tf.test.TestCase):
def _check_grad_angle(self, grads, grads_true, atol=1e0):
"""Check the angle between two list of vectors are all close."""
for g1, g2 in zip(grads, grads_true):
degree = compute_degree(g1, g2)
self.assertLessEqual(degree, atol)
def test_backward_grads_channels_first(self):
"""Test `backward` function with `channels_first` data format."""
if not tf.test.is_gpu_available():
self.skipTest("GPU not available")
with tf.device("/gpu:0"): # Default NCHW format
# Stride 1
input_shape = (128, 8, 8)
data_shape = (16,) + input_shape
x = tf.random_normal(shape=data_shape, dtype=tf.float64)
dy = tf.random_normal(shape=data_shape, dtype=tf.float64)
dy1, dy2 = tf.split(dy, num_or_size_splits=2, axis=1)
block = blocks.RevBlock(
n_res=3,
filters=128,
strides=(1, 1),
input_shape=input_shape,
fused=False,
dtype=tf.float64)
with tf.GradientTape() as tape:
tape.watch(x)
x1, x2 = tf.split(x, num_or_size_splits=2, axis=1)
y1, y2 = block((x1, x2), training=True)
y = tf.concat((y1, y2), axis=1)
# Compute grads from reconstruction
(dx1, dx2), dw = block.backward_grads(
x=(x1, x2), y=(y1, y2), dy=(dy1, dy2), training=True)
dx = tf.concat((dx1, dx2), axis=1)
vars_ = block.trainable_variables
# Compute true grads
grads = tape.gradient(y, [x] + vars_, output_gradients=dy)
dx_true, dw_true = grads[0], grads[1:]
self.assertAllClose(dx_true, dx)
self.assertAllClose(dw_true, dw)
self._check_grad_angle(dx_true, dx)
self._check_grad_angle(dw_true, dw)
# Stride 2
x = tf.random_normal(shape=data_shape, dtype=tf.float64)
dy = tf.random_normal(shape=(16, 128, 4, 4), dtype=tf.float64)
dy1, dy2 = tf.split(dy, num_or_size_splits=2, axis=1)
block = blocks.RevBlock(
n_res=3,
filters=128,
strides=(2, 2),
input_shape=input_shape,
fused=False,
dtype=tf.float64)
with tf.GradientTape() as tape:
tape.watch(x)
x1, x2 = tf.split(x, num_or_size_splits=2, axis=1)
y1, y2 = block((x1, x2), training=True)
y = tf.concat((y1, y2), axis=1)
# Compute grads from reconstruction
(dx1, dx2), dw = block.backward_grads(
x=(x1, x2), y=(y1, y2), dy=(dy1, dy2), training=True)
dx = tf.concat((dx1, dx2), axis=1)
vars_ = block.trainable_variables
# Compute true grads
grads = tape.gradient(y, [x] + vars_, output_gradients=dy)
dx_true, dw_true = grads[0], grads[1:]
self.assertAllClose(dx_true, dx)
self.assertAllClose(dw_true, dw)
self._check_grad_angle(dx_true, dx)
self._check_grad_angle(dw_true, dw)
class _ResidualTest(tf.test.TestCase):
def test_backward_grads_channels_first(self):
"""Test `backward_grads` function with `channels_first` data format."""
if not tf.test.is_gpu_available():
self.skipTest("GPU not available")
with tf.device("/gpu:0"): # Default NCHW format
input_shape = (128, 8, 8)
data_shape = (16,) + input_shape
# Use double precision for testing
x_true = tf.random_normal(shape=data_shape, dtype=tf.float64)
dy = tf.random_normal(shape=data_shape, dtype=tf.float64)
dy1, dy2 = tf.split(dy, num_or_size_splits=2, axis=1)
residual = blocks._Residual(
filters=128,
strides=(1, 1),
input_shape=input_shape,
fused=False,
dtype=tf.float64)
with tf.GradientTape() as tape:
tape.watch(x_true)
x1_true, x2_true = tf.split(x_true, num_or_size_splits=2, axis=1)
y1, y2 = residual((x1_true, x2_true), training=True)
y = tf.concat((y1, y2), axis=1)
# Gradients computed due to reversibility
(x1, x2), (dx1, dx2), dw = residual.backward_grads(
y=(y1, y2), dy=(dy1, dy2), training=True)
x = tf.concat((x1, x2), axis=1)
dx = tf.concat((dx1, dx2), axis=1)
# True gradients computed by the tape
grads = tape.gradient(
y, [x_true] + residual.trainable_variables, output_gradients=dy)
dx_true, dw_true = grads[0], grads[1:]
self.assertAllClose(x_true, x)
self.assertAllClose(dx_true, dx)
self.assertAllClose(dw_true, dw)
class _ResidualInnerTest(tf.test.TestCase):
def test_call(self):
"""Test `call` function."""
_validate_block_call_channels_first(blocks._ResidualInner, self)
_validate_block_call_channels_last(blocks._ResidualInner, self)
class _BottleneckResidualInner(tf.test.TestCase):
def test_call(self):
"""Test `call` function."""
_validate_block_call_channels_first(blocks._BottleneckResidualInner, self)
_validate_block_call_channels_last(blocks._BottleneckResidualInner, self)
if __name__ == "__main__":
tf.enable_eager_execution()
tf.test.main()
|
aselle/tensorflow
|
tensorflow/contrib/eager/python/examples/revnet/blocks_test.py
|
Python
|
apache-2.0
| 9,119
|
__author__ = 'Viktor Winkelmann'
__all__ = ['DataRecognizer', 'PluginManager', 'ProtocolDissector']
|
vikwin/pcapfex
|
core/Plugins/__init__.py
|
Python
|
apache-2.0
| 99
|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import copy
import json
from streamalert.shared.logger import get_logger
from streamalert.shared.lookup_tables.core import LookupTables
from streamalert.shared.lookup_tables.utils import LookupTablesMagic
from streamalert_cli.utils import CLICommand, generate_subparser, set_parser_epilog
LOGGER = get_logger(__name__)
class LookupTablesCommand(CLICommand):
description = 'Describe and manage your LookupTables'
@classmethod
def setup_subparser(cls, subparser):
# FIXME (derek.wang) Refactor this into a more robust command-nesting framework
template = '''\
Available Sub-Commands:
{}
Examples:
manage.py lookup-tables [describe-tables|get|set]
'''
subcommands = cls._subcommands()
set_parser_epilog(
subparser,
epilog=(
template.format('\n'.join([
'\t{command: <{pad}}{description}'.format(
command=command,
pad=30,
# FIXME (Derek.wang)
# Ryan suggested that we could implement a __str__ or __repr__ function
# for each of the CLICommand classes
description=subcommand.description
)
for command, subcommand
in subcommands.items()
]))
)
)
lookup_tables_subparsers = subparser.add_subparsers()
for subcommand in subcommands.values():
subcommand.setup_subparser(lookup_tables_subparsers)
@classmethod
def handler(cls, options, config):
subcommands = cls._subcommands()
if options.subcommand in subcommands:
return subcommands[options.subcommand].handler(options, config)
LOGGER.error('Unhandled lookup-tables subcommand %s', options.subcommand)
@classmethod
def _subcommands(cls):
return {
# FIXME (derek.wang) Put the command strings into the commands themselves, so the
# subparsers can be registered easily
'describe-tables': LookupTablesDescribeTablesSubCommand,
'get': LookupTablesGetKeySubCommand,
'set': LookupTablesSetSubCommand,
'list-add': LookupTablesListAddSubCommand,
'set-from-json-file': LookupTablesSetFromFile,
}
class LookupTablesSetFromFile(CLICommand):
description = 'Set a LookupTable key from a JSON file'
@classmethod
def setup_subparser(cls, subparser):
set_parser = generate_subparser(
subparser,
'set-from-json-file',
description='Pushes the contents of a given json file into the LookupTable key',
subcommand=True
)
set_parser_epilog(
set_parser,
epilog=(
'''\
Examples:
manage.py lookup-tables set-from-json-file -t [table] -k [key] -f \
[path/to/file.json]
'''
)
)
set_parser.add_argument(
'-t',
'--table',
help='Name of the LookupTable',
required=True
)
set_parser.add_argument(
'-k',
'--key',
help='Key to modify on the LookupTable',
required=True
)
set_parser.add_argument(
'-f',
'--file',
help='Path to the json file, relative to the current working directory',
required=True
)
@classmethod
def handler(cls, options, config):
print('==== LookupTables; Set from JSON File ====')
core = LookupTables.get_instance(config=config)
print(' Table: {}'.format(options.table))
print(' Key: {}'.format(options.key))
print(' File: {}'.format(options.file))
table = core.table(options.table)
old_value = table.get(options.key)
with open(options.file, "r") as json_file_fp:
new_value = json.load(json_file_fp)
print(' Value: {} --> {}'.format(
json.dumps(old_value, indent=2, sort_keys=True),
json.dumps(new_value, indent=2, sort_keys=True)
))
LookupTablesMagic.set_table_value(table, options.key, new_value)
return True
class LookupTablesListAddSubCommand(CLICommand):
description = 'Add a value to a list-typed LookupTables key'
@classmethod
def setup_subparser(cls, subparser):
set_parser = generate_subparser(
subparser,
'list-add',
description='Sets a key on the requested LookupTable',
subcommand=True
)
set_parser_epilog(
set_parser,
epilog=(
'''\
Examples:
manage.py lookup-tables list-add -t [table] -k [key] -v [value]
'''
)
)
set_parser.add_argument(
'-t',
'--table',
help='Name of the LookupTable',
required=True
)
set_parser.add_argument(
'-k',
'--key',
help='Key to modify on the LookupTable',
required=True
)
set_parser.add_argument(
'-v',
'--value',
help='Value to add to the key',
required=True
)
set_parser.add_argument(
'-u',
'--unique',
help='Remove duplicate values from the final list',
action='store_true'
)
set_parser.add_argument(
'-s',
'--sort',
help='Sort the final list',
action='store_true'
)
@classmethod
def handler(cls, options, config):
print('==== LookupTables; List Add Key ====')
table_name = options.table
key = options.key
core = LookupTables.get_instance(config=config)
print(' Table: {}'.format(table_name))
print(' Key: {}'.format(key))
table = core.table(table_name)
old_value = table.get(key)
if old_value is None:
old_value = []
if not isinstance(old_value, list):
print(' ERROR: The current value is not a list: {}'.format(old_value))
return False
new_value = copy.copy(old_value)
new_value.append(options.value)
if options.unique:
new_value = list(set(new_value))
if options.sort:
new_value = sorted(new_value)
print(' Value: {} --> {}'.format(old_value, new_value))
LookupTablesMagic.set_table_value(table, key, new_value)
return True
class LookupTablesDescribeTablesSubCommand(CLICommand):
description = 'Show information about all currently configured LookupTables'
@classmethod
def setup_subparser(cls, subparser):
describe_tables_parser = generate_subparser(
subparser,
'describe-tables',
description='Shows metadata about all currently configured LookupTables',
subcommand=True
)
set_parser_epilog(
describe_tables_parser,
epilog=(
'''\
Examples:
manage.py lookup-tables describe-tables
'''
)
)
@classmethod
def handler(cls, options, config):
print('==== LookupTables; Describe Tables ====\n')
lookup_tables = LookupTablesMagic.get_all_tables(LookupTables.get_instance(config=config))
print('{} Tables:\n'.format(len(lookup_tables)))
for table in lookup_tables.values():
print(' Table Name: {}'.format(table.table_name))
print(' Driver Id: {}'.format(table.driver_id))
print(' Driver Type: {}\n'.format(table.driver_type))
class LookupTablesGetKeySubCommand(CLICommand):
description = 'Retrieve a key from an existing LookupTable'
@classmethod
def setup_subparser(cls, subparser):
get_parser = generate_subparser(
subparser,
'get',
description='Retrieves a key from the requested LookupTable',
subcommand=True
)
set_parser_epilog(
get_parser,
epilog=(
'''\
Examples:
manage.py lookup-tables get -t [table] -k [key]
'''
)
)
get_parser.add_argument(
'-t',
'--table',
help='Name of the LookupTable',
required=True
)
get_parser.add_argument(
'-k',
'--key',
help='Key to fetch on the LookupTable',
required=True
)
@classmethod
def handler(cls, options, config):
table_name = options.table
key = options.key
print('==== LookupTables; Get Key ====')
LookupTables.get_instance(config=config)
print(' Table: {}'.format(table_name))
print(' Key: {}'.format(key))
value = LookupTables.get(table_name, key)
print()
print(' Type: {}'.format(type(value)))
if isinstance(value, (list, dict)):
# Render lists and dicts a bit better to make them easier to read
print(' Value:')
print(json.dumps(value, indent=2, sort_keys=True))
else:
print(' Value: {}'.format(value))
print()
return True
class LookupTablesSetSubCommand(CLICommand):
description = 'Update the key of an existing LookupTable'
@classmethod
def setup_subparser(cls, subparser):
set_parser = generate_subparser(
subparser,
'set',
description='Sets a key on the requested LookupTable',
subcommand=True
)
set_parser_epilog(
set_parser,
epilog=(
'''\
Examples:
manage.py lookup-tables set -t [table] -k [key] -v [value]
'''
)
)
set_parser.add_argument(
'-t',
'--table',
help='Name of the LookupTable',
required=True
)
set_parser.add_argument(
'-k',
'--key',
help='Key to set on the LookupTable',
required=True
)
set_parser.add_argument(
'-v',
'--value',
help='Value to save into LookupTable',
required=True
)
set_parser.add_argument(
'-j',
'--json',
help='Interpret the value as a JSON-encoded string',
action='store_true'
)
@classmethod
def handler(cls, options, config):
print('==== LookupTables; Set Key ====')
table_name = options.table
key = options.key
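        # With --json, decode the value first so nested structures (lists/dicts) are stored as such
        # rather than as a raw string.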
if options.json:
try:
new_value = json.loads(options.value)
except json.decoder.JSONDecodeError as e:
print(' ERROR: Input is not valid JSON:')
print(e)
return False
else:
new_value = options.value
core = LookupTables.get_instance(config=config)
print(' Table: {}'.format(table_name))
print(' Key: {}'.format(key))
table = core.table(table_name)
old_value = table.get(key)
print(' Value: {} --> {}'.format(old_value, new_value))
LookupTablesMagic.set_table_value(table, key, new_value)
return True
|
airbnb/streamalert
|
streamalert_cli/lookup_tables/handler.py
|
Python
|
apache-2.0
| 12,263
|
import os
import synapse.tests.utils as s_test
import synapse.common as s_common
import synapse.lib.output as s_output
import synapse.lib.msgpack as s_msgpack
import synapse.tools.storm as s_t_storm
class StormCliTest(s_test.SynTest):
async def test_tools_storm(self):
async with self.getTestCore() as core:
await core.addTagProp('foo', ('int', {}), {})
pars = s_t_storm.getArgParser()
opts = pars.parse_args(('woot',))
self.eq('woot', opts.cortex)
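            # Add an extended model property so the node display assertions below can check custom props too.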
q = '$lib.model.ext.addFormProp(inet:ipv4, "_test:score", (int, $lib.dict()), $lib.dict())'
await core.callStorm(q)
async with core.getLocalProxy() as proxy:
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('[inet:ipv4=1.2.3.4 +#foo=2012 +#bar +#baz:foo=10 :_test:score=7]')
text = str(outp)
self.isin('.....', text)
self.isin('inet:ipv4=1.2.3.4', text)
self.isin(':type = unicast', text)
self.isin(':_test:score = 7', text)
self.isin('.created = ', text)
self.isin('#bar', text)
self.isin('#baz:foo = 10', text)
self.isin('#foo = (2012/01/01 00:00:00.000, 2012/01/01 00:00:00.001)', text)
self.isin('complete. 1 nodes in', text)
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('!quit')
self.isin('o/', str(outp))
self.true(scli.isfini)
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('!help')
self.isin('!quit', str(outp))
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('$lib.print(woot)')
self.isin('woot', str(outp))
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('$lib.warn(woot)')
self.isin('WARNING: woot', str(outp))
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('---')
self.isin("---\n ^\nSyntax Error: No terminal defined for '-' at line 1 col 2", str(outp))
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('spin |' + ' ' * 80 + '---')
self.isin("... ---\n ^", str(outp))
outp = s_output.OutPutStr()
async with await s_t_storm.StormCli.anit(proxy, outp=outp) as scli:
await scli.runCmdLine('---' + ' ' * 80 + 'spin')
self.isin("--- ...\n ^", str(outp))
lurl = core.getLocalUrl()
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, '$lib.print(woot)'), outp=outp)
self.isin('woot', str(outp))
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, f'!runfile --help'), outp=outp)
self.isin('Run a local storm file', str(outp))
with self.getTestDir() as dirn:
path = os.path.join(dirn, 'foo.storm')
with open(path, 'wb') as fd:
fd.write(b'$lib.print(woot)')
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, f'!runfile {path}'), outp=outp)
self.isin(f'running storm file: {path}', str(outp))
self.isin('woot', str(outp))
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, f'!runfile /newp.storm'), outp=outp)
self.isin(f'no such file: /newp.storm', str(outp))
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, f'!pushfile /newp'), outp=outp)
self.isin(f'no such file: /newp', str(outp))
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, f'!pushfile {path}'), outp=outp)
text = str(outp)
self.isin(f'uploading file: {path}', text)
self.isin(':name = foo.storm', text)
self.isin(':sha256 = c00adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3', text)
outp = s_output.OutPutStr()
path = os.path.join(dirn, 'bar.storm')
await s_t_storm.main((lurl, f'!pullfile c00adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3 {path}'), outp=outp)
text = str(outp)
self.isin('downloading sha256: c00adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3', text)
self.isin(f'saved to: {path}', text)
with s_common.genfile(path) as fd:
self.isin('woot', fd.read().decode())
outp = s_output.OutPutStr()
await s_t_storm.main((lurl, f'!pullfile c11adfcc316f8b00772cdbce2505b9ea539d74f42861801eceb1017a44344ed3 {path}'), outp=outp)
text = str(outp)
self.isin('Axon does not contain the requested file.', text)
await scli.runCmdLine('[test:str=foo +#foo +#bar +#baz]')
await scli.runCmdLine('[test:str=bar +#foo +#bar +#baz]')
path = os.path.join(dirn, 'export1.nodes')
await s_t_storm.main((lurl, f'!export {path} {{ test:str }}'), outp=outp)
text = str(outp)
self.isin(f'saved 2 nodes to: {path}', text)
with open(path, 'rb') as fd:
byts = fd.read()
podes = [i[1] for i in s_msgpack.Unpk().feed(byts)]
self.sorteq(('bar', 'foo'), [p[0][1] for p in podes])
for pode in podes:
self.sorteq(('bar', 'baz', 'foo'), pode[1]['tags'])
path = os.path.join(dirn, 'export2.nodes')
q = f'!export {path} {{ test:str }} --include-tags foo bar'
await s_t_storm.main((lurl, q), outp=outp)
text = str(outp)
self.isin(f'saved 2 nodes to: {path}', text)
with open(path, 'rb') as fd:
byts = fd.read()
podes = [i[1] for i in s_msgpack.Unpk().feed(byts)]
self.sorteq(('bar', 'foo'), [p[0][1] for p in podes])
for pode in podes:
self.sorteq(('bar', 'foo'), pode[1]['tags'])
path = os.path.join(dirn, 'export3.nodes')
q = f'!export {path} {{ test:str }} --no-tags'
await s_t_storm.main((lurl, q), outp=outp)
text = str(outp)
self.isin(f'saved 2 nodes to: {path}', text)
with open(path, 'rb') as fd:
byts = fd.read()
podes = [i[1] for i in s_msgpack.Unpk().feed(byts)]
self.sorteq(('bar', 'foo'), [p[0][1] for p in podes])
for pode in podes:
self.eq({}, pode[1]['tags'])
await s_t_storm.main((lurl, f'!export {path} {{ test:newp }}'), outp=outp)
text = str(outp)
self.isin('No property named test:newp.', text)
|
vertexproject/synapse
|
synapse/tests/test_tools_storm.py
|
Python
|
apache-2.0
| 7,931
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1PodList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'items': 'list[V1Pod]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None):
"""
V1PodList - a model defined in Swagger
"""
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""
Gets the api_version of this V1PodList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1PodList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1PodList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1PodList.
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""
Gets the items of this V1PodList.
List of pods. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md
:return: The items of this V1PodList.
:rtype: list[V1Pod]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1PodList.
List of pods. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md
:param items: The items of this V1PodList.
:type: list[V1Pod]
"""
if items is None:
raise ValueError("Invalid value for `items`, must not be `None`")
self._items = items
@property
def kind(self):
"""
Gets the kind of this V1PodList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1PodList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1PodList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1PodList.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1PodList.
Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The metadata of this V1PodList.
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1PodList.
Standard list metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param metadata: The metadata of this V1PodList.
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1PodList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
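# Illustrative usage (not part of the generated file; values are examples):
#   pods = V1PodList(api_version='v1', kind='PodList', items=[])
#   pods.to_dict()  # -> {'api_version': 'v1', 'items': [], 'kind': 'PodList', 'metadata': None}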
|
mbohlool/client-python
|
kubernetes/client/models/v1_pod_list.py
|
Python
|
apache-2.0
| 6,459
|
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import json
import os
import re
import subprocess
import sys
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import time
from pyversion import is_python3
if is_python3():
import urllib.request
import urllib.error
else:
import urllib2
import imp
urllib = imp.new_module('urllib')
urllib.request = urllib2
urllib.error = urllib2
from signal import SIGTERM
from error import GitError, UploadError
from trace import Trace
if is_python3():
from http.client import HTTPException
else:
from httplib import HTTPException
from git_command import GitCommand
from git_command import ssh_sock
from git_command import terminate_ssh_clients
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
ID_RE = re.compile(r'^[0-9a-f]{40}$')
REVIEW_CACHE = dict()
def IsId(rev):
return ID_RE.match(rev)
def _key(name):
parts = name.split('.')
if len(parts) < 2:
return name.lower()
parts[ 0] = parts[ 0].lower()
parts[-1] = parts[-1].lower()
return '.'.join(parts)
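# _key() follows git's own case rules for config names: the section and
# variable parts are case-insensitive (lowercased here), while any middle
# subsection part is case-sensitive and left untouched.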
class GitConfig(object):
_ForUser = None
@classmethod
def ForUser(cls):
if cls._ForUser is None:
cls._ForUser = cls(configfile = os.path.expanduser('~/.gitconfig'))
return cls._ForUser
@classmethod
def ForRepository(cls, gitdir, defaults=None):
return cls(configfile = os.path.join(gitdir, 'config'),
defaults = defaults)
def __init__(self, configfile, defaults=None, jsonFile=None):
self.file = configfile
self.defaults = defaults
self._cache_dict = None
self._section_dict = None
self._remotes = {}
self._branches = {}
self._json = jsonFile
if self._json is None:
self._json = os.path.join(
os.path.dirname(self.file),
'.repo_' + os.path.basename(self.file) + '.json')
def Has(self, name, include_defaults = True):
"""Return true if this configuration file has the key.
"""
if _key(name) in self._cache:
return True
if include_defaults and self.defaults:
return self.defaults.Has(name, include_defaults = True)
return False
def GetBoolean(self, name):
"""Returns a boolean from the configuration file.
None : The value was not defined, or is not a boolean.
True : The value was set to true or yes.
False: The value was set to false or no.
"""
v = self.GetString(name)
if v is None:
return None
v = v.lower()
if v in ('true', 'yes'):
return True
if v in ('false', 'no'):
return False
return None
def GetString(self, name, all_keys=False):
"""Get the first value for a key, or None if it is not defined.
    This configuration file is searched first; if the key is not defined
    here, or all_keys = True, then the defaults are also searched.
"""
try:
v = self._cache[_key(name)]
except KeyError:
if self.defaults:
return self.defaults.GetString(name, all_keys = all_keys)
v = []
if not all_keys:
if v:
return v[0]
return None
r = []
r.extend(v)
if self.defaults:
r.extend(self.defaults.GetString(name, all_keys = True))
return r
def SetString(self, name, value):
"""Set the value(s) for a key.
Only this configuration file is modified.
The supplied value should be either a string,
or a list of strings (to store multiple values).
"""
key = _key(name)
try:
old = self._cache[key]
except KeyError:
old = []
if value is None:
if old:
del self._cache[key]
self._do('--unset-all', name)
elif isinstance(value, list):
if len(value) == 0:
self.SetString(name, None)
elif len(value) == 1:
self.SetString(name, value[0])
elif old != value:
self._cache[key] = list(value)
self._do('--replace-all', name, value[0])
for i in range(1, len(value)):
self._do('--add', name, value[i])
elif len(old) != 1 or old[0] != value:
self._cache[key] = [value]
self._do('--replace-all', name, value)
def GetRemote(self, name):
"""Get the remote.$name.* configuration values as an object.
"""
try:
r = self._remotes[name]
except KeyError:
r = Remote(self, name)
self._remotes[r.name] = r
return r
def GetBranch(self, name):
"""Get the branch.$name.* configuration values as an object.
"""
try:
b = self._branches[name]
except KeyError:
b = Branch(self, name)
self._branches[b.name] = b
return b
def GetSubSections(self, section):
"""List all subsection names matching $section.*.*
"""
return self._sections.get(section, set())
def HasSection(self, section, subsection = ''):
"""Does at least one key in section.subsection exist?
"""
try:
return subsection in self._sections[section]
except KeyError:
return False
def UrlInsteadOf(self, url):
"""Resolve any url.*.insteadof references.
"""
for new_url in self.GetSubSections('url'):
old_url = self.GetString('url.%s.insteadof' % new_url)
if old_url is not None and url.startswith(old_url):
return new_url + url[len(old_url):]
return url
@property
def _sections(self):
d = self._section_dict
if d is None:
d = {}
for name in self._cache.keys():
p = name.split('.')
if 2 == len(p):
section = p[0]
subsect = ''
else:
section = p[0]
subsect = '.'.join(p[1:-1])
if section not in d:
d[section] = set()
d[section].add(subsect)
self._section_dict = d
return d
@property
def _cache(self):
if self._cache_dict is None:
self._cache_dict = self._Read()
return self._cache_dict
def _Read(self):
d = self._ReadJson()
if d is None:
d = self._ReadGit()
self._SaveJson(d)
return d
def _ReadJson(self):
try:
if os.path.getmtime(self._json) \
<= os.path.getmtime(self.file):
os.remove(self._json)
return None
except OSError:
return None
try:
Trace(': parsing %s', self.file)
fd = open(self._json)
try:
return json.load(fd)
finally:
fd.close()
except (IOError, ValueError):
os.remove(self._json)
return None
def _SaveJson(self, cache):
try:
fd = open(self._json, 'w')
try:
json.dump(cache, fd, indent=2)
finally:
fd.close()
except (IOError, TypeError):
      if os.path.exists(self._json):
os.remove(self._json)
def _ReadGit(self):
"""
Read configuration data from git.
This internal method populates the GitConfig cache.
"""
c = {}
d = self._do('--null', '--list')
if d is None:
return c
for line in d.decode('utf-8').rstrip('\0').split('\0'): # pylint: disable=W1401
# Backslash is not anomalous
if '\n' in line:
key, val = line.split('\n', 1)
else:
key = line
val = None
if key in c:
c[key].append(val)
else:
c[key] = [val]
return c
def _do(self, *args):
command = ['config', '--file', self.file]
command.extend(args)
p = GitCommand(None,
command,
capture_stdout = True,
capture_stderr = True)
if p.Wait() == 0:
return p.stdout
else:
      raise GitError('git config %s: %s' % (str(args), p.stderr))
class RefSpec(object):
"""A Git refspec line, split into its components:
forced: True if the line starts with '+'
src: Left side of the line
dst: Right side of the line
"""
@classmethod
def FromString(cls, rs):
lhs, rhs = rs.split(':', 2)
if lhs.startswith('+'):
lhs = lhs[1:]
forced = True
else:
forced = False
return cls(forced, lhs, rhs)
def __init__(self, forced, lhs, rhs):
self.forced = forced
self.src = lhs
self.dst = rhs
def SourceMatches(self, rev):
if self.src:
if rev == self.src:
return True
if self.src.endswith('/*') and rev.startswith(self.src[:-1]):
return True
return False
def DestMatches(self, ref):
if self.dst:
if ref == self.dst:
return True
if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]):
return True
return False
def MapSource(self, rev):
if self.src.endswith('/*'):
return self.dst[:-1] + rev[len(self.src) - 1:]
return self.dst
def __str__(self):
s = ''
if self.forced:
s += '+'
if self.src:
s += self.src
if self.dst:
s += ':'
s += self.dst
return s
_master_processes = []
_master_keys = set()
_ssh_master = True
_master_keys_lock = None
def init_ssh():
"""Should be called once at the start of repo to init ssh master handling.
At the moment, all we do is to create our lock.
"""
global _master_keys_lock
assert _master_keys_lock is None, "Should only call init_ssh once"
_master_keys_lock = _threading.Lock()
def _open_ssh(host, port=None):
global _ssh_master
# Acquire the lock. This is needed to prevent opening multiple masters for
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
# one that was passed to repo init.
_master_keys_lock.acquire()
try:
# Check to see whether we already think that the master is running; if we
# think it's already running, return right away.
if port is not None:
key = '%s:%s' % (host, port)
else:
key = host
if key in _master_keys:
return True
if not _ssh_master \
or 'GIT_SSH' in os.environ \
or sys.platform in ('win32', 'cygwin'):
# failed earlier, or cygwin ssh can't do this
#
return False
# We will make two calls to ssh; this is the common part of both calls.
command_base = ['ssh',
'-o','ControlPath %s' % ssh_sock(),
host]
if port is not None:
command_base[1:1] = ['-p', str(port)]
# Since the key wasn't in _master_keys, we think that master isn't running.
# ...but before actually starting a master, we'll double-check. This can
    # be important because we can't tell that 'git@myhost.com' is the same
    # as 'myhost.com' where "User git" is set up in the user's ~/.ssh/config file.
check_command = command_base + ['-O','check']
try:
Trace(': %s', ' '.join(check_command))
check_process = subprocess.Popen(check_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
check_process.communicate() # read output, but ignore it...
isnt_running = check_process.wait()
if not isnt_running:
        # Our double-check found that the master _was_ in fact running. Add to
# the list of keys.
_master_keys.add(key)
return True
except Exception:
      # Ignore exceptions. We will fall back to the normal command and print
      # to the log there.
pass
command = command_base[:1] + \
['-M', '-N'] + \
command_base[1:]
try:
Trace(': %s', ' '.join(command))
p = subprocess.Popen(command)
except Exception as e:
_ssh_master = False
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
% (host,port, str(e)), file=sys.stderr)
return False
_master_processes.append(p)
_master_keys.add(key)
time.sleep(1)
return True
finally:
_master_keys_lock.release()
def close_ssh():
global _master_keys_lock
terminate_ssh_clients()
for p in _master_processes:
try:
os.kill(p.pid, SIGTERM)
p.wait()
except OSError:
pass
del _master_processes[:]
_master_keys.clear()
d = ssh_sock(create=False)
if d:
try:
os.rmdir(os.path.dirname(d))
except OSError:
pass
# We're done with the lock, so we can delete it.
_master_keys_lock = None
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
def GetSchemeFromUrl(url):
m = URI_ALL.match(url)
if m:
return m.group(1)
return None
def _preconnect(url):
m = URI_ALL.match(url)
if m:
scheme = m.group(1)
host = m.group(2)
if ':' in host:
host, port = host.split(':')
else:
port = None
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
return _open_ssh(host, port)
return False
m = URI_SCP.match(url)
if m:
host = m.group(1)
return _open_ssh(host)
return False
class Remote(object):
"""Configuration options related to a remote.
"""
def __init__(self, config, name):
self._config = config
self.name = name
self.url = self._Get('url')
self.review = self._Get('review')
self.projectname = self._Get('projectname')
self.fetch = list(map(RefSpec.FromString,
self._Get('fetch', all_keys=True)))
self._review_url = None
def _InsteadOf(self):
globCfg = GitConfig.ForUser()
urlList = globCfg.GetSubSections('url')
longest = ""
longestUrl = ""
for url in urlList:
key = "url." + url + ".insteadOf"
insteadOfList = globCfg.GetString(key, all_keys=True)
for insteadOf in insteadOfList:
if self.url.startswith(insteadOf) \
and len(insteadOf) > len(longest):
longest = insteadOf
longestUrl = url
if len(longest) == 0:
return self.url
return self.url.replace(longest, longestUrl, 1)
def PreConnectFetch(self):
connectionUrl = self._InsteadOf()
return _preconnect(connectionUrl)
def ReviewUrl(self, userEmail):
if self._review_url is None:
if self.review is None:
return None
u = self.review
if u.split(':')[0] not in ('http', 'https', 'sso'):
u = 'http://%s' % u
if u.endswith('/Gerrit'):
u = u[:len(u) - len('/Gerrit')]
if u.endswith('/ssh_info'):
u = u[:len(u) - len('/ssh_info')]
if not u.endswith('/'):
u += '/'
http_url = u
if u in REVIEW_CACHE:
self._review_url = REVIEW_CACHE[u]
elif 'REPO_HOST_PORT_INFO' in os.environ:
host, port = os.environ['REPO_HOST_PORT_INFO'].split()
self._review_url = self._SshReviewUrl(userEmail, host, port)
REVIEW_CACHE[u] = self._review_url
elif u.startswith('sso:'):
self._review_url = u # Assume it's right
REVIEW_CACHE[u] = self._review_url
else:
try:
info_url = u + 'ssh_info'
info = urllib.request.urlopen(info_url).read()
if info == 'NOT_AVAILABLE' or '<' in info:
# If `info` contains '<', we assume the server gave us some sort
# of HTML response back, like maybe a login page.
#
# Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
self._review_url = http_url
else:
host, port = info.split()
self._review_url = self._SshReviewUrl(userEmail, host, port)
except urllib.error.HTTPError as e:
raise UploadError('%s: %s' % (self.review, str(e)))
except urllib.error.URLError as e:
raise UploadError('%s: %s' % (self.review, str(e)))
except HTTPException as e:
raise UploadError('%s: %s' % (self.review, e.__class__.__name__))
REVIEW_CACHE[u] = self._review_url
return self._review_url + self.projectname
def _SshReviewUrl(self, userEmail, host, port):
username = self._config.GetString('review.%s.username' % self.review)
if username is None:
username = userEmail.split('@')[0]
return 'ssh://%s@%s:%s/' % (username, host, port)
def ToLocal(self, rev):
"""Convert a remote revision string to something we have locally.
"""
if IsId(rev):
return rev
if rev.startswith(R_TAGS):
return rev
if not rev.startswith('refs/'):
rev = R_HEADS + rev
for spec in self.fetch:
if spec.SourceMatches(rev):
return spec.MapSource(rev)
raise GitError('remote %s does not have %s' % (self.name, rev))
def WritesTo(self, ref):
"""True if the remote stores to the tracking ref.
"""
for spec in self.fetch:
if spec.DestMatches(ref):
return True
return False
def ResetFetch(self, mirror=False):
"""Set the fetch refspec to its default value.
"""
if mirror:
dst = 'refs/heads/*'
else:
dst = 'refs/remotes/%s/*' % self.name
self.fetch = [RefSpec(True, 'refs/heads/*', dst)]
def Save(self):
"""Save this remote to the configuration.
"""
self._Set('url', self.url)
self._Set('review', self.review)
self._Set('projectname', self.projectname)
self._Set('fetch', list(map(str, self.fetch)))
def _Set(self, key, value):
key = 'remote.%s.%s' % (self.name, key)
return self._config.SetString(key, value)
def _Get(self, key, all_keys=False):
key = 'remote.%s.%s' % (self.name, key)
return self._config.GetString(key, all_keys = all_keys)
class Branch(object):
"""Configuration options related to a single branch.
"""
def __init__(self, config, name):
self._config = config
self.name = name
self.merge = self._Get('merge')
r = self._Get('remote')
if r:
self.remote = self._config.GetRemote(r)
else:
self.remote = None
@property
def LocalMerge(self):
"""Convert the merge spec to a local name.
"""
if self.remote and self.merge:
return self.remote.ToLocal(self.merge)
return None
def Save(self):
"""Save this branch back into the configuration.
"""
if self._config.HasSection('branch', self.name):
if self.remote:
self._Set('remote', self.remote.name)
else:
self._Set('remote', None)
self._Set('merge', self.merge)
else:
fd = open(self._config.file, 'ab')
try:
fd.write('[branch "%s"]\n' % self.name)
if self.remote:
fd.write('\tremote = %s\n' % self.remote.name)
if self.merge:
fd.write('\tmerge = %s\n' % self.merge)
finally:
fd.close()
def _Set(self, key, value):
key = 'branch.%s.%s' % (self.name, key)
return self._config.SetString(key, value)
def _Get(self, key, all_keys=False):
key = 'branch.%s.%s' % (self.name, key)
return self._config.GetString(key, all_keys = all_keys)
|
ceejatec/git-repo
|
git_config.py
|
Python
|
apache-2.0
| 19,314
|
"""
Django settings for TheWeather project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y4s7z7zt^&5z+6z@dleqo4hv3fg7!7r@hb#k&-hacvp&0s2fw$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'weatherapp'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'TheWeather.urls'
TEMPLATE_PATH = os.path.join(BASE_DIR, 'templates')
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_PATH,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'TheWeather.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
ON_HEROKU = os.environ.get('ON_HEROKU')  # Read the ON_HEROKU flag from the environment; it is '1' when the app runs on Heroku.
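# Illustrative only: on Heroku the flag can be provided with `heroku config:set ON_HEROKU=1`.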
if ON_HEROKU == '1':
DATABASES = {
'default': dj_database_url.config(default=os.environ.get('DATABASE_URL'))
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
#DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
#}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
#Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
edugr87/proyecto-iv
|
TheWeather/settings.py
|
Python
|
artistic-2.0
| 3,944
|
'''
Defines a dialog box for the user to edit a flight.
'''
import wx
from wx.lib import masked
import pytz
import flight
from flight import format_date, format_time
from flight import string_to_datetime, datetime_to_wx
from airport import NoSuchAirportError
import airport
# TODO: move out into utility module
def set_property(name):
def decorator(method):
def fget(self):
return getattr(self, name)
def fset(self, other):
setattr(self, name, other)
method(self)
return property(fget=fget, fset=fset)
return decorator
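# set_property('_x') builds a property whose setter stores the value in
# self._x and then invokes the decorated method; ProxyFlight below uses it to
# keep the quick-entry line and the individual flight fields in sync.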
def string_to_wx_datetime(date_string, context):
return datetime_to_wx(string_to_datetime(date_string, context))
def set_date(wx_datetime, date):
wx_datetime.SetDay(date.GetDay())
wx_datetime.SetMonth(date.GetMonth())
wx_datetime.SetYear(date.GetYear())
def set_time(wx_datetime, time):
wx_datetime.SetHour(time.GetHour())
wx_datetime.SetMinute(time.GetMinute())
wx_datetime.SetSecond(time.GetSecond())
class ProxyFlight(object):
LINE_CHANGED = object()
COMPONENT_CHANGED = object()
def __init__(self, real_flight):
self.flight = real_flight
self.context = real_flight.dept_time
self._line = str(real_flight)
# help out the code analysis tools
(self._departs, self._arrives,
self._dept_time, self._arr_time) = None, None, None, None
self.listeners = []
self._split_line(catch_errors=True)
def _split_line(self, catch_errors=False):
components = None
try:
components = flight.parse_line(self._line)
except ValueError:
if catch_errors:
self._departs = ''
self._arrives = ''
self._dept_time = wx.DateTime.Today()
self._arr_time = wx.DateTime.Today()
return
else:
raise
(self._departs, self._arrives) = (components['departs'],
components['arrives'])
self._dept_time = string_to_wx_datetime(components['dept_time'],
self.context)
self._arr_time = string_to_wx_datetime(components['arr_time'],
self.context)
def _compute_line(self):
values = {
'departs': self.departs,
'dept_time': ' '.join((format_date(self.dept_time),
format_time(self.dept_time))),
'arrives': self.arrives,
'arr_time': ' '.join((format_date(self.arr_time),
format_time(self.arr_time))),
}
self._line = ('%(departs)s %(dept_time)s %(arrives)s %(arr_time)s' %
values)
@set_property('_line')
def line(self):
self.update(ProxyFlight.COMPONENT_CHANGED)
@set_property('_departs')
def departs(self):
self.update(ProxyFlight.LINE_CHANGED)
@set_property('_arrives')
def arrives(self):
self.update(ProxyFlight.LINE_CHANGED)
@set_property('_dept_time')
def dept_time(self):
self.update(ProxyFlight.LINE_CHANGED)
def set_dept_date(self, date):
set_date(self._dept_time, date)
self.update(ProxyFlight.LINE_CHANGED)
def set_dept_time(self, time):
set_time(self._dept_time, time)
self.update(ProxyFlight.LINE_CHANGED)
@set_property('_arr_time')
def arr_time(self):
self.update(ProxyFlight.LINE_CHANGED)
def set_arr_date(self, date):
set_date(self._arr_time, date)
self.update(ProxyFlight.LINE_CHANGED)
def set_arr_time(self, time):
set_time(self._arr_time, time)
self.update(ProxyFlight.LINE_CHANGED)
def add_listener(self, listener):
self.listeners.append(listener)
def update(self, event):
if event is ProxyFlight.COMPONENT_CHANGED:
self._split_line()
elif event is ProxyFlight.LINE_CHANGED:
self._compute_line()
for listener in self.listeners:
listener.on_update(event)
def to_flight(self):
return flight.from_line(self._line)
def assign(self):
self.flight.assign(self.to_flight())
@classmethod
def from_flight(cls, other):
return ProxyFlight(other)
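# timezones() below builds the dropdown choices: a few common US zones first,
# then the remaining US zones, then all other pytz zones, with empty strings
# inserted as visual separators between the groups.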
def timezones():
if AddAirportDialog.TIMEZONES is None:
AddAirportDialog.TIMEZONES = ['US/Pacific',
'US/Mountain',
'US/Central',
'US/Eastern']
used = set(AddAirportDialog.TIMEZONES)
us_timezones = [tz for tz in pytz.all_timezones
if 'US' in tz and tz not in used]
us_timezones.sort()
AddAirportDialog.TIMEZONES.append('')
AddAirportDialog.TIMEZONES += us_timezones
used.update(us_timezones)
other_timezones = [tz for tz in pytz.all_timezones
if tz not in used]
other_timezones.sort()
AddAirportDialog.TIMEZONES.append('')
AddAirportDialog.TIMEZONES += other_timezones
return AddAirportDialog.TIMEZONES
class AddAirportDialog(object):
TIMEZONES = None
def __init__(self, code, parent):
self.ui = wx.Dialog(parent)
self.ui.SetTitle('New Airport')
timezone_combo = wx.Choice(parent=self.ui,
choices=timezones())
self.ui.Bind(wx.EVT_CHOICE, self.on_timezone, timezone_combo)
self.timezone = ''
timezone_sizer = wx.BoxSizer(wx.HORIZONTAL)
timezone_sizer.Add(wx.StaticText(parent=self.ui, label='Timezone: '))
timezone_sizer.Add(timezone_combo, proportion=1)
timezone_sizer.Layout()
self.ok_button = wx.Button(parent=self.ui,
label='Create', id=wx.ID_OK)
self.ui.Bind(wx.EVT_BUTTON, self.on_ok, self.ok_button)
self.ok_button.Enable(False)
cancel_button = wx.Button(parent=self.ui, id=wx.ID_CANCEL)
self.ui.Bind(wx.EVT_BUTTON, self.on_cancel, cancel_button)
button_sizer = wx.StdDialogButtonSizer()
button_sizer.AddButton(self.ok_button)
button_sizer.AddButton(cancel_button)
button_sizer.Realize()
message = ("Airport %s isn't in the list of recognized airport " +
"codes. Create it?") % code
self.code = code
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(wx.StaticText(parent=self.ui, label=message),
flag=wx.ALL, border=5)
sizer.Add(timezone_sizer, flag=wx.ALL, border=5)
sizer.Add(button_sizer, flag=wx.ALL | wx.ALIGN_RIGHT, border=5)
self.ui.SetSizerAndFit(sizer)
def ask(self):
return self.ui.ShowModal()
def on_ok(self, dummy_event):
airport.add_airport(airport.Airport(self.code, self.timezone))
self.ui.EndModal(wx.ID_OK)
def on_cancel(self, dummy_event):
self.ui.EndModal(wx.ID_CANCEL)
def on_timezone(self, event):
self.timezone = event.GetString()
self.ok_button.Enable(self.timezone != '')
class EditDialog(wx.Dialog):
'''
Allows the user to edit a flight that's been entered.
'''
def __init__(self, tracked_flight, *args, **kwargs):
'''
Creates a new dialog box bound to the given flight. Extra
arguments are passed along to the wx.Dialog constructor.
'''
wx.Dialog.__init__(self, *args, **kwargs)
self.proxy_flight = ProxyFlight.from_flight(tracked_flight)
self.proxy_flight.add_listener(self)
self.updating_controls = False
self.updating_line = False
self.SetTitle('Edit flight')
sizer = wx.BoxSizer(wx.VERTICAL)
self.quick_field = wx.TextCtrl(parent=self)
self.quick_field.SetValue(self.proxy_flight.line)
self.Bind(wx.EVT_TEXT, self.QuickFieldChanged, self.quick_field)
controls_sizer = wx.FlexGridSizer(2, 6, 5, 5)
controls_sizer.Add(wx.StaticText(self, label='Departs'),
flag=wx.ALIGN_CENTER_VERTICAL)
self.departs_combo = wx.ComboBox(self,
value=self.proxy_flight.departs)
controls_sizer.Add(self.departs_combo)
controls_sizer.Add(wx.StaticText(self, label='on'),
flag=wx.ALIGN_CENTER_VERTICAL)
self.dept_date_picker = wx.DatePickerCtrl(self, style=wx.DP_DROPDOWN)
controls_sizer.Add(self.dept_date_picker)
controls_sizer.Add(wx.StaticText(self, label='at'),
flag=wx.ALIGN_CENTER_VERTICAL)
self.dept_time_picker = masked.TimeCtrl(self, format='24HHMM')
controls_sizer.Add(self.dept_time_picker)
controls_sizer.Add(wx.StaticText(self, label='Arrives'),
flag=wx.ALIGN_CENTER_VERTICAL)
self.arrives_combo = wx.ComboBox(self,
value=self.proxy_flight.arrives)
controls_sizer.Add(self.arrives_combo)
controls_sizer.Add(wx.StaticText(self, label='on'),
flag=wx.ALIGN_CENTER_VERTICAL)
self.arr_date_picker = wx.DatePickerCtrl(self, style=wx.DP_DROPDOWN)
controls_sizer.Add(self.arr_date_picker)
controls_sizer.Add(wx.StaticText(self, label='at'),
flag=wx.ALIGN_CENTER_VERTICAL)
self.arr_time_picker = masked.TimeCtrl(self, format='24HHMM')
controls_sizer.Add(self.arr_time_picker)
self.UpdateControls()
self.Bind(wx.EVT_TEXT, self.DepartsComboChanged, self.departs_combo)
self.Bind(wx.EVT_DATE_CHANGED, self.DeptDatePickerChanged,
self.dept_date_picker)
self.Bind(masked.EVT_TIMEUPDATE, self.DeptTimePickerChanged,
self.dept_time_picker)
self.Bind(wx.EVT_TEXT, self.ArrivesComboChanged, self.arrives_combo)
self.Bind(wx.EVT_DATE_CHANGED, self.ArrDatePickerChanged,
self.arr_date_picker)
self.Bind(masked.EVT_TIMEUPDATE, self.ArrTimePickerChanged,
self.arr_time_picker)
button_sizer = wx.StdDialogButtonSizer()
button = wx.Button(parent=self, id=wx.ID_OK)
button.SetDefault()
self.Bind(wx.EVT_BUTTON, self.OkButton, button)
button_sizer.AddButton(button)
button = wx.Button(parent=self, id=wx.ID_CANCEL)
button_sizer.AddButton(button)
self.Bind(wx.EVT_BUTTON, self.CancelButton, button)
button_sizer.Realize()
sizer.Add(self.quick_field, flag=wx.GROW | wx.ALL, border=5)
sizer.Add(controls_sizer, flag=wx.ALL, border=5)
sizer.Add(button_sizer, flag=wx.ALL | wx.ALIGN_RIGHT, border=5)
self.SetSizer(sizer)
sizer.Fit(self)
def OkButton(self, _evt):
def commit_action():
self.proxy_flight.assign()
if try_flight(commit_action, self):
self.GetParent().Refresh()
self.Destroy()
def CancelButton(self, _evt):
self.Destroy()
def ErrorMessageBox(self, title, message):
# TODO: refactor (duplicated in listview)
dialog = wx.MessageDialog(self, message, title, wx.OK)
dialog.ShowModal()
dialog.Destroy()
def DepartsComboChanged(self, _evt):
self.proxy_flight.departs = self.departs_combo.GetValue()
self.UpdateComboColors()
def DeptDatePickerChanged(self, _evt):
self.proxy_flight.set_dept_date(self.dept_date_picker.GetValue())
def DeptTimePickerChanged(self, _evt):
self.proxy_flight.set_dept_time(self.dept_time_picker.GetWxDateTime())
def ArrivesComboChanged(self, _evt):
self.proxy_flight.arrives = self.arrives_combo.GetValue()
self.UpdateComboColors()
def ArrDatePickerChanged(self, _evt):
self.proxy_flight.set_arr_date(self.arr_date_picker.GetValue())
def ArrTimePickerChanged(self, _evt):
self.proxy_flight.set_arr_time(self.arr_time_picker.GetWxDateTime())
def QuickFieldChanged(self, evt):
try:
self.proxy_flight.line = evt.GetString()
self.quick_field.SetBackgroundColour(wx.Colour(127, 255, 127))
except ValueError:
self.quick_field.SetBackgroundColour(wx.Colour(255, 127, 127))
self.quick_field.Refresh()
def on_update(self, event):
if event == ProxyFlight.LINE_CHANGED:
if not self.updating_controls:
self.updating_line = True
self.quick_field.SetValue(self.proxy_flight.line)
self.updating_line = False
elif event == ProxyFlight.COMPONENT_CHANGED:
if not self.updating_line:
self.updating_controls = True
self.UpdateControls()
self.updating_controls = False
def UpdateControls(self):
self.departs_combo.SetValue(self.proxy_flight.departs)
self.dept_date_picker.SetValue(self.proxy_flight.dept_time)
self.dept_time_picker.SetWxDateTime(self.proxy_flight.dept_time)
self.arrives_combo.SetValue(self.proxy_flight.arrives)
self.arr_date_picker.SetValue(self.proxy_flight.arr_time)
self.arr_time_picker.SetWxDateTime(self.proxy_flight.arr_time)
self.UpdateComboColors()
def UpdateComboColors(self):
try:
airport.get_airport(self.proxy_flight.departs)
except NoSuchAirportError:
self.departs_combo.SetBackgroundColour(wx.Colour(255, 255, 127))
else:
self.departs_combo.SetBackgroundColour(wx.WHITE)
self.departs_combo.Refresh()
try:
airport.get_airport(self.proxy_flight.arrives)
except NoSuchAirportError:
self.arrives_combo.SetBackgroundColour(wx.Colour(255, 255, 127))
else:
self.arrives_combo.SetBackgroundColour(wx.WHITE)
self.arrives_combo.Refresh()
def try_flight(action, ui_context):
while True:
try:
action()
        except ValueError as err:
            ui_context.ErrorMessageBox('Invalid flight string', str(err))
return False
        except NoSuchAirportError as err:
result = AddAirportDialog(err.code, parent=ui_context).ask()
if result == wx.ID_CANCEL:
return False
else:
return True
|
futurulus/falcon
|
edit.py
|
Python
|
bsd-2-clause
| 14,558
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import xml.etree.ElementTree as ET
from indra.assemblers.pysb import PysbAssembler
import indra.assemblers.pysb.assembler as pa
from indra.assemblers.pysb.assembler import Policy, Param
from indra.assemblers.pysb.preassembler import PysbPreassembler
from indra.statements import *
from pysb import bng, WILD, Monomer, Annotation
from pysb.testing import with_model
from nose.tools import raises
def test_pysb_assembler_complex1():
member1 = Agent('BRAF')
member2 = Agent('MEK1')
stmt = Complex([member1, member2])
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 2
assert len(model.monomers) == 2
def test_pysb_assembler_complex2():
member1 = Agent('BRAF')
member2 = Agent('MEK1')
member3 = Agent('ERK1')
stmt = Complex([member1, member2, member3])
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 6
assert len(model.monomers) == 3
def test_pysb_assembler_complex3():
hras = Agent('HRAS')
member1 = Agent('BRAF', bound_conditions=[BoundCondition(hras, True)])
member2 = Agent('MEK1')
stmt = Complex([member1, member2])
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 2
assert len(model.monomers) == 3
def test_pysb_assembler_complex_twostep():
member1 = Agent('BRAF')
member2 = Agent('MEK1')
stmt = Complex([member1, member2])
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
assert len(model.rules) == 2
assert len(model.monomers) == 2
def test_pysb_assembler_complex_multiway():
member1 = Agent('BRAF')
member2 = Agent('MEK1')
member3 = Agent('ERK1')
stmt = Complex([member1, member2, member3])
pa = PysbAssembler([stmt])
model = pa.make_model(policies='multi_way')
assert len(model.rules) == 2
assert len(model.monomers) == 3
def test_pysb_assembler_actsub():
stmt = ActiveForm(Agent('BRAF', mutations=[MutCondition('600', 'V', 'E')]),
'activity', True)
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
assert len(model.rules) == 0
assert len(model.monomers) == 1
def test_pysb_assembler_phos_noenz():
enz = None
sub = Agent('MEK1')
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 0
assert len(model.monomers) == 0
def test_pysb_assembler_dephos_noenz():
enz = None
sub = Agent('MEK1')
    stmt = Dephosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 0
assert len(model.monomers) == 0
def test_pysb_assembler_phos1():
enz = Agent('BRAF')
sub = Agent('MEK1')
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_pysb_assembler_phos2():
hras = Agent('HRAS')
enz = Agent('BRAF', bound_conditions=[BoundCondition(hras, True)])
sub = Agent('MEK1')
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 3
def test_pysb_assembler_phos3():
hras = Agent('HRAS')
erk1 = Agent('ERK1')
enz = Agent('BRAF', bound_conditions=[BoundCondition(hras, True)])
sub = Agent('MEK1', bound_conditions=[BoundCondition(erk1, True)])
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 4
def test_pysb_assembler_phos4():
hras = Agent('HRAS')
erk1 = Agent('ERK1')
enz = Agent('BRAF', bound_conditions=[BoundCondition(hras, True)])
sub = Agent('MEK1', bound_conditions=[BoundCondition(erk1, False)])
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 4
def test_pysb_assembler_autophos1():
enz = Agent('MEK1')
stmt = Autophosphorylation(enz, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 1
def test_pysb_assembler_autophos2():
raf1 = Agent('RAF1')
enz = Agent('MEK1', bound_conditions=[BoundCondition(raf1, True)])
stmt = Autophosphorylation(enz, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_pysb_assembler_autophos3():
egfr = Agent('EGFR')
enz = Agent('EGFR', bound_conditions=[BoundCondition(egfr, True)])
stmt = Autophosphorylation(enz, 'tyrosine')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 1
def test_pysb_assembler_transphos1():
egfr = Agent('EGFR')
enz = Agent('EGFR', bound_conditions=[BoundCondition(egfr, True)])
stmt = Transphosphorylation(enz, 'tyrosine')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 1
def test_pysb_assembler_act1():
egfr = Agent('EGFR')
subj = Agent('GRB2', bound_conditions=[BoundCondition(egfr, True)])
obj = Agent('SOS1')
stmt = Activation(subj, obj)
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 3
def test_pysb_assembler_dephos1():
phos = Agent('PP2A')
sub = Agent('MEK1')
stmt = Dephosphorylation(phos, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_pysb_assembler_dephos2():
phos = Agent('PP2A')
raf1 = Agent('RAF1')
sub = Agent('MEK1', bound_conditions=[BoundCondition(raf1, True)])
stmt = Dephosphorylation(phos, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 3
def test_pysb_assembler_gef1():
gef = Agent('SOS1')
ras = Agent('HRAS')
stmt = Gef(gef, ras)
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_pysb_assembler_gap1():
gap = Agent('NF1')
ras = Agent('HRAS')
stmt = Gap(gap, ras)
pa = PysbAssembler([stmt])
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_pysb_assembler_actmod1():
mek = Agent('MEK')
erk = Agent('ERK')
stmts = []
mc1 = ModCondition('phosphorylation', 'serine', '218')
mc2 = ModCondition('phosphorylation', 'serine', '222')
stmts.append(ActiveForm(Agent('MEK', mods=[mc1, mc2]), 'activity', True))
stmts.append(Phosphorylation(mek, erk, 'threonine', '185'))
stmts.append(Phosphorylation(mek, erk, 'tyrosine', '187'))
pa = PysbAssembler(stmts)
model = pa.make_model()
assert len(model.rules) == 2
assert len(model.monomers) == 2
model = pa.make_model(policies='two_step')
assert len(model.rules) == 5
def test_pysb_assembler_actmod2():
mek = Agent('MEK', activity=ActivityCondition('activity', True))
erk = Agent('ERK')
stmts = []
stmts.append(ActiveForm(Agent('MEK',
mods=[ModCondition('phosphorylation', 'serine', '218')]),
'activity', True))
stmts.append(ActiveForm(Agent('MEK',
mods=[ModCondition('phosphorylation', 'serine', '222')]),
'activity', True))
stmts.append(Phosphorylation(mek, erk, 'threonine', '185'))
stmts.append(Phosphorylation(mek, erk, 'tyrosine', '187'))
pa = PysbAssembler(stmts)
model = pa.make_model()
assert len(model.rules) == 4
assert len(model.monomers) == 2
model = pa.make_model(policies='two_step')
assert len(model.rules) == 9
def test_pysb_assembler_phos_twostep1():
enz = Agent('BRAF')
sub = Agent('MEK1')
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
assert len(model.rules) == 3
assert len(model.monomers) == 2
def test_pysb_assembler_twostep_mixed():
member1 = Agent('BRAF')
member2 = Agent('RAF1')
st1 = Complex([member1, member2])
st2 = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st1, st2])
pa.make_model(policies='two_step')
assert len(pa.model.rules) == 5
assert len(pa.model.monomers) == 4
def test_pysb_assembler_phos_twostep_local():
enz = Agent('BRAF')
sub = Agent('MEK1')
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
assert len(model.rules) == 3
assert len(model.monomers) == 2
def test_pysb_assembler_phos_twostep_local_to_global():
enz = Agent('BRAF')
sub = Agent('MEK1')
stmt = Phosphorylation(enz, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
# This call should have reverted to default policy
model = pa.make_model()
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_pysb_assembler_dephos_twostep1():
phos = Agent('PP2A')
sub = Agent('MEK1')
stmt = Dephosphorylation(phos, sub, 'serine', '222')
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
assert len(model.rules) == 3
assert len(model.monomers) == 2
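# The next two tests exercise per-statement-type policies: keys name statement
# types, and, as the second test shows, the 'other' key supplies the policy
# for any statement type not explicitly listed.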
def test_statement_specific_policies():
enz = Agent('BRAF')
sub = Agent('MEK1')
phos = Agent('PP2A')
stmt1 = Phosphorylation(enz, sub, 'serine', '222')
stmt2 = Dephosphorylation(phos, sub, 'serine', '222')
policies = {'Phosphorylation': 'two_step',
'Dephosphorylation': 'interactions_only'}
pa = PysbAssembler([stmt1, stmt2])
model = pa.make_model(policies=policies)
assert len(model.rules) == 4
assert len(model.monomers) == 3
def test_unspecified_statement_policies():
enz = Agent('BRAF')
sub = Agent('MEK1')
phos = Agent('PP2A')
stmt1 = Phosphorylation(enz, sub, 'serine', '222')
stmt2 = Dephosphorylation(phos, sub, 'serine', '222')
policies = {'Phosphorylation': 'two_step',
'other': 'interactions_only'}
pa = PysbAssembler([stmt1, stmt2])
model = pa.make_model(policies=policies)
assert len(model.rules) == 4
assert len(model.monomers) == 3
def test_activity_activity():
subj = Agent('KRAS')
obj = Agent('BRAF')
stmt = Activation(subj, obj)
pa = PysbAssembler([stmt])
model = pa.make_model(policies='interactions_only')
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_activity_activity2():
subj = Agent('KRAS')
obj = Agent('BRAF')
stmt = Activation(subj, obj)
pa = PysbAssembler([stmt])
model = pa.make_model(policies='one_step')
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_activity_inhibition():
subj = Agent('Vemurafenib')
obj = Agent('BRAF')
stmt = Inhibition(subj, obj)
pa = PysbAssembler([stmt])
model = pa.make_model(policies='interactions_only')
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_activity_activity3():
subj = Agent('Vemurafenib')
obj = Agent('BRAF')
stmt = Inhibition(subj, obj)
pa = PysbAssembler([stmt])
model = pa.make_model(policies='one_step')
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_rule_name_str_1():
s = pa.get_agent_rule_str(Agent('BRAF'))
assert s == 'BRAF'
def test_rule_name_str_2():
a = Agent('GRB2',
bound_conditions=[BoundCondition(Agent('EGFR'), True)])
s = pa.get_agent_rule_str(a)
assert s == 'GRB2_EGFR'
def test_rule_name_str_3():
a = Agent('GRB2',
bound_conditions=[BoundCondition(Agent('EGFR'), False)])
s = pa.get_agent_rule_str(a)
assert s == 'GRB2_nEGFR'
def test_rule_name_str_4():
a = Agent('BRAF', mods=[ModCondition('phosphorylation', 'serine')])
s = pa.get_agent_rule_str(a)
assert s == 'BRAF_phosphoS'
def test_rule_name_str_5():
a = Agent('BRAF', mods=[ModCondition('phosphorylation', 'serine', '123')])
s = pa.get_agent_rule_str(a)
assert s == 'BRAF_phosphoS123'
def test_neg_act_mod():
mc = ModCondition('phosphorylation', 'serine', '123', False)
st1 = ActiveForm(Agent('BRAF', mods=[mc]), 'activity', True)
braf = Agent('BRAF', activity=ActivityCondition('active', True))
st2 = Phosphorylation(braf, Agent('MAP2K2'))
pa = PysbAssembler([st1, st2])
pa.make_model(policies='one_step')
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'S123': ('u', WILD)}
def test_pos_agent_mod():
mc = ModCondition('phosphorylation', 'serine', '123', True)
st = Phosphorylation(Agent('BRAF', mods=[mc]), Agent('MAP2K2'))
pa = PysbAssembler([st])
pa.make_model(policies='one_step')
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'S123': ('p', WILD)}
def test_neg_agent_mod():
mc = ModCondition('phosphorylation', 'serine', '123', False)
st = Phosphorylation(Agent('BRAF', mods=[mc]), Agent('MAP2K2'))
pa = PysbAssembler([st])
pa.make_model(policies='one_step')
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'S123': ('u', WILD)}
def test_mut():
mut = MutCondition('600', 'V', 'E')
st = Phosphorylation(Agent('BRAF', mutations=[mut]), Agent('MEK'))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'V600': 'E'}
def test_mut_missing1():
mut = MutCondition('600', 'V', None)
st = Phosphorylation(Agent('BRAF', mutations=[mut]), Agent('MEK'))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'V600': 'X'}
def test_mut_missing2():
mut = MutCondition('600', None, 'E')
st = Phosphorylation(Agent('BRAF', mutations=[mut]), Agent('MEK'))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'mut600': 'E'}
def test_mut_missing3():
mut = MutCondition(None, 'V', 'E')
st = Phosphorylation(Agent('BRAF', mutations=[mut]), Agent('MEK'))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'V': 'E'}
def test_mut_missing4():
mut = MutCondition(None, None, None)
st = Phosphorylation(Agent('BRAF', mutations=[mut]), Agent('MEK'))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.monomer.name == 'BRAF'
assert braf.site_conditions == {'mut': 'X'}
def test_agent_loc():
st = Phosphorylation(Agent('BRAF', location='cytoplasm'), Agent('MEK'))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
braf = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert braf.site_conditions == {'loc': 'cytoplasm'}
def test_translocation():
st = Translocation(Agent('FOXO3A'), 'nucleus', 'cytoplasm')
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
f1 = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert f1.site_conditions == {'loc': 'nucleus'}
f2 = r.product_pattern.complex_patterns[0].monomer_patterns[0]
assert f2.site_conditions == {'loc': 'cytoplasm'}
assert r.rate_forward.name == 'kf_foxo3a_nucleus_cytoplasm_1'
def test_translocation_to():
st = Translocation(Agent('FOXO3A'), None, 'nucleus')
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
f1 = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert f1.site_conditions == {'loc': 'cytoplasm'}
f2 = r.product_pattern.complex_patterns[0].monomer_patterns[0]
assert f2.site_conditions == {'loc': 'nucleus'}
assert r.rate_forward.name == 'kf_foxo3a_cytoplasm_nucleus_1'
def test_phos_atpdep():
st = Phosphorylation(Agent('BRAF'), Agent('MEK'), 'S', '222')
pa = PysbAssembler([st])
pa.make_model(policies='atp_dependent')
assert len(pa.model.rules) == 5
def test_set_context():
st = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st])
pa.make_model()
assert pa.model.parameters['MAP2K1_0'].value == pa.default_initial_amount
assert pa.model.parameters['MAPK3_0'].value == pa.default_initial_amount
pa.set_context('A375_SKIN')
assert pa.model.parameters['MAP2K1_0'].value > 10000
assert pa.model.parameters['MAPK3_0'].value > 10000
def test_set_context_monomer_notfound():
st = Phosphorylation(Agent('MAP2K1'), Agent('XYZ'))
pa = PysbAssembler([st])
pa.make_model()
assert pa.model.parameters['MAP2K1_0'].value == pa.default_initial_amount
assert pa.model.parameters['XYZ_0'].value == pa.default_initial_amount
pa.add_default_initial_conditions(100)
assert pa.model.parameters['MAP2K1_0'].value == 100
assert pa.model.parameters['XYZ_0'].value == 100
pa.set_context('A375_SKIN')
assert pa.model.parameters['MAP2K1_0'].value > 10000
assert pa.model.parameters['XYZ_0'].value == pa.default_initial_amount
def test_set_context_celltype_notfound():
st = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st])
pa.make_model()
pa.set_context('XYZ')
def test_annotation():
st = Phosphorylation(Agent('BRAF', db_refs = {'UP': 'P15056'}),
Agent('MAP2K2', db_refs = {'HGNC': '6842'}))
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.annotations) == 5
def test_annotation_regamount():
st1 = IncreaseAmount(Agent('BRAF', db_refs = {'UP': 'P15056'}),
Agent('MAP2K2', db_refs = {'HGNC': '6842'}))
st2 = DecreaseAmount(Agent('BRAF', db_refs = {'UP': 'P15056'}),
Agent('MAP2K2', db_refs = {'HGNC': '6842'}))
pa = PysbAssembler([st1, st2])
pa.make_model()
assert len(pa.model.annotations) == 8
def test_print_model():
st = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st])
pa.make_model()
pa.save_model('/dev/null')
def test_save_rst():
st = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st])
pa.make_model()
pa.save_rst('/dev/null')
def test_export_model():
st = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st])
pa.make_model()
exp_str = pa.export_model('kappa')
assert exp_str
exp_str = pa.export_model('bngl')
assert exp_str
exp_str = pa.export_model('sbml', file_name='/dev/null')
assert exp_str
def test_assemble_export_sbgn():
# Add various statements to test their assembly
st = Phosphorylation(Agent('BRAF'), Agent('MAP2K1'))
mc = ModCondition('phosphorylation', None, None, True)
st2 = Activation(Agent('MAP2K1', mods=[mc]), Agent('MAPK1'))
st3 = Complex([Agent('MAPK1'), Agent('DUSP6')])
st4 = DecreaseAmount(None, Agent('DUSP6'))
pa = PysbAssembler([st, st2, st3, st4])
pa.make_model()
# Export to SBGN
model = pa.export_model('sbgn')
assert model is not None
# Test that the right elements are there in the result
et = ET.fromstring(model)
from indra.assemblers.sbgn.assembler import sbgn_ns
sbgn_nss = {'s': sbgn_ns}
glyphs = et.findall('s:map/s:glyph', namespaces=sbgn_nss)
glyph_classes = [g.attrib.get('class') for g in glyphs]
assert glyph_classes.count('macromolecule') == 6
assert glyph_classes.count('complex') == 2
assert glyph_classes.count('process') == 10
return pa
def test_name_standardize():
n = pa._n('.*/- ^&#@$')
assert isinstance(n, str)
assert n == '__________'
n = pa._n('14-3-3')
assert isinstance(n, str)
assert n == 'p14_3_3'
n = pa._n('\U0001F4A9bar')
assert isinstance(n, str)
assert n == 'bar'
def test_generate_equations():
st = Phosphorylation(Agent('MAP2K1'), Agent('MAPK3'))
pa = PysbAssembler([st])
pa.make_model()
bng.generate_equations(pa.model)
def test_non_python_name_phos():
st = Phosphorylation(Agent('14-3-3'), Agent('BRAF kinase'))
pa = PysbAssembler([st])
pa.make_model()
names = [m.name for m in pa.model.monomers]
assert 'BRAF_kinase' in names
assert 'p14_3_3' in names
bng.generate_equations(pa.model)
def test_non_python_name_bind():
st = Complex([Agent('14-3-3'), Agent('BRAF kinase')])
pa = PysbAssembler([st])
pa.make_model()
bng.generate_equations(pa.model)
def test_decreaseamount_one_step():
subj = Agent('KRAS')
obj = Agent('BRAF')
st1 = DecreaseAmount(subj, obj)
st2 = DecreaseAmount(None, obj)
pa = PysbAssembler([st1, st2])
model = pa.make_model(policies='one_step')
assert len(model.rules) == 2
assert len(model.monomers) == 2
def test_decreaseamount_interactions_only():
subj = Agent('KRAS')
obj = Agent('BRAF')
st1 = DecreaseAmount(subj, obj)
st2 = DecreaseAmount(None, obj)
pa = PysbAssembler([st1, st2])
model = pa.make_model(policies='interactions_only')
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_increaseamount_one_step():
subj = Agent('KRAS')
obj = Agent('BRAF')
st1 = IncreaseAmount(subj, obj)
st2 = IncreaseAmount(None, obj)
pa = PysbAssembler([st1, st2])
model = pa.make_model(policies='one_step')
assert len(model.rules) == 2
assert len(model.monomers) == 2
def test_increaseamount_interactions_only():
subj = Agent('KRAS')
obj = Agent('BRAF')
st1 = IncreaseAmount(subj, obj)
st2 = IncreaseAmount(None, obj)
pa = PysbAssembler([st1, st2])
model = pa.make_model(policies='interactions_only')
assert len(model.rules) == 1
assert len(model.monomers) == 2
def test_missing_catalytic_default_site():
c8 = Agent('CASP8', activity=ActivityCondition('catalytic', True))
c3 = Agent('CASP3')
stmt = Activation(c8, c3, 'catalytic')
# Interactions only
pa = PysbAssembler([stmt])
model = pa.make_model(policies='interactions_only')
# One step
pa = PysbAssembler([stmt])
model = pa.make_model(policies='one_step')
# Two step
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
def test_missing_transcription_default_site():
p53 = Agent('TP53', activity=ActivityCondition('transcription', True))
bax = Agent('BAX')
stmt = Activation(p53, bax)
# Interactions only
pa = PysbAssembler([stmt])
model = pa.make_model(policies='interactions_only')
# One step
pa = PysbAssembler([stmt])
model = pa.make_model(policies='one_step')
# Two step
pa = PysbAssembler([stmt])
model = pa.make_model(policies='two_step')
def test_translocation_loc_special_char():
st = Translocation(Agent('KSR1'), 'cytoplasm', 'cell surface')
pa = PysbAssembler([st])
pa.make_model()
assert len(pa.model.rules) == 1
r = pa.model.rules[0]
f1 = r.reactant_pattern.complex_patterns[0].monomer_patterns[0]
assert f1.site_conditions == {'loc': 'cytoplasm'}
f2 = r.product_pattern.complex_patterns[0].monomer_patterns[0]
assert f2.site_conditions == {'loc': 'cell_surface'}
assert r.rate_forward.name == 'kf_ksr1_cytoplasm_cell_surface_1'
def test_parse_identifiers_url():
url1 = 'http://identifiers.org/foo/bar'
url2 = 'http://identifiers.org/hgnc/12345'
url3 = 'http://identifiers.org/hgnc/HGNC:12345'
url4 = 'http://identifiers.org/uniprot/12345'
url5 = 'http://identifiers.org/chebi/12345'
url6 = 'http://identifiers.org/interpro/12345'
url7 = 'http://identifiers.org/pfam/12345'
(ns, id) = pa.parse_identifiers_url(url1)
assert ns is None and id is None
(ns, id) = pa.parse_identifiers_url(url2)
assert ns is None and id is None
(ns, id) = pa.parse_identifiers_url(url3)
assert ns == 'HGNC' and id == '12345'
(ns, id) = pa.parse_identifiers_url(url4)
assert ns == 'UP' and id == '12345'
(ns, id) = pa.parse_identifiers_url(url5)
assert ns == 'CHEBI' and id == '12345'
(ns, id) = pa.parse_identifiers_url(url6)
assert ns == 'IP' and id == '12345'
(ns, id) = pa.parse_identifiers_url(url7)
assert ns == 'XFAM' and id == '12345'
@with_model
def test_get_mp_with_grounding():
foo = Agent('Foo', db_refs={'HGNC': 'foo'})
a = Agent('A', db_refs={'HGNC': '6840'})
b = Agent('B', db_refs={'HGNC': '6871'})
Monomer('A_monomer')
Monomer('B_monomer')
Annotation(A_monomer, 'http://identifiers.org/hgnc/HGNC:6840')
Annotation(B_monomer, 'http://identifiers.org/hgnc/HGNC:6871')
mps = list(pa.grounded_monomer_patterns(model, foo))
assert len(mps) == 0
mps = list(pa.grounded_monomer_patterns(model, a))
assert len(mps) == 1
assert mps[0].monomer == A_monomer
mps = list(pa.grounded_monomer_patterns(model, b))
assert len(mps) == 1
assert mps[0].monomer == B_monomer
@with_model
def test_get_mp_with_grounding_2():
a1 = Agent('A', mods=[ModCondition('phosphorylation', None, None)],
db_refs={'HGNC': '6840'})
a2 = Agent('A', mods=[ModCondition('phosphorylation', 'Y', '187')],
db_refs={'HGNC': '6840'})
Monomer('A_monomer', ['phospho', 'T185', 'Y187'],
{'phospho': 'y', 'T185':['u', 'p'], 'Y187':['u','p']})
Annotation(A_monomer, 'http://identifiers.org/hgnc/HGNC:6840')
A_monomer.site_annotations = [
Annotation(('phospho', 'y'), 'phosphorylation', 'is_modification'),
Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
Annotation(('Y187', 'p'), 'phosphorylation', 'is_modification'),
Annotation('T185', 'T', 'is_residue'),
Annotation('T185', '185', 'is_position'),
Annotation('Y187', 'Y', 'is_residue'),
Annotation('Y187', '187', 'is_position')
]
mps_1 = list(pa.grounded_monomer_patterns(model, a1))
assert len(mps_1) == 3
mps_2 = list(pa.grounded_monomer_patterns(model, a2))
assert len(mps_2) == 1
mp = mps_2[0]
assert mp.monomer == A_monomer
assert mp.site_conditions == {'Y187': ('p', WILD)}
# TODO Add test for unmodified agent!
# TODO Add test involving multiple (possibly degenerate) modifications!
# TODO Add test for generic double phosphorylation
def test_phospho_assemble_grounding():
a = Agent('MEK1', db_refs={'HGNC': '6840'})
b = Agent('ERK2', db_refs={'HGNC': '6871'})
b_phos = Agent('Foo', mods=[ModCondition('phosphorylation', None, None)],
db_refs={'HGNC': '6871'})
st1 = Phosphorylation(a, b, 'T', '185')
# One step
def check_policy(policy):
pysb_asmb = pa.PysbAssembler([st1])
model = pysb_asmb.make_model(policies=policy)
mps = list(pa.grounded_monomer_patterns(model, b_phos))
assert len(mps) == 1
assert mps[0].monomer.name == 'ERK2'
assert mps[0].site_conditions == {'T185': ('p', WILD)}
for policy in ('one_step', 'interactions_only', 'two_step',
'atp_dependent'):
check_policy(policy)
def test_phospho_mod_grounding():
a = Agent('MEK1', mods=[ModCondition('phosphorylation', 'S', '218'),
ModCondition('phosphorylation', 'S', '222')],
db_refs={'HGNC': '6840'})
b = Agent('ERK2', db_refs={'HGNC': '6871'})
a_phos = Agent('Foo', mods=[ModCondition('phosphorylation', None, None)],
db_refs={'HGNC': '6840'})
st1 = Phosphorylation(a, b, 'T', '185')
pysb_asmb = pa.PysbAssembler([st1])
model = pysb_asmb.make_model(policies='one_step')
mps = list(pa.grounded_monomer_patterns(model, a_phos))
assert len(mps) == 2
assert mps[0].monomer.name == 'MEK1'
assert mps[1].monomer.name == 'MEK1'
sc = [mp.site_conditions for mp in mps]
assert {'S218': ('p', WILD)} in sc
assert {'S222': ('p', WILD)} in sc
# Check if we get the doubly phosphorylated MonomerPattern
mps = list(pa.grounded_monomer_patterns(model, a))
assert len(mps) == 1
assert mps[0].monomer.name == 'MEK1'
assert mps[0].site_conditions == {'S218': ('p', WILD),
'S222': ('p', WILD)}
def test_multiple_grounding_mods():
mek = Agent('MEK1', db_refs={'HGNC': '6840'})
erk = Agent('ERK2', db_refs={'HGNC': '6871'})
cbl = Agent('CBL', db_refs={'HGNC': '1541'})
ub_phos_erk = Agent('ERK2',
mods=[ModCondition('phosphorylation', None, None),
ModCondition('ubiquitination', None, None)],
db_refs={'HGNC': '6871'})
st1 = Phosphorylation(mek, erk, 'T', '185')
st2 = Phosphorylation(mek, erk, 'Y', '187')
st3 = Ubiquitination(cbl, erk, 'K', '40')
st4 = Ubiquitination(cbl, erk, 'K', '50')
pysb_asmb = pa.PysbAssembler([st1, st2, st3, st4])
model = pysb_asmb.make_model(policies='one_step')
mps = list(pa.grounded_monomer_patterns(model, ub_phos_erk))
assert len(mps) == 4
assert mps[0].monomer.name == 'ERK2'
assert mps[1].monomer.name == 'ERK2'
assert mps[2].monomer.name == 'ERK2'
assert mps[3].monomer.name == 'ERK2'
def test_grounded_active_pattern():
a = Agent('A', db_refs={'HGNC': '1234'})
b = Agent('B', db_refs={'HGNC': '5678'})
b_phos = Agent('B', mods=[ModCondition('phosphorylation', 'S', '100')],
db_refs={'HGNC': '5678'})
b_act = Agent('B', activity=ActivityCondition('activity', True),
db_refs={'HGNC': '5678'})
st1 = Phosphorylation(a, b, 'S', '100')
st2 = ActiveForm(b_phos, 'activity', True)
pysba = PysbAssembler([st1, st2])
model = pysba.make_model(policies='one_step')
mps = list(pa.grounded_monomer_patterns(model, b_act))
def _check_mod_assembly(mod_class):
subj = Agent('KRAS')
obj = Agent('BRAF')
st1 = mod_class(subj, obj)
pa = PysbAssembler([st1])
model = pa.make_model(policies='interactions_only')
assert len(model.rules) == 1
assert len(model.monomers) == 2
pa = PysbAssembler([st1])
model = pa.make_model(policies='one_step')
assert len(model.rules) == 1
assert len(model.monomers) == 2
pa = PysbAssembler([st1])
model = pa.make_model(policies='two_step')
assert len(model.rules) == 3
assert len(model.monomers) == 2
def test_modification_assembly():
classes = AddModification.__subclasses__() + \
RemoveModification.__subclasses__()
for mod_class in classes:
_check_mod_assembly(mod_class)
def test_rule_annotation():
a = Agent('A', db_refs={'HGNC': '1234'})
b = Agent('B', db_refs={'HGNC': '5678'})
def check_rule_annotation(stmt, policy):
pa = PysbAssembler([stmt])
model = pa.make_model(policies=policy)
subj = [ann.object for ann in model.annotations
if ann.predicate == 'rule_has_subject']
obj = [ann.object for ann in model.annotations
if ann.predicate == 'rule_has_object']
assert len(subj) == 1
assert subj[0] == 'A'
assert len(obj) == 1
assert obj[0] == 'B'
classes = AddModification.__subclasses__() + \
RemoveModification.__subclasses__()
for mod_class in classes:
stmt = mod_class(a, b)
check_rule_annotation(stmt, 'one_step')
check_rule_annotation(stmt, 'two_step')
# Check ATP dependent phosphorylation
stmt = Phosphorylation(a, b)
check_rule_annotation(stmt, 'atp_dependent')
stmt = Activation(a, b)
check_rule_annotation(stmt, 'one_step')
    # Skip Autophosphorylation and Transphosphorylation for now
    # Gef
    # Gap
def test_activeform_site():
a = Agent('A', db_refs={'HGNC': '1234'})
b = Agent('B', db_refs={'HGNC': '5678'})
b_phos = Agent('B', mods=[ModCondition('phosphorylation', 'Y', '200')],
db_refs={'HGNC': '5678'})
st1 = Phosphorylation(a, b, 'S', '100')
st2 = ActiveForm(b_phos, 'kinase', True)
pa = PysbAssembler([st1, st2])
model = pa.make_model(policies='one_step')
# TODO Do the same for mutation condition
# TODO Localization condition
# TODO Bound condition
# TODO Unphosphorylated/unmodified forms (try ubiquitinated/acetylated lysine)
def test_activation_subj1():
"""No subject activity is defined."""
st = Activation(Agent('a'), Agent('b'))
pa = PysbAssembler([st])
pa.make_model()
assert pa.model.monomers['a'].sites == []
left = pa.model.rules[0].reactant_pattern
subj_left = left.complex_patterns[0].monomer_patterns[0]
right = pa.model.rules[0].product_pattern
subj_right = right.complex_patterns[0].monomer_patterns[0]
assert subj_left.site_conditions == {}
assert subj_right.site_conditions == {}
def test_activation_subj2():
"""Subject activity is defined explicitly."""
a_act = Agent('a', activity=ActivityCondition('activity', True))
st = Activation(a_act, Agent('b'))
st2 = ActiveForm(Agent('a', mods=[ModCondition('phosphorylation')]),
'activity', True)
pa = PysbAssembler([st, st2])
pa.make_model()
assert pa.model.monomers['a'].sites == ['phospho']
left = pa.model.rules[0].reactant_pattern
subj_left = left.complex_patterns[0].monomer_patterns[0]
right = pa.model.rules[0].product_pattern
subj_right = right.complex_patterns[0].monomer_patterns[0]
assert subj_left.site_conditions == {u'phospho': (u'p', WILD)}
assert subj_right.site_conditions == {u'phospho': (u'p', WILD)}
def test_activation_subj3():
"""Subject activity is defined implicitly by another statement."""
a_act = Agent('a', activity=ActivityCondition('activity', True))
st = Activation(a_act, Agent('b'))
st2 = Activation(Agent('c'), Agent('a'))
pa = PysbAssembler([st, st2])
pa.make_model()
assert len(pa.model.rules) == 2
assert pa.model.monomers['a'].sites == ['activity']
left = pa.model.rules[0].reactant_pattern
subj_left = left.complex_patterns[0].monomer_patterns[0]
right = pa.model.rules[0].product_pattern
subj_right = right.complex_patterns[0].monomer_patterns[0]
assert subj_left.site_conditions == {u'activity': (u'active')}
assert subj_right.site_conditions == {u'activity': (u'active')}
def test_activation_subj4():
"""Subject activity is defined both explicitly and implicitly."""
a_act = Agent('a', activity=ActivityCondition('activity', True))
st = Activation(a_act, Agent('b'))
st2 = Activation(Agent('c'), Agent('a'))
st3 = ActiveForm(Agent('a', mods=[ModCondition('phosphorylation')]),
'activity', True)
pa = PysbAssembler([st, st2, st3])
pa.make_model()
assert set(pa.model.monomers['a'].sites) == set(['activity', 'phospho'])
left = pa.model.rules[0].reactant_pattern
subj_left = left.complex_patterns[0].monomer_patterns[0]
right = pa.model.rules[0].product_pattern
subj_right = right.complex_patterns[0].monomer_patterns[0]
assert subj_left.site_conditions == {u'phospho': (u'p', WILD)}
assert subj_right.site_conditions == {u'phospho': (u'p', WILD)}
def test_pysb_preassembler_replace_activities1():
st1 = ActiveForm(Agent('a', location='nucleus'), 'activity', True)
st2 = Phosphorylation(Agent('a',
activity=ActivityCondition('activity', True)),
Agent('b'))
ppa = PysbPreassembler([st1, st2])
ppa.replace_activities()
assert len(ppa.statements) == 2
assert ppa.statements[1].enz.location == 'nucleus'
def test_pysb_preassembler_replace_activities2():
a_act = Agent('a', activity=ActivityCondition('activity', True))
st = Activation(a_act, Agent('b'))
st2 = Activation(Agent('c'), Agent('a'))
ppa = PysbPreassembler([st, st2])
ppa.replace_activities()
assert len(ppa.statements) == 2
def test_pysb_preassembler_replace_activities3():
p = Agent('PPP2CA')
bc = BoundCondition(p, False)
erk = Agent('ERK')
mek1 = Agent('MEK', mods=[ModCondition('phosphorylation',
None, None, True)])
mek2 = Agent('MEK', activity=ActivityCondition('activity', True),
bound_conditions=[bc])
st2 = ActiveForm(mek1, 'activity', True)
st1 = Phosphorylation(mek2, erk)
ppa = PysbPreassembler([st1, st2])
ppa.replace_activities()
assert len(ppa.statements) == 2
assert ppa.statements[0].enz.mods
assert ppa.statements[0].enz.bound_conditions
def test_phos_michaelis_menten():
stmt = Phosphorylation(Agent('MEK'), Agent('ERK'))
pa = PysbAssembler([stmt])
pa.make_model(policies='michaelis_menten')
assert len(pa.model.parameters) == 4
def test_deubiq_michaelis_menten():
stmt = Deubiquitination(Agent('MEK'), Agent('ERK'))
pa = PysbAssembler([stmt])
pa.make_model(policies='michaelis_menten')
assert len(pa.model.parameters) == 4
def test_act_michaelis_menten():
stmt = Activation(Agent('MEK'), Agent('ERK'))
stmt2 = Inhibition(Agent('DUSP'), Agent('ERK'))
pa = PysbAssembler([stmt, stmt2])
pa.make_model(policies='michaelis_menten')
assert len(pa.model.parameters) == 7
def test_increaseamount_hill():
stmt = IncreaseAmount(Agent('TP53'), Agent('MDM2'))
pa = PysbAssembler([stmt])
pa.make_model(policies='hill')
pa.save_model()
assert len(pa.model.parameters) == 5
def test_convert_nosubj():
stmt = Conversion(None, [Agent('PIP2')], [Agent('PIP3')])
pa = PysbAssembler([stmt])
pa.make_model()
assert len(pa.model.parameters) == 3
assert len(pa.model.rules) == 1
assert len(pa.model.monomers) == 2
def test_convert_subj():
stmt = Conversion(Agent('PIK3CA'), [Agent('PIP2')], [Agent('PIP3')])
pa = PysbAssembler([stmt])
pa.make_model()
assert len(pa.model.parameters) == 4
assert len(pa.model.rules) == 1
assert len(pa.model.monomers) == 3
def test_activity_agent_rule_name():
stmt = Phosphorylation(Agent('BRAF',
activity=ActivityCondition('kinase',
True)),
Agent('MAP2K1',
activity=ActivityCondition('activity',
False)))
pa = PysbAssembler([stmt])
pa.make_model()
assert pa.model.rules[0].name == \
'BRAF_kin_phosphorylation_MAP2K1_act_inact_phospho', \
pa.model.rules[0].name
def test_policy_object():
stmt = Phosphorylation(Agent('a'), Agent('b'))
pa = PysbAssembler([stmt])
pol = Policy('two_step')
model = pa.make_model(policies={stmt.uuid: pol})
assert len(model.rules) == 3
assert str(pol) == 'Policy(two_step)'
def test_policy_parameters():
pol = Policy('two_step', parameters={'kf': Param('a', 1.0),
'kr': Param('b', 2.0),
'kc': Param('c', 3.0)})
# Make sure we can correctly stringify here
assert str(pol)
stmt = Deubiquitination(Agent('a'), Agent('b'))
pa = PysbAssembler([stmt])
model = pa.make_model(policies={stmt.uuid: pol})
assert model.parameters['c'].value == 3.0
@raises(pa.UnknownPolicyError)
def test_policy_object_invalid():
stmt = Phosphorylation(Agent('a'), Agent('b'))
pa = PysbAssembler([stmt])
model = pa.make_model(policies={'xyz': Policy('two_step')})
assert len(model.rules) == 3
def test_mod_parameter():
stmt = Phosphorylation(Agent('a'), Agent('b'))
pol = Policy('one_step', parameters={'kf': Param('my_kf_param', 0.99)})
pa = PysbAssembler([stmt])
model = pa.make_model(policies={stmt.uuid: pol})
assert model.parameters['my_kf_param'].value == 0.99
def test_policy_multiple():
pol1 = Policy('michaelis_menten', parameters={'Km': Param('my_Km', 1.0),
'kc': Param('my_kc', 1e-1)})
pol2 = Policy('one_step', parameters={'kf': Param('d', 10.0)})
stmt1 = Inhibition(Agent('a'), Agent('b'))
stmt2 = Translocation(Agent('a'), 'cytoplasm', 'nucleus')
pa = PysbAssembler([stmt1, stmt2])
model = pa.make_model(policies={stmt1.uuid: pol1,
stmt2.uuid: pol2})
assert model.parameters['d'].value == 10.0
print(model.expressions['a_deactivates_b_activity_rate'])
print(model.rules)
def test_kappa_im_export():
stmts = [Activation(Agent('a'), Agent('b')),
Activation(Agent('b',
activity=ActivityCondition('activity', True)),
Agent('c'))]
pa = PysbAssembler(stmts)
pa.make_model()
graph = pa.export_model('kappa_im', '/dev/null')
assert len(graph.nodes) == 2
assert len(graph.edges) == 1
def test_kappa_cm_export():
stmts = [Complex([Agent('a'), Agent('b')])]
pa = PysbAssembler(stmts)
pa.make_model()
graph = pa.export_model('kappa_cm', '/dev/null')
assert len(graph.nodes()) == 2
assert len(graph.edges()) == 1
|
pvtodorov/indra
|
indra/tests/test_pysb_assembler.py
|
Python
|
bsd-2-clause
| 42,841
|
"""
To flash the msp430 board, the following program can be used:
http://www.ti.com/tool/msp430-flasher
"""
from .arch import Msp430Arch
__all__ = ["Msp430Arch"]
|
windelbouwman/ppci-mirror
|
ppci/arch/msp430/__init__.py
|
Python
|
bsd-2-clause
| 166
|
#!/usr/bin/env python
__author__ = 'Casey Bryant'
from nose.plugins.attrib import attr
from pyon.datastore.datastore import DatastoreManager, DatastoreFactory
from pyon.core.bootstrap import CFG
from pyon.util.int_test import IonIntegrationTestCase
@attr('INT', group='datastore')
class TestCoverage(IonIntegrationTestCase):
def setUp(self):
self._start_container()
def test_coverage(self):
datastore = DatastoreFactory.get_datastore(datastore_name='coverage', config=CFG)
table_name = datastore._get_datastore_name()
delete_statement = ''.join(['DELETE FROM ', table_name, " WHERE id='test'"])
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(delete_statement)
statement = ''.join(['INSERT into ', table_name, " (id, name) VALUES ('test', 'insert')"])
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(statement)
statement = ''.join(['SELECT id, name FROM ', table_name, " WHERE id='test'"])
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(statement)
self.assertGreater(cur.rowcount, 0)
row = cur.fetchone()
self.assertEqual(row[0], 'test')
self.assertEqual(row[1], 'insert')
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(delete_statement)
def test_spans(self):
datastore = DatastoreFactory.get_datastore(datastore_name='coverage_spans', config=CFG)
table_name = datastore._get_datastore_name()
delete_statement = ''.join(['DELETE FROM ', table_name, " WHERE span_address='test'"])
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(delete_statement)
statement = ''.join(['INSERT into ', table_name, " (span_address, coverage_id, vertical_range) ",
"VALUES ('test', 'cov_1', '[1.0, 2.2]')"])
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(statement)
statement = ''.join(['SELECT span_address, coverage_id, lower(vertical_range), upper(vertical_range) FROM ',
table_name, " WHERE span_address='test'"])
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(statement)
self.assertGreater(cur.rowcount, 0)
row = cur.fetchone()
self.assertEqual(row[0], 'test')
self.assertEqual(row[1], 'cov_1')
self.assertEqual(float(row[2]), float(1.0))
self.assertEqual(float(row[3]), float(2.2))
with datastore.pool.cursor(**datastore.cursor_args) as cur:
cur.execute(delete_statement)
|
ooici/pyon
|
pyon/ion/test/test_coverage.py
|
Python
|
bsd-2-clause
| 2,789
|
"""
It contains all the projects that can be ignored in the graph. For example,
there are many Mozilla libs with large download stats which are not really
used by the community. See issue #22
"""
FLAGS = {
"rackspace-novaclient":
"https://github.com/rackerlabs/rackspace-novaclient",
"manifestparser": "https://pypi.org/project/manifestparser",
"mozrunner": "https://pypi.org/project/mozrunner",
"moznetwork": "https://pypi.org/project/moznetwork",
"mozdevice": "https://pypi.org/project/mozdevice",
"mozprofile": "https://pypi.org/project/mozprofile",
"mozprocess": "https://pypi.org/project/mozprocess",
"mozfile": "https://pypi.org/project/mozfile",
"mozinfo": "https://pypi.org/project/mozinfo",
"mozlog": "https://pypi.org/project/mozlog",
"mozcrash": "https://pypi.org/project/mozcrash",
"mozhttpd": "https://pypi.org/project/mozhttpd",
"moztest": "https://pypi.org/project/moztest",
"mozversion": "https://pypi.org/project/mozversion",
"marionette_client": "https://pypi.org/project/marionette_client",
"marionette-transport": "https://pypi.org/project/marionette-transport",
}
|
chhantyal/py3readiness
|
src/flags.py
|
Python
|
bsd-2-clause
| 1,145
|
"""
Renderers
"""
from collections import OrderedDict
from rest_framework import renderers
from . import utils
class JSONRenderer(renderers.JSONRenderer):
"""
Render a JSON response per the JSON API spec:
{
"data": [{
"type": "companies",
"id": 1,
"attributes": {
"name": "Mozilla",
"slug": "mozilla",
"date-created": "2014-03-13 16:33:37"
}
}, {
"type": "companies",
"id": 2,
...
}]
}
"""
media_type = 'application/vnd.api+json'
format = 'vnd.api+json'
def render(self, data, accepted_media_type=None, renderer_context=None):
view = renderer_context.get("view", None)
request = renderer_context.get("request", None)
from rest_framework_json_api.views import RelationshipView
if isinstance(view, RelationshipView):
# Special case for RelationshipView
render_data = OrderedDict([
('data', data)
])
links = view.get_links()
if links:
                render_data.update({'links': links})
return super(JSONRenderer, self).render(
render_data, accepted_media_type, renderer_context
)
# Get the resource name.
resource_name = utils.get_resource_name(renderer_context)
        # If `resource_name` is set to None then render the default output, as
        # the dev wants to build the output format manually.
if resource_name is None or resource_name is False:
return super(JSONRenderer, self).render(
data, accepted_media_type, renderer_context
)
# If this is an error response, skip the rest.
if resource_name == 'errors':
if len(data) > 1 and isinstance(data, list):
data.sort(key=lambda x: x.get('source', {}).get('pointer', ''))
return super(JSONRenderer, self).render(
{resource_name: data}, accepted_media_type, renderer_context
)
json_api_included = list()
if view and hasattr(view, 'action') and view.action == 'list' and \
isinstance(data, dict) and 'results' in data:
# If detail view then json api spec expects dict, otherwise a list
# - http://jsonapi.org/format/#document-top-level
# The `results` key may be missing if unpaginated or an OPTIONS request
results = data["results"]
resource_serializer = results.serializer
# Get the serializer fields
fields = utils.get_serializer_fields(resource_serializer)
json_api_data = list()
for position in range(len(results)):
resource = results[position] # Get current resource
resource_instance = resource_serializer.instance[position] # Get current instance
json_api_data.append(
utils.build_json_resource_obj(fields, resource, resource_instance, resource_name))
included = utils.extract_included(fields, resource, resource_instance)
if included:
json_api_included.extend(included)
else:
# Check if data contains a serializer
if hasattr(data, 'serializer'):
fields = utils.get_serializer_fields(data.serializer)
resource_instance = data.serializer.instance
json_api_data = utils.build_json_resource_obj(fields, data, resource_instance, resource_name)
included = utils.extract_included(fields, data, resource_instance)
if included:
json_api_included.extend(included)
else:
json_api_data = data
# Make sure we render data in a specific order
render_data = OrderedDict()
if isinstance(data, dict) and data.get('links'):
render_data['links'] = data.get('links')
# format the api root link list
if view.__class__ and view.__class__.__name__ == 'APIRoot':
render_data['data'] = None
render_data['links'] = json_api_data
else:
render_data['data'] = json_api_data
if len(json_api_included) > 0:
# Iterate through compound documents to remove duplicates
seen = set()
unique_compound_documents = list()
for included_dict in json_api_included:
type_tuple = tuple((included_dict['type'], included_dict['id']))
if type_tuple not in seen:
seen.add(type_tuple)
unique_compound_documents.append(included_dict)
# Sort the items by type then by id
render_data['included'] = sorted(unique_compound_documents, key=lambda item: (item['type'], item['id']))
if isinstance(data, dict) and data.get('meta'):
render_data['meta'] = data.get('meta')
return super(JSONRenderer, self).render(
render_data, accepted_media_type, renderer_context
)
|
hnakamur/django-rest-framework-json-api
|
rest_framework_json_api/renderers.py
|
Python
|
bsd-2-clause
| 5,181
|
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Written (W) 2013 Heiko Strathmann
Written (W) 2013 Dino Sejdinovic
"""
from kameleon_mcmc.distribution.Flower import Flower
from kameleon_mcmc.experiments.ClusterTools import ClusterTools
from kameleon_mcmc.experiments.SingleChainExperiment import SingleChainExperiment
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolis import AdaptiveMetropolis
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis
from numpy.lib.twodim_base import eye
from numpy.ma.core import zeros
import os
import sys
if __name__ == '__main__':
if len(sys.argv) != 3:
print "usage:", str(sys.argv[0]).split(os.sep)[-1], "<experiment_dir_base> <number_of_experiments>"
print "example:"
print "python " + str(sys.argv[0]).split(os.sep)[-1] + " /nfs/home1/ucabhst/kameleon_experiments/ 3"
exit()
experiment_dir_base = str(sys.argv[1])
n = int(str(sys.argv[2]))
# loop over parameters here
experiment_dir = experiment_dir_base + str(os.path.abspath(sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
print "running experiments", n, "times at base", experiment_dir
distribution = Flower(amplitude=6, frequency=6, variance=1, radius=10, dimension=8)
sigma = 5
print "using sigma", sigma
kernel = GaussianKernel(sigma=sigma)
for i in range(n):
mcmc_samplers = []
burnin=60000
num_iterations=120000
mcmc_samplers.append(KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin))
mean_est = zeros(distribution.dimension, dtype="float64")
cov_est = 1.0 * eye(distribution.dimension)
mcmc_samplers.append(AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est))
mcmc_samplers.append(AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est))
mcmc_samplers.append(StandardMetropolis(distribution, cov=cov_est))
start = zeros(distribution.dimension, dtype="float64")
mcmc_params = MCMCParams(start=start, num_iterations=num_iterations, burnin=burnin)
mcmc_chains = [MCMCChain(mcmc_sampler, mcmc_params) for mcmc_sampler in mcmc_samplers]
for mcmc_chain in mcmc_chains:
mcmc_chain.append_mcmc_output(StatisticsOutput())
experiments = [SingleChainExperiment(mcmc_chain, experiment_dir) for mcmc_chain in mcmc_chains]
for experiment in experiments:
ClusterTools.submit_experiment(experiment)
|
karlnapf/kameleon-mcmc
|
kameleon_mcmc/experiments/scripts/flower/flower.py
|
Python
|
bsd-2-clause
| 3,224
|
#!/usr/bin/env python3
# -*- coding:utf-8; mode:python3; indent-tabs-mode:nil; tab-width:4; -*-
""" Cached JSONized ATS files retrieved as JSON objects.
JSON data may also be returned to stdout, for use from the command line.
"""
import json
import os
import subprocess
import sys
from . import environment
# Cache directory
# ============================================================================
HOME = os.path.expanduser("~")
def user_directory(sub_directory):
""" `$HOME/sub_directory` if it exists or else None. """
result = None
directory = os.path.join(HOME, sub_directory)
if os.path.exists(directory):
result = directory
return result
CACHE_ROOT = (
os.getenv("LocalAppData") or
os.getenv("XDG_CACHE_HOME") or
user_directory("AppData/Local") or
user_directory("Local Settings/Application Data") or
user_directory("Library/Caches") or
user_directory(".cache") or
"/tmp")
CACHE = os.path.join(CACHE_ROOT, "PostiATS")
# Scanned directories for prefilling
# ============================================================================
ROOTS = [
environment.CWD,
environment.PATSHOME,
environment.PATSCONTRIB]
# Other constants
# ============================================================================
JSON_EXT = ".json"
SATS_EXT = ".sats"
DATS_EXT = ".dats"
TIMEOUT_DELAY = 3
POSTIATS_ENCODING = "iso-8859-15"
HELP = """\
Usage: %s -h|--help|--to-stdout file|--prefill|--purge|--directory
Or else used as a Python3 module.
* -h/--help: display this help.
* --to-stdout file: for command line use, return the JSON object to stdout,
  where file is a SATS or DATS file.
* --prefill: prefill the cache with ATS files from the distribution and
the current directory, recursively.
* --purge: purge the cache directory, removing all JSON files and directories
left empty.
* --directory: print the cache directory path.
"""
# Testing file types
# ============================================================================
def file_ext(path):
""" File extension of `path` including the leading dot. """
result = os.path.splitext(path)[1]
return result
def is_json_file(file_name):
""" True if file extension is “.json”. """
ext = file_ext(file_name)
result = ext == JSON_EXT
return result
def is_sats_file(file_name):
""" True if file extension is “.sats”. """
ext = file_ext(file_name)
result = ext == SATS_EXT
return result
def is_ats_file(file_name):
""" True if file extension is “.sats” or “.dats”. """
ext = file_ext(file_name)
result = ext in [SATS_EXT, DATS_EXT]
return result
# Resolving path
# ============================================================================
def path_elements(path):
""" Elements of `path`.
The list is never empty. The first element may be the empty string.
"""
result = []
(head, tail) = os.path.split(path)
while tail != "":
result.insert(0, tail)
(head, tail) = os.path.split(head)
result.insert(0, head)
return result
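# Illustrative sketch added for clarity (not part of the original module):
# on a POSIX-style path the leading root is kept as the first element, and a
# relative path yields an empty string there instead. The helper below is
# never called; it only documents the expected behaviour.
def _path_elements_examples():
    assert path_elements("/a/b/c.sats") == ["/", "a", "b", "c.sats"]
    assert path_elements("a/b") == ["", "a", "b"]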
def resolved_path(path):
""" Path made absolute, with symbolic links resolve, and normalized. """
path = os.path.abspath(path)
elements = path_elements(path)
result = ""
for element in elements:
segment = element
segment_path = os.path.join(result, segment)
if os.path.islink(segment_path):
segment = os.readlink(segment_path)
result = os.path.join(result, segment)
result = os.path.normpath(result)
return result
def clean_path(path):
""" Path to be used for source and to derive cache path. """
return resolved_path(path)
# Scanning directories for prefilling
# ============================================================================
def files_from_root(root, accept):
""" Recursive list of files in `root` for which `accept` is True. """
for (dir_path, _dir_names, file_names) in os.walk(root, followlinks=True):
for file_name in file_names:
if accept(file_name):
path = os.path.join(dir_path, file_name)
yield path
def files_from_roots(roots, accept):
""" `files_from_root` for each non-None root in `roots`. """
for root in roots:
if root is not None:
yield from files_from_root(root, accept)
def ats_files():
""" `files_from_roots(ROOTS, is_ats_file)`. """
yield from files_from_roots(ROOTS, is_ats_file)
# Cached file names
# ============================================================================
def cached_ext(ext):
""" Extension for cached JSON file, from original extension.
“.(s|d)ats” will become “-(s|d)ats.json”, not “.(s|d)ats.json”. This is so
because the files are not to be ATS files anymore, they are to be JSON
files.
"""
if ext == "":
result = JSON_EXT
elif ext[0] == ".":
result = "-" + ext[1:] + JSON_EXT
else:
result = ext + JSON_EXT
return result
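# Illustrative sketch added for clarity (not part of the original module):
# the extension rewriting described in the docstring above behaves as follows.
# The helper is never called; it only documents the expected behaviour.
def _cached_ext_examples():
    assert cached_ext(".sats") == "-sats.json"
    assert cached_ext(".dats") == "-dats.json"
    assert cached_ext("") == ".json"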
def get_cached_file_name(file_name):
""" Cached file name for `file_name`.
`file_name` is supposed to be a `clean_path`. The function can be invoked
on non-`clean_path`, but it will not be relevant.
"""
(directory, base_name) = os.path.split(file_name)
directory = os.path.relpath(directory, start="/")
new_directory = os.path.join(CACHE, directory)
(simple_name, ext) = os.path.splitext(base_name)
new_base_name = simple_name + cached_ext(ext)
result = os.path.join(new_directory, new_base_name)
return result
def get_source_file_name(json_name):
""" Source file name for cached `json_name`.
    `json_name` is supposed to derive from a `clean_path`. The function can be
    invoked on other paths, but the result will not be relevant.
"""
assert json_name.endswith(JSON_EXT)
(directory, base_name) = os.path.split(json_name)
new_directory = os.path.relpath(directory, start=CACHE)
new_directory = os.path.join("/", new_directory)
new_base_name = base_name[:-len(JSON_EXT)]
i = new_base_name.rfind("-")
if i != -1:
new_base_name = new_base_name[:i] + "." + new_base_name[i + 1:]
result = os.path.join(new_directory, new_base_name)
return result
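# Illustrative sketch added for clarity (not part of the original module):
# `get_source_file_name` inverts `get_cached_file_name`, so a POSIX source
# path survives the round trip. The path used is hypothetical and the helper
# is never called; it only documents the expected behaviour.
def _cache_path_round_trip_example():
    source = "/a/b/foo.sats"  # hypothetical path, for illustration only
    assert get_source_file_name(get_cached_file_name(source)) == source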
# Generating JSON
# ============================================================================
def run(directory, command, encoding):
""" Run `command` in `directory` using `encoding` for stdout and stderr.
Return a tuple `(stdout, stderr, return_code)`. If `return_code` is None,
    then stdout is always None too, while stderr may be None or a string. Even
    when `return_code` is zero, stderr is not guaranteed to be None and may be
    an empty string.
"""
stdout = None
stderr = None
return_code = None
result = None
try:
process = subprocess.Popen(
command,
cwd=directory,
universal_newlines=False,
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# pylint: disable=unexpected-keyword-arg
(stdout, stderr) = process.communicate(timeout=TIMEOUT_DELAY)
return_code = process.returncode
except OSError:
stdout = None
stderr = None
return_code = None
except subprocess.TimeoutExpired as timeout:
stderr = getattr(timeout, "stderr", "")
process.kill()
(_ignored, stderr_tail) = process.communicate()
stderr += stderr_tail
stdout = None
return_code = None
if stdout is not None:
stdout = stdout.decode(encoding)
if stderr is not None:
stderr = stderr.decode(encoding)
result = (stdout, stderr, return_code)
return result
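# Hedged usage sketch added for clarity (not part of the original module):
# `run` is how this module drives patsopt, as `make_cached_json` does below.
# The command shown here ("patsopt", "--version") is only an assumption for
# illustration; the helper is never called by the module.
def _run_usage_example():
    stdout, stderr, return_code = run(".", ["patsopt", "--version"],
                                      POSTIATS_ENCODING)
    if return_code == 0:
        print(stdout)
    else:
        print("patsopt not available:", stderr, file=sys.stderr)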
def make_cached_json(file_name):
""" Invoke `patsopt --jsonize-2` on `file_name`.
    Back up the content in the cache and return the content as a JSON object.
`file_name` is assumed to be from `environment.which`.
"""
result = None
path = clean_path(file_name)
working_directory = os.path.split(os.path.abspath(file_name))[0]
file_type_switch = "-s" if is_sats_file(file_name) else "-d"
command = []
command.append("patsopt")
command.append("--jsonize-2")
command.append(file_type_switch)
command.append(path)
(stdout, _stderr, return_code) = run(
working_directory,
command,
POSTIATS_ENCODING)
if return_code == 0:
cached_file_name = get_cached_file_name(path)
cached_directory = os.path.split(cached_file_name)[0]
if not os.path.exists(cached_directory):
os.makedirs(cached_directory)
output = open(cached_file_name, "w")
output.write(stdout)
output.close()
result = json.loads(stdout)
return result
# Retrieving JSON
# ============================================================================
def get_json_from_cache(file_name):
""" Try to get JSON from cache or else return None.
Check the JSON file is newer than the ATS file, so even if the file
is in the cache, None may still be returned if the cache version is
outdated.
`file_name` is assumed to be from `environment.which`.
"""
result = None
path = clean_path(file_name)
cached_file_name = get_cached_file_name(path)
if os.path.exists(cached_file_name):
time = os.path.getmtime(path)
cached_time = os.path.getmtime(cached_file_name)
if cached_time > time:
try:
source = open(cached_file_name, "r")
try:
result = json.load(source)
except ValueError:
pass
source.close()
except OSError:
# Includes IOError
pass
return result
def get_json(file_name):
""" Get JSON for `file_name`, from cache or (re-)generated.
    Return `None` if not found.
Use `environment.which`.
"""
result = None
path = environment.which(file_name)
if path is not None:
result = get_json_from_cache(path)
if result is None:
result = make_cached_json(path)
return result
def get_json_to_stdout(file_name):
""" Print JSON data for `file_name` to stdout.
Useful to use the cache from the command line too, not only from Python
scripts importing this module.
"""
json_object = get_json(file_name)
if json_object is None:
print("Failed to evaluate %s" % file_name, file=sys.stderr)
sys.exit(1)
json.dump(json_object, sys.stdout)
print()
# Prefilling, purging and listing
# ============================================================================
def prefill_cache():
""" Cache jsonized ATS files from distribution and current directory. """
print("Prefilling cache.")
print("\rListing ATS files...", end="")
file_names = list(ats_files())
print("\rListing ATS files: done.")
index = 0
files_count = len(file_names)
cached_count = 0
for file_name in file_names:
index += 1
print("\rHandling ATS file #%i of %i" % (index, files_count), end="")
if get_json(file_name) is not None:
cached_count += 1
print("\nDone: %i file(s) cached." % cached_count)
def purge_cache():
""" Purge cache deleting JSON files and directories left empty. """
for (dir_path, dir_names, file_names) in os.walk(CACHE, topdown=False):
for file_name in file_names:
if is_json_file(file_name):
path = os.path.join(dir_path, file_name)
print("Removing file “%s”" % path)
os.remove(path)
for directory in dir_names:
path = os.path.join(dir_path, directory)
if not os.listdir(path):
print("Removing directory “%s”" % path)
os.rmdir(path)
def cached_files():
""" Yield cached JSON file names. """
for (dir_path, _dir_names, file_names) in os.walk(CACHE):
for file_name in file_names:
if is_json_file(file_name):
yield os.path.join(dir_path, file_name)
def list_cached():
""" Yield tuple (json_name, source_name) for cached files. """
for json_name in cached_files():
source_name = get_source_file_name(json_name)
yield (json_name, source_name)
# Main
# ============================================================================
def main():
""" Main. """
my_name = os.path.split(sys.argv[0])[1]
arg_error = True
if len(sys.argv) == 2:
arg1 = sys.argv[1]
if arg1 in ["-h", "--help"]:
arg_error = False
print(HELP % my_name)
elif arg1 == "--prefill":
arg_error = False
prefill_cache()
elif arg1 == "--purge":
arg_error = False
purge_cache()
elif arg1 == "--directory":
arg_error = False
print("Cache directory: %s" % CACHE)
if len(sys.argv) == 3:
arg1 = sys.argv[1]
arg2 = sys.argv[2]
if arg1 == "--to-stdout":
arg_error = False
get_json_to_stdout(arg2)
if arg_error:
print("ERROR: Invalid argument(s).", file=sys.stderr)
print(HELP % my_name, file=sys.stderr)
sys.exit(1)
|
Hibou57/PostiATS-Utilities
|
postiats/jsonized.py
|
Python
|
bsd-2-clause
| 13,384
|
import logging
import copy
from troposphere import Ref, Output, GetAtt, Tags, FindInMap, If, Equals
from troposphere import ec2, autoscaling, ecs
from troposphere.autoscaling import Tag as ASTag
from troposphere.iam import Role, InstanceProfile, Policy
from awacs.helpers.trust import get_default_assumerole_policy
from stacker.blueprints.variables.types import (
CFNCommaDelimitedList,
CFNNumber,
CFNString,
EC2KeyPairKeyName,
EC2SecurityGroupId,
EC2SubnetIdList,
EC2VPCId,
)
from .base import EmpireBase
from .policies import ecs_agent_policy, logstream_policy
CLUSTER_SG_NAME = "EmpireMinionSecurityGroup"
logger = logging.getLogger(__name__)
class EmpireMinion(EmpireBase):
VARIABLES = {
"VpcId": {
"type": EC2VPCId, "description": "Vpc Id"},
"DefaultSG": {
"type": EC2SecurityGroupId,
"description": "Top level security group."},
"PrivateSubnets": {
"type": EC2SubnetIdList,
"description": "Subnets to deploy private instances in."},
"AvailabilityZones": {
"type": CFNCommaDelimitedList,
"description": "Availability Zones to deploy instances in."},
"InstanceType": {
"type": CFNString,
"description": "Empire AWS Instance Type",
"default": "c4.2xlarge"},
"MinHosts": {
"type": CFNNumber,
"description": "Minimum # of empire minion instances.",
"default": "3"},
"MaxHosts": {
"type": CFNNumber,
"description": "Maximum # of empire minion instances.",
"default": "20"},
"SshKeyName": {"type": EC2KeyPairKeyName},
"ImageName": {
"type": CFNString,
"description": (
"The image name to use from the AMIMap (usually found in the "
"config file.)"
),
"default": "empire"},
"DockerVolumeSize": {
"type": CFNNumber,
"description": (
"Size, in GB, of the EBS volume where docker will store its "
"images and containers."
),
"default": "50"},
"SwapVolumeSize": {
"type": CFNNumber,
"description": (
"Size, in GB, of the EBS volume that will be turned into a "
"swap volume."
),
"default": "16"},
"DockerRegistry": {
"type": CFNString,
"description": (
"Optional docker registry where private images are located."
),
"default": "https://index.docker.io/v1/"},
"DockerRegistryUser": {
"type": CFNString,
"description": "User for authentication with docker registry."},
"DockerRegistryPassword": {
"type": CFNString,
"no_echo": True,
"description": (
"Password for authentication with docker registry."
)},
"DockerRegistryEmail": {
"type": CFNString,
"description": "Email for authentication with docker registry."},
"DisableStreamingLogs": {
"type": CFNString,
"description": (
"Disables streaming logging if set to anything. Note: Without "
"this Empire creates a kinesis stream per app that you deploy "
"in Empire."
),
"default": ""},
}
def create_conditions(self):
t = self.template
t.add_condition(
"EnableStreamingLogs",
Equals(Ref("DisableStreamingLogs"), ""))
def create_security_groups(self):
t = self.template
t.add_resource(
ec2.SecurityGroup(
CLUSTER_SG_NAME,
GroupDescription="EmpireMinionSecurityGroup",
VpcId=Ref("VpcId")))
t.add_output(
Output("SecurityGroup", Value=Ref(CLUSTER_SG_NAME)))
# Allow all ports within cluster
t.add_resource(
ec2.SecurityGroupIngress(
"EmpireMinionAllTCPAccess",
IpProtocol="-1", FromPort="-1", ToPort="-1",
SourceSecurityGroupId=Ref(CLUSTER_SG_NAME),
GroupId=Ref(CLUSTER_SG_NAME)))
# Application ELB Security Groups
# Internal
for elb in ("public", "private"):
group_name = "Empire%sAppELBSG" % elb.capitalize()
t.add_resource(
ec2.SecurityGroup(
group_name,
GroupDescription=group_name,
VpcId=Ref("VpcId"),
Tags=Tags(Name="%s-app-elb-sg" % elb)))
t.add_output(
Output(
"%sAppELBSG" % elb.capitalize(),
Value=Ref(group_name)))
# Allow ELB to talk to cluster on 9000-10000
t.add_resource(
ec2.SecurityGroupIngress(
"Empire%sAppPort9000To10000" % elb.capitalize(),
IpProtocol="tcp", FromPort=9000, ToPort=10000,
SourceSecurityGroupId=Ref(group_name),
GroupId=Ref(CLUSTER_SG_NAME)))
# When using Application Load Balancing, the port is chosen at
# random within an ephemeral port range.
t.add_resource(
ec2.SecurityGroupIngress(
"Empire%sAppPort32768To61000" % elb.capitalize(),
IpProtocol="tcp", FromPort=32768, ToPort=61000,
SourceSecurityGroupId=Ref(group_name),
GroupId=Ref(CLUSTER_SG_NAME)))
# Allow anything to talk to the ELB
            # If internal, only internal hosts will be able to talk to
            # the ELB.
t.add_resource(
ec2.SecurityGroupIngress(
"Empire%sELBAllow80" % elb.capitalize(),
IpProtocol="tcp", FromPort=80, ToPort=80,
CidrIp="0.0.0.0/0",
GroupId=Ref(group_name)))
t.add_resource(
ec2.SecurityGroupIngress(
"Empire%sELBAllow443" % elb.capitalize(),
IpProtocol="tcp", FromPort=443, ToPort=443,
CidrIp="0.0.0.0/0",
GroupId=Ref(group_name)))
def build_block_device(self):
docker_volume = autoscaling.BlockDeviceMapping(
DeviceName="/dev/sdh",
Ebs=autoscaling.EBSBlockDevice(
DeleteOnTermination=True,
VolumeSize=Ref("DockerVolumeSize")))
swap_volume = autoscaling.BlockDeviceMapping(
DeviceName="/dev/sdi",
Ebs=autoscaling.EBSBlockDevice(
DeleteOnTermination=True,
VolumeSize=Ref("SwapVolumeSize")))
return [docker_volume, swap_volume]
def generate_iam_policies(self):
# Referencing NS like this within a resource name is deprecated, it's
# only done here to maintain backwards compatability for minion
# clusters.
ns = self.context.namespace
base_policies = [
Policy(
PolicyName="%s-ecs-agent" % ns,
PolicyDocument=ecs_agent_policy()),
]
with_logging = copy.deepcopy(base_policies)
with_logging.append(
Policy(
PolicyName="%s-kinesis-logging" % ns,
PolicyDocument=logstream_policy()
)
)
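        # Added descriptive comment (not in the original code): the kinesis
        # logging policy above is attached only when the EnableStreamingLogs
        # condition holds, via a CloudFormation Fn::If over the two policy
        # lists.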
policies = If("EnableStreamingLogs", with_logging, base_policies)
return policies
def create_iam_profile(self):
t = self.template
ec2_role_policy = get_default_assumerole_policy()
t.add_resource(
Role(
"EmpireMinionRole",
AssumeRolePolicyDocument=ec2_role_policy,
Path="/",
Policies=self.generate_iam_policies()))
t.add_resource(
InstanceProfile(
"EmpireMinionProfile",
Path="/",
Roles=[Ref("EmpireMinionRole")]))
t.add_output(
Output("IAMRole", Value=Ref("EmpireMinionRole")))
def create_ecs_cluster(self):
t = self.template
t.add_resource(ecs.Cluster("EmpireMinionCluster"))
t.add_output(
Output("ECSCluster", Value=Ref("EmpireMinionCluster")))
def generate_seed_contents(self):
seed = [
"EMPIRE_HOSTGROUP=minion\n",
"ECS_CLUSTER=", Ref("EmpireMinionCluster"), "\n",
"DOCKER_REGISTRY=", Ref("DockerRegistry"), "\n",
"DOCKER_USER=", Ref("DockerRegistryUser"), "\n",
"DOCKER_PASS=", Ref("DockerRegistryPassword"), "\n",
"DOCKER_EMAIL=", Ref("DockerRegistryEmail"), "\n",
"ENABLE_STREAMING_LOGS=", If("EnableStreamingLogs", "true",
"false"), "\n",
]
return seed
def create_autoscaling_group(self):
t = self.template
t.add_resource(
autoscaling.LaunchConfiguration(
"EmpireMinionLaunchConfig",
IamInstanceProfile=GetAtt("EmpireMinionProfile", "Arn"),
ImageId=FindInMap(
"AmiMap",
Ref("AWS::Region"),
Ref("ImageName")),
BlockDeviceMappings=self.build_block_device(),
InstanceType=Ref("InstanceType"),
KeyName=Ref("SshKeyName"),
UserData=self.generate_user_data(),
SecurityGroups=[Ref("DefaultSG"), Ref(CLUSTER_SG_NAME)]))
t.add_resource(
autoscaling.AutoScalingGroup(
"EmpireMinionAutoscalingGroup",
AvailabilityZones=Ref("AvailabilityZones"),
LaunchConfigurationName=Ref("EmpireMinionLaunchConfig"),
MinSize=Ref("MinHosts"),
MaxSize=Ref("MaxHosts"),
VPCZoneIdentifier=Ref("PrivateSubnets"),
Tags=[ASTag("Name", "empire_minion", True)]))
|
remind101/stacker_blueprints
|
stacker_blueprints/empire/minion.py
|
Python
|
bsd-2-clause
| 10,176
|
from services import Service
from exceptions import NoSuchService
from ports import parse_ports_dict
class Project(object):
def __init__(self, name, host):
self.name = name
host.project = self
self.host = host
self.services = []
self.groups = []
def __repr__(self):
return "Project (%s)" % self.name
def make_service(self, name, **kwargs):
ports = kwargs.pop("ports", [])
return Service(
name=name,
ports=parse_ports_dict(ports),
**kwargs)
def get_service(self, name):
"""
Taken from Fig.
        Retrieves a service by name.
"""
for service in self.services:
if service.name == name:
return service
raise NoSuchService("No such service %s" % name)
def filter_services(self, names):
"""
Taken from Fig.
        Retrieves a list of services by names.
        If names is None or an empty list, return all.
"""
if names is None or len(names) == 0:
return self.services
else:
return [self.get_service(name) for name in names]
def filter_groups(self, services):
"""
Retrieves a list of groups based on their linked services.
"""
groups = []
for group in self.groups:
for service in services:
if service in group.services:
groups.append(group)
break
return groups
|
cameronmaske/skipper
|
skipper/project.py
|
Python
|
bsd-2-clause
| 1,538
|
"""
Integration tests for disco_dynamodb.py
"""
import json
from random import randint
from tests.helpers.integration_helpers import IntegrationTest
MOCK_TABLE_NAME = "IntegrationTest"
CREATE_CMD = "disco_dynamodb.py create --env {0} --config test/test_dynamodb_create.json --wait"
DELETE_CMD = "disco_dynamodb.py delete --table {0} --env {1} --wait"
LIST_CMD = "disco_dynamodb.py list"
class TestDiscoDynamoDB(IntegrationTest):
""" Test bin/disco_dynamodb.py """
def setUp(self):
"""
Generate random environment name for integration test env
"""
self.env_name = "env_{0}".format(randint(10000, 99999))
def test_create_and_delete_table(self):
""" Ensures we can create and delete DynamoDB table properly """
table_list_output = u"{0:<20} {1}".format(MOCK_TABLE_NAME, self.env_name)
try:
create_output = self.run_cmd(CREATE_CMD.format(self.env_name).split())
table = json.loads(self._remove_lines_from_logs(create_output))
self.assertEqual(table["TableStatus"], "ACTIVE")
self.assertEqual(table["TableName"], "{0}_{1}".format(MOCK_TABLE_NAME, self.env_name))
self.assertEqual(table["ProvisionedThroughput"]["WriteCapacityUnits"], 10)
self.assertEqual(table["ProvisionedThroughput"]["ReadCapacityUnits"], 10)
for key in table["KeySchema"]:
if key["KeyType"] == "HASH":
self.assertEqual(key["AttributeName"], "_id")
else:
self.assertEqual(key["AttributeName"], "mock_range_key")
self.assertEqual(table["GlobalSecondaryIndexes"][0]["IndexName"], "mock_index")
self.assertEqual(table["GlobalSecondaryIndexes"][0]["KeySchema"][0]["AttributeName"],
"mock_index_attr")
self.assertEqual(table["GlobalSecondaryIndexes"][0]["Projection"]["ProjectionType"], "ALL")
# Also assert that table can be found when running the list command
list_output = self.run_cmd(LIST_CMD.format(self.env_name).split())
lines = list_output.split('\n')
self.assertIn(table_list_output, lines)
finally:
delete_output = self.run_cmd(DELETE_CMD.format(MOCK_TABLE_NAME, self.env_name).split())
delete_output = json.loads(self._remove_lines_from_logs(delete_output))
self.assertEqual(delete_output["TableName"], "{0}_{1}".format(MOCK_TABLE_NAME, self.env_name))
self.assertEqual(delete_output["TableStatus"], "DELETED")
list_output = self.run_cmd(LIST_CMD.format(self.env_name).split())
lines = list_output.split('\n')
self.assertNotIn(table_list_output, lines)
def _remove_lines_from_logs(self, input_string):
lines = []
for line in input_string.split("\n"):
words = line.split()
try:
# If it quacks like a logging line...
if words[3] in ["WARNING", "WARN", "INFO", "DEBUG", "CRITICAL", "NOTSET"]:
continue
except IndexError:
pass
lines.append(line)
output_string = "\n".join(lines)
return output_string
|
amplifylitco/asiaq
|
tests/integration/test_disco_dynamodb.py
|
Python
|
bsd-2-clause
| 3,256
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django_choices_flow import Choices
mexican_food_list = ((1, 'nacho'), (2, 'qualcamole'), (3, 'taco'))
brazil_food_list = ((4, 'burger'), (5, 'pizza'), (6, 'feijoada'))
class MyChoices(Choices):
FOODS = (('mexican', mexican_food_list),
('brazil', brazil_food_list),)
class TestChoices(TestCase):
def setUp(self):
self.choices = MyChoices
def test_key_value_first(self):
self.assertEqual(self.choices.FOODS['mexican'],
((1, 'nacho'), (2, 'qualcamole'), (3, 'taco')))
def test_key_value_second(self):
self.assertEqual(self.choices.FOODS['brazil'],
((4, 'burger'), (5, 'pizza'), (6, 'feijoada')))
|
valdergallo/django-choices-flow
|
tests/test_subchoices.py
|
Python
|
bsd-2-clause
| 768
|
import zmq
from argparse import ArgumentParser
from pymavlink import mavutil
def main():
parser = ArgumentParser()
parser.add_argument("--device", help="MAVLink device to add to zmq", required=True)
parser.add_argument("--zmq", help="zmq url", required=True)
parser.add_argument("--prefix", help="Prefix for topic for this device", default="unnamed")
args = parser.parse_args()
try:
msrc = mavutil.mavlink_connection(args.device, planner_format=False,
notimestamps=True, robust_parsing=True)
except Exception, e:
print 'Could not connect to mavlink device at %s' % args.device
print e
return False
context = zmq.Context()
zmq_socket = context.socket(zmq.PUB)
try:
zmq_socket.connect(args.zmq)
except Exception, e:
print 'Failed to establish connection with zmq gateway'
print e
    # send messages from mavlink connection to zmq gateway
try:
while True:
mav_msg = msrc.recv_match()
if mav_msg is not None:
topic = "%s.%s"%(args.prefix,mav_msg.get_type())
zmq_socket.send(topic,zmq.SNDMORE)
zmq_socket.send_pyobj(mav_msg)
except Exception, e:
print 'Bridge failed'
print e
zmq_socket.close()
context.term()
if __name__ == "__main__":
main()
|
btashton/mavlink-zmq
|
bridge/mavlink-zmq-bridge.py
|
Python
|
bsd-2-clause
| 1,405
|
import subprocess
import time
import json
import functools
import argparse
from colorama import init, Fore, Back, Style
def identity(*args):
if len(args) == 1:
return args[0]
return args
def unique(xs, sort=False, key=identity):
if sort:
items = sorted(xs, key=key)
else:
items = xs
seen = set()
return [x for x in items if not
(key(x) in seen or seen.add(key(x)))]
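# Illustrative sketch added for clarity (not part of the original module):
# `unique` keeps the first occurrence of each key, preserving input order
# unless sort=True. The helper below is never called; it only documents the
# expected behaviour, including the annotation-style key used elsewhere here.
def _unique_examples():
    assert unique([3, 1, 3, 2, 1]) == [3, 1, 2]
    assert unique([3, 1, 3, 2, 1], sort=True) == [1, 2, 3]
    anns = [{'description': 'a'}, {'description': 'a'}, {'description': 'b'}]
    assert unique(anns, key=lambda ann: ann['description']) == anns[::2]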
class TaskWarriorException(Exception):
def __init__(self, ret, out, err):
self.ret = ret
self.out = out
self.err = err
def __str__(self):
return 'Command failed with error {}'.format(self.ret)
class Task(object):
def __init__(self, dsrc, data):
self.dsrc = dsrc
self.data = data
def __lt__(self, other):
return self.modified() < other.modified()
def __repr__(self):
return repr(self.data)
def annotations(self):
if 'annotations' not in self.data:
return []
return self.data['annotations']
def set_annotations(self, annotations):
self.data['annotations'] = annotations
def status(self):
return self.data['status']
def set_status(self, status):
self.data['status'] = status
def urgency(self):
return self.data['urgency']
def set_urgency(self, urgency):
self.data['urgency'] = urgency
def modified(self):
def parsetime(s):
return time.strptime(s, '%Y%m%dT%H%M%SZ')
return parsetime(self.data['modified'])
def uuid(self):
return self.data['uuid']
def merge(self, other):
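        # Added descriptive comments (not in the original code): the newer of
        # the two tasks is always the merge target; within the same database
        # the newer copy simply wins, while across databases annotations are
        # unioned and a completion recorded on the older copy is carried over.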
if self.modified() > other.modified():
return other.merge(self)
if self.dsrc == other.dsrc:
return other
else:
other.set_annotations(unique(self.annotations() + other.annotations(), sort=True,
key=lambda ann: ann['description']))
if self.status() == 'completed':
other.set_status('completed')
other.set_urgency(0)
return other
class Database(object):
def __init__(self, path):
self.path = path
self.tasks = self.load()
def load(self):
p = subprocess.Popen(['task', 'rc.data.location={}'.format(self.path), 'export'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
if p.returncode != 0:
raise TaskWarriorException(p.returncode, out, err)
self.data = [Task(self, task) for task in json.loads(out.decode('utf-8'))]
return out, err
def save(self):
p = subprocess.Popen(['task', 'rc.data.location={}'.format(self.path), 'import'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
data = json.dumps([task.data for task in self.data])
out, err = p.communicate(input=data.encode('utf-8'))
if p.returncode != 0:
raise TaskWarriorException(p.returncode, out, err)
return out, err
def merge(self, other):
export = self.data + other.data
tasks = {}
for task in sorted(export):
if task.uuid() not in tasks:
tasks[task.uuid()] = []
tasks[task.uuid()].append(task)
if len(tasks) == 0:
self.data = []
elif len(tasks) == 1:
self.data = list(tasks.values())[0]
else:
self.data = [functools.reduce(lambda task, other: task.merge(other),
value[1:], value[0])
for value in tasks.values()]
def do_merge(local, remote):
local = Database(local)
remote = Database(remote)
local.merge(remote)
out, err = local.save()
print(Fore.CYAN + out.decode('utf-8') + Style.RESET_ALL)
print(Fore.RED + err.decode('utf-8') + Style.RESET_ALL)
def __main__():
    parser = argparse.ArgumentParser()
parser.add_argument("local")
parser.add_argument("remote")
args = parser.parse_args()
do_merge(args.local, args.remote)
if __name__ == "__main__":
init()
try:
__main__()
    except TaskWarriorException as ex:
        print(Fore.CYAN + ex.out.decode('utf-8') + Style.RESET_ALL)
        print(Fore.RED + ex.err.decode('utf-8') + Style.RESET_ALL)
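# Typical invocation sketch (the paths are hypothetical; each one must be a
# taskwarrior data directory usable as rc.data.location, with the `task`
# binary on PATH):
#
#   python taskmerge.py ~/.task-local ~/.task-remote
#
# The merged set is written back into the *local* data location via
# `task import`; the remote directory is only read.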
|
mrschyte/taskwarrior-hooks
|
mergetool/taskmerge.py
|
Python
|
bsd-2-clause
| 4,514
|
# -*- mode: python; coding: utf-8 -*-
# Copyright (c) 2018 Radio Astronomy Software Group
# Licensed under the 2-clause BSD License
"""Class for reading and writing uvfits files."""
import os
import copy
import warnings
import numpy as np
from astropy import constants as const
from astropy.time import Time
from astropy.io import fits
from .uvdata import UVData
from .. import utils as uvutils
__all__ = ["UVFITS"]
class UVFITS(UVData):
"""
Defines a uvfits-specific subclass of UVData for reading and writing uvfits.
This class should not be interacted with directly, instead use the read_uvfits
and write_uvfits methods on the UVData class.
Attributes
----------
uvfits_required_extra : list of str
Names of optional UVParameters that are required for uvfits.
"""
uvfits_required_extra = [
"antenna_positions",
"gst0",
"rdate",
"earth_omega",
"dut1",
"timesys",
]
def _get_parameter_data(
self, vis_hdu, run_check_acceptability, background_lsts=True,
):
"""
Read just the random parameters portion of the uvfits file ("metadata").
Separated from full read so that header, metadata and data can be read
independently.
"""
# astropy.io fits reader scales date according to relevant PZER0 (?)
# uvfits standard is to have 2 DATE parameters, both floats:
# DATE (full day) and _DATE (fractional day)
# cotter uvfits files have one DATE that is a double
# using data.par('date') is general -- it will add them together if there are 2
self.time_array = vis_hdu.data.par("date")
self.Ntimes = len(np.unique(self.time_array))
# check if lst array is saved. It's not a standard metadata item in uvfits,
# but if the file was written with pyuvdata it may be present
# (depending on pyuvdata version)
proc = None
if "LST" in vis_hdu.data.parnames:
# angles in uvfits files are stored in degrees, so convert to radians
self.lst_array = np.deg2rad(vis_hdu.data.par("lst"))
if run_check_acceptability:
(
latitude,
longitude,
altitude,
) = self.telescope_location_lat_lon_alt_degrees
lst_array = uvutils.get_lst_for_time(
self.time_array, latitude, longitude, altitude
)
if not np.all(
np.isclose(
self.lst_array,
lst_array,
rtol=self._lst_array.tols[0],
atol=self._lst_array.tols[1],
)
):
warnings.warn(
"LST values stored in this file are not "
"self-consistent with time_array and telescope "
"location. Consider recomputing with "
"utils.get_lst_for_time."
)
else:
proc = self.set_lsts_from_time_array(background=background_lsts)
# if antenna arrays are present, use them. otherwise use baseline array
if "ANTENNA1" in vis_hdu.data.parnames and "ANTENNA2" in vis_hdu.data.parnames:
# Note: uvfits antennas are 1 indexed,
# need to subtract one to get to 0-indexed
self.ant_1_array = np.int32(vis_hdu.data.par("ANTENNA1")) - 1
self.ant_2_array = np.int32(vis_hdu.data.par("ANTENNA2")) - 1
subarray = np.int32(vis_hdu.data.par("SUBARRAY")) - 1
# error on files with multiple subarrays
if len(set(subarray)) > 1:
raise ValueError(
"This file appears to have multiple subarray "
"values; only files with one subarray are "
"supported."
)
else:
# cannot set this to be the baseline array because it uses the
# 256 convention, not our 2048 convention
bl_input_array = np.int64(vis_hdu.data.par("BASELINE"))
# get antenna arrays based on uvfits baseline array
self.ant_1_array, self.ant_2_array = self.baseline_to_antnums(
bl_input_array
)
# check for multi source files. NOW SUPPORTED, W00T!
if "SOURCE" in vis_hdu.data.parnames:
# Preserve the source info just in case the AIPS SU table is missing, and
# we need to revert things back.
self._set_multi_phase_center(preserve_phase_center_info=True)
source = vis_hdu.data.par("SOURCE")
self.phase_center_id_array = source.astype(int)
# get self.baseline_array using our convention
self.baseline_array = self.antnums_to_baseline(
self.ant_1_array, self.ant_2_array
)
self.Nbls = len(np.unique(self.baseline_array))
# initialize internal variables based on the antenna lists
self.Nants_data = int(np.union1d(self.ant_1_array, self.ant_2_array).size)
# check for suffixes in the baseline coordinate names indicating the
# baseline coordinate system
if (
"UU" in vis_hdu.data.parnames
and "VV" in vis_hdu.data.parnames
and "WW" in vis_hdu.data.parnames
):
uvw_names = ["UU", "VV", "WW"]
elif (
"UU---SIN" in vis_hdu.data.parnames
and "VV---SIN" in vis_hdu.data.parnames
and "WW---SIN" in vis_hdu.data.parnames
):
uvw_names = ["UU---SIN", "VV---SIN", "WW---SIN"]
elif (
"UU---NCP" in vis_hdu.data.parnames
and "VV---NCP" in vis_hdu.data.parnames
and "WW---NCP" in vis_hdu.data.parnames
):
uvw_names = ["UU---NCP", "VV---NCP", "WW---NCP"]
warnings.warn(
"The baseline coordinates (uvws) in this file are specified in the "
"---NCP coordinate system, which is does not agree with our baseline "
"coordinate conventions. Rotating the uvws to match our convention "
"(Note that this rotation has not been widely tested)."
)
else:
raise ValueError(
"There is no consistent set of baseline coordinates in this file. "
"The UU, VV and WW coordinate must have no suffix or the '---SIN' or "
"'---NCP' suffix and the suffixes must match on all three baseline "
"coordinate parameters."
)
# read baseline vectors in units of seconds, return in meters
# FITS uvw direction convention is opposite ours and Miriad's.
# So conjugate the visibilities and flip the uvws:
self.uvw_array = (-1) * (
np.array(
np.stack(
(
vis_hdu.data.par(uvw_names[0]),
vis_hdu.data.par(uvw_names[1]),
vis_hdu.data.par(uvw_names[2]),
)
)
)
* const.c.to("m/s").value
).T
if "INTTIM" in vis_hdu.data.parnames:
self.integration_time = np.asarray(
vis_hdu.data.par("INTTIM"), dtype=np.float64
)
else:
if self.Ntimes > 1:
# assume that all integration times in the file are the same
int_time = self._calc_single_integration_time()
self.integration_time = (
np.ones_like(self.time_array, dtype=np.float64) * int_time
)
else:
warnings.warn(
"The integration time is not specified and only one time is "
"present so it cannot be calculated from the difference between "
"integration times. Setting to None which will cause the check to "
"error. Set `run_check` to False to read in the file without "
"checking. Then set the integration_time (to an array of length "
"Nblts) directly on the object to allow futher processing."
)
if proc is not None:
proc.join()
def _get_data(
self,
vis_hdu,
antenna_nums,
antenna_names,
ant_str,
bls,
frequencies,
freq_chans,
times,
time_range,
lsts,
lst_range,
polarizations,
blt_inds,
read_metadata,
keep_all_metadata,
run_check,
check_extra,
run_check_acceptability,
strict_uvw_antpos_check,
fix_old_proj,
fix_use_ant_pos,
check_autos,
fix_autos,
):
"""
Read just the visibility and flag data of the uvfits file.
Separated from full read so header and metadata can be read without data.
"""
# figure out what data to read in
blt_inds, freq_inds, pol_inds, history_update_string = self._select_preprocess(
antenna_nums,
antenna_names,
ant_str,
bls,
frequencies,
freq_chans,
times,
time_range,
lsts,
lst_range,
polarizations,
blt_inds,
)
if blt_inds is not None:
blt_frac = len(blt_inds) / float(self.Nblts)
else:
blt_frac = 1
if freq_inds is not None:
freq_frac = len(freq_inds) * float(self.Nspws) / float(self.Nfreqs)
else:
freq_frac = 1
if pol_inds is not None:
pol_frac = len(pol_inds) / float(self.Npols)
else:
pol_frac = 1
min_frac = np.min([blt_frac, freq_frac, pol_frac])
if min_frac == 1:
# no select, read in all the data
if vis_hdu.header["NAXIS"] == 7:
raw_data_array = vis_hdu.data.data[:, 0, 0, :, :, :, :]
assert self.Nspws == raw_data_array.shape[1]
else:
# in many uvfits files the spw axis is left out,
# here we put it back in so the dimensionality stays the same
raw_data_array = vis_hdu.data.data[:, 0, 0, :, :, :]
raw_data_array = raw_data_array[:, np.newaxis, :, :]
else:
# do select operations on everything except data_array, flag_array
# and nsample_array
self._select_metadata(
blt_inds, freq_inds, pol_inds, history_update_string, keep_all_metadata
)
# just read in the right portions of the data and flag arrays
if blt_frac == min_frac:
if vis_hdu.header["NAXIS"] == 7:
raw_data_array = vis_hdu.data.data[blt_inds, :, :, :, :, :, :]
raw_data_array = raw_data_array[:, 0, 0, :, :, :, :]
assert self.Nspws == raw_data_array.shape[1]
else:
# in many uvfits files the spw axis is left out,
# here we put it back in so the dimensionality stays the same
raw_data_array = vis_hdu.data.data[blt_inds, :, :, :, :, :]
raw_data_array = raw_data_array[:, 0, 0, :, :, :]
raw_data_array = raw_data_array[:, np.newaxis, :, :, :]
if freq_frac < 1:
raw_data_array = raw_data_array[:, :, freq_inds, :, :]
if pol_frac < 1:
raw_data_array = raw_data_array[:, :, :, pol_inds, :]
elif freq_frac == min_frac:
if vis_hdu.header["NAXIS"] == 7:
raw_data_array = vis_hdu.data.data[:, :, :, :, freq_inds, :, :]
raw_data_array = raw_data_array[:, 0, 0, :, :, :, :]
assert self.Nspws == raw_data_array.shape[1]
else:
# in many uvfits files the spw axis is left out,
# here we put it back in so the dimensionality stays the same
raw_data_array = vis_hdu.data.data[:, :, :, freq_inds, :, :]
raw_data_array = raw_data_array[:, 0, 0, :, :, :]
raw_data_array = raw_data_array[:, np.newaxis, :, :, :]
if blt_frac < 1:
raw_data_array = raw_data_array[blt_inds, :, :, :, :]
if pol_frac < 1:
raw_data_array = raw_data_array[:, :, :, pol_inds, :]
else:
if vis_hdu.header["NAXIS"] == 7:
raw_data_array = vis_hdu.data.data[:, :, :, :, :, pol_inds, :]
raw_data_array = raw_data_array[:, 0, 0, :, :, :, :]
assert self.Nspws == raw_data_array.shape[1]
else:
# in many uvfits files the spw axis is left out,
# here we put it back in so the dimensionality stays the same
raw_data_array = vis_hdu.data.data[:, :, :, :, pol_inds, :]
raw_data_array = raw_data_array[:, 0, 0, :, :, :]
raw_data_array = raw_data_array[:, np.newaxis, :, :, :]
if blt_frac < 1:
raw_data_array = raw_data_array[blt_inds, :, :, :, :]
if freq_frac < 1:
raw_data_array = raw_data_array[:, :, freq_inds, :, :]
assert len(raw_data_array.shape) == 5
# Reshape the data array to be the right size if we are working w/ multiple
# spectral windows to be 'flex_spw' compliant
if self.Nspws > 1:
raw_data_array = np.reshape(
raw_data_array,
(self.Nblts, 1, self.Nfreqs, self.Npols, raw_data_array.shape[4]),
)
# FITS uvw direction convention is opposite ours and Miriad's.
# So conjugate the visibilities and flip the uvws:
self.data_array = (
raw_data_array[:, :, :, :, 0] - 1j * raw_data_array[:, :, :, :, 1]
)
self.flag_array = raw_data_array[:, :, :, :, 2] <= 0
self.nsample_array = np.abs(raw_data_array[:, :, :, :, 2])
if fix_old_proj:
self.fix_phase(use_ant_pos=fix_use_ant_pos)
# check if object has all required UVParameters set
if run_check:
self.check(
check_extra=check_extra,
run_check_acceptability=run_check_acceptability,
strict_uvw_antpos_check=strict_uvw_antpos_check,
allow_flip_conj=True,
check_autos=check_autos,
fix_autos=fix_autos,
)
def read_uvfits(
self,
filename,
antenna_nums=None,
antenna_names=None,
ant_str=None,
bls=None,
frequencies=None,
freq_chans=None,
times=None,
time_range=None,
lsts=None,
lst_range=None,
polarizations=None,
blt_inds=None,
keep_all_metadata=True,
read_data=True,
background_lsts=True,
run_check=True,
check_extra=True,
run_check_acceptability=True,
strict_uvw_antpos_check=False,
fix_old_proj=False,
fix_use_ant_pos=True,
check_autos=True,
fix_autos=True,
):
"""
Read in header, metadata and data from a uvfits file.
Supports reading only selected portions of the data.
Parameters
----------
filename : str
The uvfits file to read from.
antenna_nums : array_like of int, optional
The antennas numbers to include when reading data into the object
(antenna positions and names for the removed antennas will be retained
unless `keep_all_metadata` is False). This cannot be provided if
`antenna_names` is also provided. Ignored if read_data is False.
antenna_names : array_like of str, optional
The antennas names to include when reading data into the object
(antenna positions and names for the removed antennas will be retained
unless `keep_all_metadata` is False). This cannot be provided if
`antenna_nums` is also provided. Ignored if read_data is False.
bls : list of tuple, optional
A list of antenna number tuples (e.g. [(0, 1), (3, 2)]) or a list of
baseline 3-tuples (e.g. [(0, 1, 'xx'), (2, 3, 'yy')]) specifying baselines
to include when reading data into the object. For length-2 tuples,
the ordering of the numbers within the tuple does not matter. For
length-3 tuples, the polarization string is in the order of the two
antennas. If length-3 tuples are provided, `polarizations` must be
None. Ignored if read_data is False.
ant_str : str, optional
A string containing information about what antenna numbers
and polarizations to include when reading data into the object.
Can be 'auto', 'cross', 'all', or combinations of antenna numbers
and polarizations (e.g. '1', '1_2', '1x_2y'). See tutorial for more
examples of valid strings and the behavior of different forms for ant_str.
If '1x_2y,2y_3y' is passed, both polarizations 'xy' and 'yy' will
be kept for both baselines (1, 2) and (2, 3) to return a valid
pyuvdata object.
An ant_str cannot be passed in addition to any of `antenna_nums`,
            `antenna_names`, `bls` args or the `polarizations` parameters;
            if it is, a ValueError will be raised. Ignored if read_data is False.
frequencies : array_like of float, optional
The frequencies to include when reading data into the object, each
value passed here should exist in the freq_array. Ignored if
read_data is False.
freq_chans : array_like of int, optional
The frequency channel numbers to include when reading data into the
object. Ignored if read_data is False.
times : array_like of float, optional
The times to include when reading data into the object, each value
passed here should exist in the time_array.
time_range : array_like of float, optional
The time range in Julian Date to keep in the object, must be
length 2. Some of the times in the object should fall between the
first and last elements. Cannot be used with `times`.
lsts : array_like of float, optional
The local sidereal times (LSTs) to keep in the object, each value
passed here should exist in the lst_array. Cannot be used with
`times`, `time_range`, or `lst_range`.
lst_range : array_like of float, optional
The local sidereal time (LST) range in radians to keep in the
object, must be of length 2. Some of the LSTs in the object should
fall between the first and last elements. If the second value is
smaller than the first, the LSTs are treated as having phase-wrapped
around LST = 2*pi = 0, and the LSTs kept on the object will run from
the larger value, through 0, and end at the smaller value.
polarizations : array_like of int, optional
The polarizations numbers to include when reading data into the
object, each value passed here should exist in the polarization_array.
Ignored if read_data is False.
blt_inds : array_like of int, optional
The baseline-time indices to include when reading data into the
object. This is not commonly used. Ignored if read_data is False.
keep_all_metadata : bool
Option to keep all the metadata associated with antennas, even those
that do not have data associated with them after the select option.
read_data : bool
Read in the visibility, nsample and flag data. If set to False, only
the metadata will be read in. Setting read_data to False results in
a metadata only object.
background_lsts : bool
When set to True, the lst_array is calculated in a background thread.
run_check : bool
Option to check for the existence and proper shapes of parameters
            after reading in the file (the default is True,
meaning the check will be run). Ignored if read_data is False.
check_extra : bool
Option to check optional parameters as well as required ones (the
default is True, meaning the optional parameters will be checked).
Ignored if read_data is False.
run_check_acceptability : bool
Option to check acceptable range of the values of parameters after
reading in the file (the default is True, meaning the acceptable
range check will be done). Ignored if read_data is False.
strict_uvw_antpos_check : bool
Option to raise an error rather than a warning if the check that
uvws match antenna positions does not pass.
fix_old_proj : bool
Applies a fix to uvw-coordinates and phasing, assuming that the old `phase`
method was used prior to writing the data, which had errors of the order of
one part in 1e4 - 1e5. See the phasing memo for more details. Default is
False.
fix_use_ant_pos : bool
If setting `fix_old_proj` to True, use the antenna positions to derive the
correct uvw-coordinates rather than using the baseline vectors. Default is
True.
check_autos : bool
Check whether any auto-correlations have non-zero imaginary values in
data_array (which should not mathematically exist). Default is True.
fix_autos : bool
If auto-correlations with imaginary values are found, fix those values so
that they are real-only in data_array. Default is True.
Raises
------
IOError
If filename doesn't exist.
ValueError
If incompatible select keywords are set (e.g. `ant_str` with other
antenna selectors, `times` and `time_range`) or select keywords
exclude all data or if keywords are set to the wrong type.
If the data have multi spw with different channel widths.
If the metadata are not internally consistent or missing.
"""
# update filename attribute
basename = os.path.basename(filename)
self.filename = [basename]
self._filename.form = (1,)
with fits.open(filename, memmap=True) as hdu_list:
vis_hdu = hdu_list[0] # assumes the visibilities are in the primary hdu
vis_hdr = vis_hdu.header.copy()
hdunames = uvutils._fits_indexhdus(hdu_list) # find the rest of the tables
# First get everything we can out of the header.
self._set_phased()
# check if we have an spw dimension
if vis_hdr["NAXIS"] == 7:
self.Nspws = vis_hdr.pop("NAXIS5")
self.spw_array = (
uvutils._fits_gethduaxis(vis_hdu, 5).astype(np.int64) - 1
)
# the axis number for phase center depends on if the spw exists
self.phase_center_ra_degrees = float(vis_hdr.pop("CRVAL6"))
self.phase_center_dec_degrees = float(vis_hdr.pop("CRVAL7"))
else:
self.Nspws = 1
self.spw_array = np.array([np.int64(0)])
# the axis number for phase center depends on if the spw exists
self.phase_center_ra_degrees = float(vis_hdr.pop("CRVAL5"))
self.phase_center_dec_degrees = float(vis_hdr.pop("CRVAL6"))
# get shapes
self.Npols = vis_hdr.pop("NAXIS3")
self.Nblts = vis_hdr.pop("GCOUNT")
if self.Nspws > 1:
# If this is multi-spw, use the 'flexible' spectral window setup
self._set_flex_spw()
uvfits_nchan = vis_hdr.pop("NAXIS4")
self.Nfreqs = uvfits_nchan * self.Nspws
self.flex_spw_id_array = np.transpose(
np.tile(np.arange(self.Nspws), (uvfits_nchan, 1))
).flatten()
fq_hdu = hdu_list[hdunames["AIPS FQ"]]
assert self.Nspws == fq_hdu.header["NO_IF"]
# TODO: This is fine for now, although I (karto) think that this
# is relative to the ref_freq, which can be specified as part of
# the AIPS SU table.
# Get rest freq value
ref_freq = uvutils._fits_gethduaxis(vis_hdu, 4)[0]
self.channel_width = np.transpose(
np.tile(abs(fq_hdu.data["CH WIDTH"]), (uvfits_nchan, 1))
).flatten()
self.freq_array = np.reshape(
np.transpose(
(
ref_freq
+ fq_hdu.data["IF FREQ"]
+ np.outer(np.arange(uvfits_nchan), fq_hdu.data["CH WIDTH"])
)
),
(1, -1),
)
else:
self.Nfreqs = vis_hdr.pop("NAXIS4")
self.freq_array = uvutils._fits_gethduaxis(vis_hdu, 4)
# TODO: Spw axis to be collapsed in future release
self.freq_array.shape = (1,) + self.freq_array.shape
self.channel_width = vis_hdr.pop("CDELT4")
self.polarization_array = np.int32(uvutils._fits_gethduaxis(vis_hdu, 3))
# other info -- not required but frequently used
self.object_name = vis_hdr.pop("OBJECT", None)
self.telescope_name = vis_hdr.pop("TELESCOP", None)
self.instrument = vis_hdr.pop("INSTRUME", None)
latitude_degrees = vis_hdr.pop("LAT", None)
longitude_degrees = vis_hdr.pop("LON", None)
altitude = vis_hdr.pop("ALT", None)
self.x_orientation = vis_hdr.pop("XORIENT", None)
blt_order_str = vis_hdr.pop("BLTORDER", None)
if blt_order_str is not None:
self.blt_order = tuple(blt_order_str.split(", "))
if self.blt_order == ("bda",):
self._blt_order.form = (1,)
self.history = str(vis_hdr.get("HISTORY", ""))
if not uvutils._check_history_version(
self.history, self.pyuvdata_version_str
):
self.history += self.pyuvdata_version_str
self.vis_units = vis_hdr.pop("BUNIT", "uncalib")
# Added here as a fix since some previous versions of UVData allowed for
# all caps versions of UNCALIB.
if self.vis_units == "UNCALIB":
self.vis_units = "uncalib"
self.phase_center_epoch = vis_hdr.pop("EPOCH", None)
# PHSFRAME is not a standard UVFITS keyword, but was used by older
# versions of pyuvdata. To ensure backwards compatibility, we look
# for it first to determine the coordinate frame for the data
self.phase_center_frame = vis_hdr.pop("PHSFRAME", None)
# If we don't find the special keyword PHSFRAME, try for the more
# FITS-standard RADESYS
if self.phase_center_frame is None:
self.phase_center_frame = vis_hdr.pop("RADESYS", None)
# If we still don't find anything, try the two 'special' variant names
# for the coordinate frame that seem to have been documented
if self.phase_center_frame is None:
self.phase_center_frame = vis_hdr.pop("RADESYSA", None)
if self.phase_center_frame is None:
self.phase_center_frame = vis_hdr.pop("RADESYSa", None)
# If we _still_ can't find anything, take a guess based on the value
# listed in the EPOCH. The behavior listed here is based off of the
# AIPS task REGRD (http://www.aips.nrao.edu/cgi-bin/ZXHLP2.PL?REGRD)
if self.phase_center_frame is None:
if self.phase_center_epoch is None:
self.phase_center_frame = "icrs"
else:
frame = "fk4" if (self.phase_center_epoch == 1950.0) else "fk5"
self.phase_center_frame = frame
self.extra_keywords = uvutils._get_fits_extra_keywords(
vis_hdr, keywords_to_skip=["DATE-OBS"]
)
# Next read the antenna table
ant_hdu = hdu_list[hdunames["AIPS AN"]]
# stuff in the header
if self.telescope_name is None:
self.telescope_name = ant_hdu.header["ARRNAM"]
self.gst0 = ant_hdu.header["GSTIA0"]
self.rdate = ant_hdu.header["RDATE"]
self.earth_omega = ant_hdu.header["DEGPDY"]
self.dut1 = ant_hdu.header["UT1UTC"]
if "TIMESYS" in ant_hdu.header.keys():
self.timesys = ant_hdu.header["TIMESYS"]
else:
# CASA misspells this one
self.timesys = ant_hdu.header["TIMSYS"]
if "FRAME" in ant_hdu.header.keys():
xyz_telescope_frame = ant_hdu.header["FRAME"]
else:
warnings.warn(
"Required Antenna keyword 'FRAME' not set; "
"Assuming frame is 'ITRF'."
)
xyz_telescope_frame = "ITRF"
# get telescope location and antenna positions.
# VLA incorrectly sets ARRAYX/ARRAYY/ARRAYZ to 0, and puts array center
# in the antenna positions themselves
if (
np.isclose(ant_hdu.header["ARRAYX"], 0)
and np.isclose(ant_hdu.header["ARRAYY"], 0)
and np.isclose(ant_hdu.header["ARRAYZ"], 0)
):
x_telescope = np.mean(ant_hdu.data["STABXYZ"][:, 0])
y_telescope = np.mean(ant_hdu.data["STABXYZ"][:, 1])
z_telescope = np.mean(ant_hdu.data["STABXYZ"][:, 2])
self.antenna_positions = ant_hdu.data.field("STABXYZ") - np.array(
[x_telescope, y_telescope, z_telescope]
)
else:
x_telescope = ant_hdu.header["ARRAYX"]
y_telescope = ant_hdu.header["ARRAYY"]
z_telescope = ant_hdu.header["ARRAYZ"]
# AIPS memo #117 says that antenna_positions should be relative to
# the array center, but in a rotated ECEF frame so that the x-axis
# goes through the local meridian.
rot_ecef_positions = ant_hdu.data.field("STABXYZ")
latitude, longitude, altitude = uvutils.LatLonAlt_from_XYZ(
np.array([x_telescope, y_telescope, z_telescope]),
check_acceptability=run_check_acceptability,
)
self.antenna_positions = uvutils.ECEF_from_rotECEF(
rot_ecef_positions, longitude
)
if xyz_telescope_frame == "ITRF":
self.telescope_location = np.array(
[x_telescope, y_telescope, z_telescope]
)
else:
if (
latitude_degrees is not None
and longitude_degrees is not None
and altitude is not None
):
self.telescope_location_lat_lon_alt_degrees = (
latitude_degrees,
longitude_degrees,
altitude,
)
# stuff in columns
ant_names = ant_hdu.data.field("ANNAME").tolist()
self.antenna_names = []
for ant_ind, name in enumerate(ant_names):
# Sometimes CASA writes antnames as bytes not strings.
# If the ant name is shorter than 8 characters, the trailing
# characters may be non-ascii.
# This is technically a FITS violation as FITS requires ascii.
# So we just ignore any non-ascii bytes in the decode.
if isinstance(name, bytes):
ant_name_str = str(name.decode("utf-8", "ignore"))
else:
ant_name_str = name
# remove non-printing ascii characters and exclamation points
ant_name_str = (
ant_name_str.replace("\x00", "")
.replace("\x07", "")
.replace("!", "")
)
self.antenna_names.append(ant_name_str)
# subtract one to get to 0-indexed values rather than 1-indexed values
self.antenna_numbers = ant_hdu.data.field("NOSTA") - 1
self.Nants_telescope = len(self.antenna_numbers)
if "DIAMETER" in ant_hdu.columns.names:
self.antenna_diameters = ant_hdu.data.field("DIAMETER")
try:
self.set_telescope_params()
except ValueError as ve:
warnings.warn(str(ve))
# Now read in the random parameter info
self._get_parameter_data(
vis_hdu, run_check_acceptability, background_lsts=background_lsts,
)
            # If we find the source attribute in the FITS random parameter list,
# the multi_phase_center attribute will be set to True, and we should also
# expect that there must be an AIPS SU table.
if self.multi_phase_center and "AIPS SU" not in hdunames.keys():
warnings.warn(
"UVFITS file is missing AIPS SU table, which is required when "
"SOURCE is one of the `random paramters` in the main binary "
"table. Bypassing for now, but note that this file _may_ not "
"work correctly in UVFITS-based programs (e.g., AIPS, CASA)."
)
name = list(self.phase_center_catalog.keys())[0]
self.phase_center_ra = self.phase_center_catalog[name]["cat_lon"]
self.phase_center_dec = self.phase_center_catalog[name]["cat_lat"]
self.phase_center_frame = self.phase_center_catalog[name]["cat_frame"]
self.phase_center_epoch = self.phase_center_catalog[name]["cat_epoch"]
self.multi_phase_center = False
self._phase_center_id_array.required = False
self._Nphase.required = False
self._phase_center_catalog.required = False
self.object_name = name
self.Nphase = None
self.phase_center_catalog = None
self.phase_center_id_array = None
elif self.multi_phase_center:
su_hdu = hdu_list[hdunames["AIPS SU"]]
# We should have as many entries in the AIPS SU header as we have
                # unique entries in the SOURCES random parameter (checked in the call
# to get_parameter_data above)
if len(su_hdu.data) != len(np.unique(self.phase_center_id_array)):
raise RuntimeError(
"The UVFITS file has a malformed AIPS SU table - number of "
"sources do not match the number of unique source IDs in the "
"primary data header."
) # pragma: no cover
# Reset the catalog, since it has some dummy information stored within
# it (that was pulled off the primary table)
self._remove_phase_center(list(self.phase_center_catalog.keys())[0])
# Set up these arrays so we can assign values to them
self.phase_center_app_ra = np.zeros(self.Nblts)
self.phase_center_app_dec = np.zeros(self.Nblts)
self.phase_center_app_pa = np.zeros(self.Nblts)
# Alright, we are off to the races!
for idx in range(len(su_hdu.data)):
# Grab the indv source entry
sou_info = su_hdu.data[idx]
sou_id = sou_info["ID. NO."]
sou_name = sou_info["SOURCE"]
sou_ra = sou_info["RAEPO"] * (np.pi / 180.0)
sou_dec = sou_info["DECEPO"] * (np.pi / 180.0)
sou_epoch = sou_info["EPOCH"]
sou_frame = "fk5"
self._add_phase_center(
sou_name,
cat_id=sou_id,
cat_type="sidereal",
cat_lon=sou_ra,
cat_lat=sou_dec,
cat_frame=sou_frame,
cat_epoch=sou_epoch,
info_source="uvfits file",
)
# Calculate the apparent coordinate values
self._set_app_coords_helper()
# fix up the uvws if in the NCP baseline coordinate frame.
# Must be done here because it requires the phase_center_app_dec
if "UU---NCP" in vis_hdu.data.parnames:
self.uvw_array = uvutils._rotate_one_axis(
self.uvw_array[:, :, None], self.phase_center_app_dec - np.pi / 2, 0
)[:, :, 0]
if not read_data:
# don't read in the data. This means the object is a metadata
# only object but that may not matter for many purposes.
return
# Now read in the data
self._get_data(
vis_hdu,
antenna_nums,
antenna_names,
ant_str,
bls,
frequencies,
freq_chans,
times,
time_range,
lsts,
lst_range,
polarizations,
blt_inds,
False,
keep_all_metadata,
run_check,
check_extra,
run_check_acceptability,
strict_uvw_antpos_check,
fix_old_proj,
fix_use_ant_pos,
check_autos,
fix_autos,
)
def write_uvfits(
self,
filename,
spoof_nonessential=False,
write_lst=True,
force_phase=False,
run_check=True,
check_extra=True,
run_check_acceptability=True,
strict_uvw_antpos_check=False,
check_autos=True,
fix_autos=False,
):
"""
Write the data to a uvfits file.
If using this method to write out a data set for import into CASA, users should
        be aware that the `importuvfits` task does not currently support reading in
data sets where the number of antennas is > 255. If writing out such a data set
for use in CASA, we suggest using the measurement set writer (`UVData.write_ms`)
instead.
Parameters
----------
filename : str
The uvfits file to write to.
spoof_nonessential : bool
Option to spoof the values of optional UVParameters that are not set
but are required for uvfits files.
write_lst : bool
Option to write the LSTs to the metadata (random group parameters).
force_phase : bool
Option to automatically phase drift scan data to zenith of the first
timestamp.
run_check : bool
Option to check for the existence and proper shapes of parameters
before writing the file.
check_extra : bool
Option to check optional parameters as well as required ones.
run_check_acceptability : bool
Option to check acceptable range of the values of parameters before
writing the file.
strict_uvw_antpos_check : bool
Option to raise an error rather than a warning if the check that
uvws match antenna positions does not pass.
check_autos : bool
Check whether any auto-correlations have non-zero imaginary values in
data_array (which should not mathematically exist). Default is True.
fix_autos : bool
If auto-correlations with imaginary values are found, fix those values so
that they are real-only in data_array. Default is False.
Raises
------
ValueError
The `phase_type` of the object is "drift" and the `force_phase`
keyword is not set.
If the frequencies are not evenly spaced or are separated by more
than their channel width.
The polarization values are not evenly spaced.
Any of ['antenna_positions', 'gst0', 'rdate', 'earth_omega', 'dut1',
'timesys'] are not set on the object and `spoof_nonessential` is False.
If the `timesys` parameter is not set to "UTC".
TypeError
If any entry in extra_keywords is not a single string or number.
"""
if run_check:
self.check(
check_extra=check_extra,
run_check_acceptability=run_check_acceptability,
check_freq_spacing=True,
strict_uvw_antpos_check=strict_uvw_antpos_check,
check_autos=check_autos,
fix_autos=fix_autos,
)
if self.phase_type == "phased":
pass
elif self.phase_type == "drift":
if force_phase:
print(
"The data are in drift mode and do not have a "
"defined phase center. Phasing to zenith of the first "
"timestamp."
)
phase_time = Time(self.time_array[0], format="jd")
self.phase_to_time(phase_time)
else:
raise ValueError(
"The data are in drift mode. "
"Set force_phase to true to phase the data "
"to zenith of the first timestamp before "
"writing a uvfits file."
)
if self.flex_spw:
# If we have a 'flexible' spectral window, we will need to evaluate the
# frequency axis slightly differently.
if self.future_array_shapes:
freq_array_use = self.freq_array
else:
freq_array_use = self.freq_array[0, :]
nchan_list = []
start_freq_array = []
delta_freq_array = []
for idx in self.spw_array:
chan_mask = self.flex_spw_id_array == idx
nchan_list += [np.sum(chan_mask)]
start_freq_array += [freq_array_use[chan_mask][0]]
# Need the array direction here since channel_width is always supposed
                # to be > 0, but channels can be in descending freq order
freq_dir = np.sign(np.median(np.diff(freq_array_use[chan_mask])))
delta_freq_array += [
np.median(self.channel_width[chan_mask]) * freq_dir
]
start_freq_array = np.reshape(np.array(start_freq_array), (1, -1)).astype(
np.float64
)
delta_freq_array = np.reshape(np.array(delta_freq_array), (1, -1)).astype(
np.float64
)
# We've constructed a couple of lists with relevant values, now time to
# check them to make sure that the data will write correctly
# Make sure that all the windows are of the same size
if len(np.unique(nchan_list)) != 1:
raise IndexError(
"UVFITS format cannot handle spectral windows of different sizes!"
)
            # Make sure freq values are greater than zero. Note that I think _technically
# one could write negative frequencies into the dataset, but I am pretty
# sure that reduction packages may balk hard.
if np.any(start_freq_array <= 0):
raise ValueError("Frequency values must be > 0 for UVFITS!")
# Make sure the delta values are non-zero
if np.any(delta_freq_array == 0):
raise ValueError("Something is wrong, frequency values not unique!")
# If we passed all the above checks, then it's time to fill some extra
# array values. Note that 'ref_freq' is something of a placeholder for
# other exciting things...
ref_freq = start_freq_array[0, 0]
else:
if self.future_array_shapes:
ref_freq = self.freq_array[0]
# we've already run the check_freq_spacing, so channel widths are the
# same to our tolerances
delta_freq_array = np.array([[np.median(self.channel_width)]]).astype(
np.float64
)
else:
ref_freq = self.freq_array[0, 0]
delta_freq_array = np.array([[self.channel_width]]).astype(np.float64)
if self.Npols > 1:
pol_indexing = np.argsort(np.abs(self.polarization_array))
polarization_array = self.polarization_array[pol_indexing]
if not uvutils._test_array_constant_spacing(polarization_array):
raise ValueError(
"The polarization values are not evenly spaced (probably "
"because of a select operation). The uvfits format "
"does not support unevenly spaced polarizations."
)
pol_spacing = polarization_array[1] - polarization_array[0]
else:
pol_indexing = np.asarray([0])
polarization_array = self.polarization_array
pol_spacing = 1
for p in self.extra():
param = getattr(self, p)
if param.name in self.uvfits_required_extra:
if param.value is None:
if spoof_nonessential:
param.apply_spoof()
setattr(self, p, param)
else:
raise ValueError(
"Required attribute {attribute} "
"for uvfits not defined. Define or "
"set spoof_nonessential to True to "
"spoof this attribute.".format(attribute=p)
)
# check for unflagged data with nsample = 0. Warn if any found
wh_nsample0 = np.where(self.nsample_array == 0)
if np.any(~self.flag_array[wh_nsample0]):
warnings.warn(
"Some unflagged data has nsample = 0. Flags and "
"nsamples are combined in uvfits files such that "
"these data will appear to be flagged."
)
uvfits_data_shape = (
self.Nblts,
1,
1,
self.Nspws,
self.Nfreqs // self.Nspws,
self.Npols,
1,
)
# Reshape the arrays so that they match the uvfits conventions
# FITS uvw direction convention is opposite ours and Miriad's.
# So conjugate the visibilities and flip the uvws:
data_array = np.reshape(np.conj(self.data_array), uvfits_data_shape)
weights_array = np.reshape(
self.nsample_array * np.where(self.flag_array, -1, 1), uvfits_data_shape,
)
data_array = data_array[:, :, :, :, :, pol_indexing, :]
weights_array = weights_array[:, :, :, :, :, pol_indexing, :]
uvfits_array_data = np.concatenate(
[data_array.real, data_array.imag, weights_array], axis=6
)
# FITS uvw direction convention is opposite ours and Miriad's.
# So conjugate the visibilities and flip the uvws:
uvw_array_sec = -1 * self.uvw_array / const.c.to("m/s").value
# uvfits convention is that there are two float32 time_arrays and the
# float64 sum of them + relevant PZERO = actual JD
# a common practice is to set the PZERO to the JD at midnight of the first time
jd_midnight = np.floor(self.time_array[0] - 0.5) + 0.5
time_array1 = np.float32(self.time_array - jd_midnight)
time_array2 = np.float32(
self.time_array - jd_midnight - np.float64(time_array1)
)
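        # Illustration only (comments, nothing executed): on read, the full JD is
        # recovered as the PZERO associated with DATE (jd_midnight here) plus the
        # float64 sum of the two float32 parts computed above; splitting the
        # fractional day across two float32 values is what keeps enough precision.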
int_time_array = self.integration_time
baselines_use = self.antnums_to_baseline(
self.ant_1_array, self.ant_2_array, attempt256=True
)
# Set up dictionaries for populating hdu
# Note that uvfits antenna arrays are 1-indexed so we add 1
# to our 0-indexed arrays
group_parameter_dict = {
"UU ": uvw_array_sec[:, 0],
"VV ": uvw_array_sec[:, 1],
"WW ": uvw_array_sec[:, 2],
"DATE ": time_array1,
"DATE2 ": time_array2,
"BASELINE": baselines_use,
"SOURCE ": None,
"FREQSEL ": np.ones_like(self.time_array, dtype=np.float32),
"ANTENNA1": self.ant_1_array + 1,
"ANTENNA2": self.ant_2_array + 1,
"SUBARRAY": np.ones_like(self.ant_1_array),
"INTTIM ": int_time_array,
}
if self.multi_phase_center:
id_offset = np.any(
[
temp_dict["cat_id"] == 0
for temp_dict in self.phase_center_catalog.values()
]
)
group_parameter_dict["SOURCE "] = self.phase_center_id_array + id_offset
pscal_dict = {
"UU ": 1.0,
"VV ": 1.0,
"WW ": 1.0,
"DATE ": 1.0,
"DATE2 ": 1.0,
"BASELINE": 1.0,
"SOURCE ": 1.0,
"FREQSEL ": 1.0,
"ANTENNA1": 1.0,
"ANTENNA2": 1.0,
"SUBARRAY": 1.0,
"INTTIM ": 1.0,
}
pzero_dict = {
"UU ": 0.0,
"VV ": 0.0,
"WW ": 0.0,
"DATE ": jd_midnight,
"DATE2 ": 0.0,
"BASELINE": 0.0,
"SOURCE ": 0.0,
"FREQSEL ": 0.0,
"ANTENNA1": 0.0,
"ANTENNA2": 0.0,
"SUBARRAY": 0.0,
"INTTIM ": 0.0,
}
if write_lst:
# lst is a non-standard entry (it's not in the AIPS memo)
# but storing it can be useful (e.g. can avoid recalculating it on read)
# need to store it in 2 parts to get enough accuracy
# angles in uvfits files are stored in degrees, so first convert to degrees
lst_array_deg = np.rad2deg(self.lst_array)
lst_array_1 = np.float32(lst_array_deg)
lst_array_2 = np.float32(lst_array_deg - np.float64(lst_array_1))
group_parameter_dict["LST "] = lst_array_1
pscal_dict["LST "] = 1.0
pzero_dict["LST "] = 0.0
# list contains arrays of [u,v,w,date,baseline];
# each array has shape (Nblts)
parnames_use = ["UU ", "VV ", "WW ", "DATE ", "DATE2 "]
if np.max(self.ant_1_array) < 255 and np.max(self.ant_2_array) < 255:
# if the number of antennas is less than 256 then include both the
# baseline array and the antenna arrays in the group parameters.
# Otherwise just use the antenna arrays
parnames_use.append("BASELINE")
else:
warnings.warn(
"Found antenna numbers > 256 in this data set. This is permitted by "
"UVFITS standards, but may cause the `importuvfits` utility within "
"CASA to crash. If attempting to use this data set in CASA, consider "
"using the measurement set writer method (`write_ms`) instead."
)
if self.multi_phase_center:
parnames_use.append("SOURCE ")
parnames_use += ["ANTENNA1", "ANTENNA2", "SUBARRAY", "INTTIM "]
if write_lst:
parnames_use.append("LST ")
group_parameter_list = [
group_parameter_dict[parname] for parname in parnames_use
]
if write_lst:
# add second LST array part
parnames_use.append("LST ")
group_parameter_list.append(lst_array_2)
parnames_use_datefix = copy.deepcopy(parnames_use)
parnames_use_datefix[parnames_use_datefix.index("DATE2 ")] = "DATE "
hdu = fits.GroupData(
uvfits_array_data,
parnames=parnames_use_datefix,
pardata=group_parameter_list,
bitpix=-32,
)
hdu = fits.GroupsHDU(hdu)
for i, key in enumerate(parnames_use):
hdu.header["PSCAL" + str(i + 1) + " "] = pscal_dict[key]
hdu.header["PZERO" + str(i + 1) + " "] = pzero_dict[key]
# ISO string of first time in self.time_array
hdu.header["DATE-OBS"] = Time(self.time_array[0], scale="utc", format="jd").isot
hdu.header["CTYPE2 "] = "COMPLEX "
hdu.header["CRVAL2 "] = 1.0
hdu.header["CRPIX2 "] = 1.0
hdu.header["CDELT2 "] = 1.0
# Note: This axis is called STOKES to comply with the AIPS memo 117
        # However, this is confusing because it is NOT a true Stokes axis,
# it is really the polarization axis.
hdu.header["CTYPE3 "] = "STOKES "
hdu.header["CRVAL3 "] = float(polarization_array[0])
hdu.header["CRPIX3 "] = 1.0
hdu.header["CDELT3 "] = float(pol_spacing)
hdu.header["CTYPE4 "] = "FREQ "
hdu.header["CRVAL4 "] = ref_freq
hdu.header["CRPIX4 "] = 1.0
hdu.header["CDELT4 "] = delta_freq_array[0, 0]
hdu.header["CTYPE5 "] = "IF "
hdu.header["CRVAL5 "] = 1.0
hdu.header["CRPIX5 "] = 1.0
hdu.header["CDELT5 "] = 1.0
hdu.header["CTYPE6 "] = "RA"
hdu.header["CRVAL6 "] = self.phase_center_ra_degrees
hdu.header["CTYPE7 "] = "DEC"
hdu.header["CRVAL7 "] = self.phase_center_dec_degrees
hdu.header["BUNIT "] = self.vis_units
hdu.header["BSCALE "] = 1.0
hdu.header["BZERO "] = 0.0
name = "MULTI" if self.multi_phase_center else self.object_name
hdu.header["OBJECT "] = name
hdu.header["TELESCOP"] = self.telescope_name
hdu.header["LAT "] = self.telescope_location_lat_lon_alt_degrees[0]
hdu.header["LON "] = self.telescope_location_lat_lon_alt_degrees[1]
hdu.header["ALT "] = self.telescope_location_lat_lon_alt[2]
hdu.header["INSTRUME"] = self.instrument
if self.phase_center_epoch is not None:
hdu.header["EPOCH "] = float(self.phase_center_epoch)
# TODO: This is a keyword that should at some point get added for velocity
# reference stuff, although for right now pyuvdata doesn't do any sort of
# handling of this, so stub this out for now.
# hdu.header["SPECSYS "] = "TOPOCENT"
if self.phase_center_frame is not None:
# Previous versions of pyuvdata wrote this header as PHSFRAME
hdu.header["RADESYS"] = self.phase_center_frame
if self.x_orientation is not None:
hdu.header["XORIENT"] = self.x_orientation
if self.blt_order is not None:
blt_order_str = ", ".join(self.blt_order)
hdu.header["BLTORDER"] = blt_order_str
for line in self.history.splitlines():
hdu.header.add_history(line)
# end standard keywords; begin user-defined keywords
for key, value in self.extra_keywords.items():
# header keywords have to be 8 characters or less
if len(str(key)) > 8:
warnings.warn(
"key {key} in extra_keywords is longer than 8 "
"characters. It will be truncated to 8 as required "
"by the uvfits file format.".format(key=key)
)
keyword = key[:8].upper()
if isinstance(value, (dict, list, np.ndarray)):
raise TypeError(
"Extra keyword {keyword} is of {keytype}. "
"Only strings and numbers are "
"supported in uvfits.".format(keyword=key, keytype=type(value))
)
if keyword == "COMMENT":
for line in value.splitlines():
hdu.header.add_comment(line)
else:
hdu.header[keyword] = value
# ADD the ANTENNA table
staxof = np.zeros(self.Nants_telescope)
# 0 specifies alt-az, 6 would specify a phased array
mntsta = np.zeros(self.Nants_telescope)
# beware, X can mean just about anything
poltya = np.full((self.Nants_telescope), "X", dtype=np.object_)
polaa = [90.0] + np.zeros(self.Nants_telescope)
poltyb = np.full((self.Nants_telescope), "Y", dtype=np.object_)
polab = [0.0] + np.zeros(self.Nants_telescope)
col1 = fits.Column(name="ANNAME", format="8A", array=self.antenna_names)
# AIPS memo #117 says that antenna_positions should be relative to
# the array center, but in a rotated ECEF frame so that the x-axis
# goes through the local meridian.
longitude = self.telescope_location_lat_lon_alt[1]
rot_ecef_positions = uvutils.rotECEF_from_ECEF(
self.antenna_positions, longitude
)
col2 = fits.Column(name="STABXYZ", format="3D", array=rot_ecef_positions)
# col3 = fits.Column(name="ORBPARAM", format="0D", array=Norb)
        # convert to 1-indexed from 0-indexed indices
col4 = fits.Column(name="NOSTA", format="1J", array=self.antenna_numbers + 1)
col5 = fits.Column(name="MNTSTA", format="1J", array=mntsta)
col6 = fits.Column(name="STAXOF", format="1E", array=staxof)
col7 = fits.Column(name="POLTYA", format="1A", array=poltya)
col8 = fits.Column(name="POLAA", format="1E", array=polaa)
# col9 = fits.Column(name='POLCALA', format='0E', array=Npcal, Nspws)
col10 = fits.Column(name="POLTYB", format="1A", array=poltyb)
col11 = fits.Column(name="POLAB", format="1E", array=polab)
# col12 = fits.Column(name='POLCALB', format='0E', array=Npcal, Nspws)
col_list = [col1, col2, col4, col5, col6, col7, col8, col10, col11]
        # The commented out entries are up above to help check for consistency with the
# UVFITS format. ORBPARAM, POLCALA, and POLCALB are all technically required,
# but are all of zero length. Added here to help with debugging.
if self.antenna_diameters is not None:
col12 = fits.Column(
name="DIAMETER", format="1E", array=self.antenna_diameters
)
col_list.append(col12)
cols = fits.ColDefs(col_list)
ant_hdu = fits.BinTableHDU.from_columns(cols)
ant_hdu.header["EXTNAME"] = "AIPS AN"
ant_hdu.header["EXTVER"] = 1
# write XYZ coordinates
ant_hdu.header["ARRAYX"] = self.telescope_location[0]
ant_hdu.header["ARRAYY"] = self.telescope_location[1]
ant_hdu.header["ARRAYZ"] = self.telescope_location[2]
ant_hdu.header["FRAME"] = "ITRF"
ant_hdu.header["GSTIA0"] = self.gst0
# TODO Karto: Do this more intelligently in the future
if self.future_array_shapes:
ant_hdu.header["FREQ"] = self.freq_array[0]
else:
ant_hdu.header["FREQ"] = self.freq_array[0, 0]
ant_hdu.header["RDATE"] = self.rdate
ant_hdu.header["UT1UTC"] = self.dut1
ant_hdu.header["TIMESYS"] = self.timesys
if self.timesys != "UTC":
raise ValueError(
"This file has a time system {tsys}. "
'Only "UTC" time system files are supported'.format(tsys=self.timesys)
)
ant_hdu.header["ARRNAM"] = self.telescope_name
ant_hdu.header["NO_IF"] = self.Nspws
ant_hdu.header["DEGPDY"] = self.earth_omega
# This is just a statically defined value
ant_hdu.header["IATUTC"] = 37.0
# set mandatory parameters which are not supported by this object
# (or that we just don't understand)
ant_hdu.header["NUMORB"] = 0
        # note: Bart had this set to 3. We've set it to 0 after aips 117. -jph
ant_hdu.header["NOPCAL"] = 0
ant_hdu.header["POLTYPE"] = "X-Y LIN"
# note: we do not support the concept of "frequency setups"
# -- lists of spws given in a SU table.
# Karto: Here might be a place to address freq setup?
ant_hdu.header["FREQID"] = 1
# if there are offsets in images, this could be the culprit
ant_hdu.header["POLARX"] = 0.0
ant_hdu.header["POLARY"] = 0.0
ant_hdu.header["DATUTC"] = 0 # ONLY UTC SUPPORTED
# we always output right handed coordinates
ant_hdu.header["XYZHAND"] = "RIGHT"
# At some point, we can fill these in more completely using astropy IERS
# utilities, since CASA/AIPS doesn't want to be told what the apparent coords
# are, but rather wants to calculate them itself.
# ant_hdu.header["RDATE"] = '2020-07-24T16:35:39.144087'
# ant_hdu.header["POLARX"] = 0.0
# ant_hdu.header["POLARY"] = 0.0
fits_tables = [hdu, ant_hdu]
# If needed, add the FQ table
if self.Nspws > 1:
fmt_d = "%iD" % self.Nspws
fmt_e = "%iE" % self.Nspws
fmt_j = "%iJ" % self.Nspws
# TODO Karto: Temp implementation until we fix some other things in UVData
if_freq = start_freq_array - ref_freq
ch_width = delta_freq_array
tot_bw = (self.Nfreqs // self.Nspws) * np.abs(delta_freq_array)
sideband = np.sign(delta_freq_array) * np.ones((1, self.Nspws))
# FRQSEL is hardcoded at the moment, could think about doing this
# at least somewhat more intelligently...
col_list = [
fits.Column(name="FRQSEL", format="1J", array=[1]),
fits.Column(name="IF FREQ", unit="HZ", format=fmt_d, array=if_freq),
fits.Column(name="CH WIDTH", unit="HZ", format=fmt_e, array=ch_width),
fits.Column(
name="TOTAL BANDWIDTH", unit="HZ", format=fmt_e, array=tot_bw
),
fits.Column(name="SIDEBAND", format=fmt_j, array=sideband),
]
fq_hdu = fits.BinTableHDU.from_columns(fits.ColDefs(col_list))
fq_hdu.header["EXTNAME"] = "AIPS FQ"
fq_hdu.header["NO_IF"] = self.Nspws
fits_tables.append(fq_hdu)
# If needed, add the SU table
if self.multi_phase_center:
fmt_d = "%iD" % self.Nspws
fmt_e = "%iE" % self.Nspws
fmt_j = "%iJ" % self.Nspws
int_zeros = np.zeros(self.Nphase, dtype=int)
flt_zeros = np.zeros(self.Nphase, dtype=np.float64)
zero_arr = np.zeros((self.Nphase, self.Nspws))
sou_ids = np.zeros(self.Nphase)
name_arr = np.array(list(self.phase_center_catalog.keys()))
cal_code = [" "] * self.Nphase
# These are things we need to flip through on a source-by-source basis
ra_arr = np.zeros(self.Nphase, dtype=np.float64)
app_ra = np.zeros(self.Nphase, dtype=np.float64)
dec_arr = np.zeros(self.Nphase, dtype=np.float64)
app_dec = np.zeros(self.Nphase, dtype=np.float64)
epo_arr = np.zeros(self.Nphase, dtype=np.float64)
pm_ra = np.zeros(self.Nphase, dtype=np.float64)
pm_dec = np.zeros(self.Nphase, dtype=np.float64)
rest_freq = np.zeros((self.Nphase, self.Nspws), dtype=np.float64)
for idx, name in enumerate(name_arr):
phase_dict = self.phase_center_catalog[name]
# This is a stub for something smarter in the future
sou_ids[idx] = self.phase_center_catalog[name]["cat_id"] + id_offset
rest_freq[idx][:] = np.mean(self.freq_array)
pm_ra[idx] = 0.0
pm_dec[idx] = 0.0
if phase_dict["cat_type"] == "sidereal":
# So here's the deal -- we need all the objects to be in the same
# coordinate frame, although nothing in phase_center_catalog forces
# objects to share the same frame. So we want to make sure that
# everything lines up with the coordinate frame listed.
ra_arr[idx], dec_arr[idx] = uvutils.transform_sidereal_coords(
phase_dict["cat_lon"],
phase_dict["cat_lat"],
phase_dict["cat_frame"],
"fk5",
in_coord_epoch=phase_dict.get("cat_epoch"),
out_coord_epoch=phase_dict.get("cat_epoch"),
time_array=np.mean(self.time_array),
)
epo_arr[idx] = (
phase_dict["cat_epoch"]
if "cat_epoch" in (phase_dict.keys())
else 2000.0
)
cat_id = self.phase_center_catalog[name]["cat_id"]
app_ra[idx] = np.median(
self.phase_center_app_ra[self.phase_center_id_array == cat_id]
)
app_dec[idx] = np.median(
self.phase_center_app_dec[self.phase_center_id_array == cat_id]
)
ra_arr *= 180.0 / np.pi
dec_arr *= 180.0 / np.pi
app_ra *= 180.0 / np.pi
app_dec *= 180.0 / np.pi
col_list = [
fits.Column(name="ID. NO.", format="1J", array=sou_ids),
fits.Column(name="SOURCE", format="20A", array=name_arr),
fits.Column(name="QUAL", format="1J", array=int_zeros),
fits.Column(name="CALCODE", format="4A", array=cal_code),
fits.Column(name="IFLUX", format=fmt_e, unit="JY", array=zero_arr),
fits.Column(name="QFLUX", format=fmt_e, unit="JY", array=zero_arr),
fits.Column(name="UFLUX", format=fmt_e, unit="JY", array=zero_arr),
fits.Column(name="VFLUX", format=fmt_e, unit="JY", array=zero_arr),
fits.Column(name="FREQOFF", format=fmt_d, unit="HZ", array=zero_arr),
fits.Column(name="BANDWIDTH", format="1D", unit="HZ", array=flt_zeros),
fits.Column(name="RAEPO", format="1D", unit="DEGREES", array=ra_arr),
fits.Column(name="DECEPO", format="1D", unit="DEGREES", array=dec_arr),
fits.Column(name="EPOCH", format="1D", unit="YEARS", array=epo_arr),
fits.Column(name="RAAPP", format="1D", unit="DEGREES", array=app_ra),
fits.Column(name="DECAPP", format="1D", unit="DEGREES", array=app_dec),
fits.Column(name="LSRVEL", format=fmt_d, unit="M/SEC", array=zero_arr),
fits.Column(name="RESTFREQ", format=fmt_d, unit="HZ", array=rest_freq),
fits.Column(name="PMRA", format="1D", unit="DEG/DAY", array=pm_ra),
fits.Column(name="PMDEC", format="1D", unit="DEG/DAY", array=pm_dec),
]
su_hdu = fits.BinTableHDU.from_columns(fits.ColDefs(col_list))
su_hdu.header["EXTNAME"] = "AIPS SU"
su_hdu.header["NO_IF"] = self.Nspws
su_hdu.header["FREQID"] = 1
su_hdu.header["VELDEF"] = "RADIO"
# TODO: Eventually we want to not have this hardcoded, but pyuvdata at
# present does not carry around any velocity information. As per usual,
# I (Karto) am tipping my hand on what I might be working on next...
su_hdu.header["VELTYP"] = "LSR"
fits_tables.append(su_hdu)
# write the file
hdulist = fits.HDUList(hdus=fits_tables)
hdulist.writeto(filename, overwrite=True)
hdulist.close()
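# Minimal end-to-end usage sketch, kept as comments so importing this module has
# no side effects (the file names are hypothetical). As the class docstring
# notes, users go through UVData rather than instantiating UVFITS directly:
#
#   from pyuvdata import UVData
#   uv = UVData()
#   uv.read_uvfits("example.uvfits")          # header, metadata and data
#   uv.write_uvfits("copy.uvfits", spoof_nonessential=True)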
|
HERA-Team/pyuvdata
|
pyuvdata/uvdata/uvfits.py
|
Python
|
bsd-2-clause
| 69,235
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Miscellany of wrapper scripts for command-line bioinformatics tools, public data downloaders and other generic routines.
"""
from jcvi.apps.base import dmain
if __name__ == "__main__":
dmain(__file__)
|
tanghaibao/jcvi
|
jcvi/apps/__main__.py
|
Python
|
bsd-2-clause
| 258
|
# Adding other items
import strategy
import strategy.sample
import strategy.immediate
import strategy.hammer
import strategy.engulfing
import collections
dispatch = collections.OrderedDict([
('hammer', strategy.hammer.HammerStrategy),
('engulfing', strategy.engulfing.EngulfingStrategy),
('sample', strategy.sample.SampleStrategy),
('immediate', strategy.immediate.ImmediateStrategy),
])
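# Lookup sketch: strategies are resolved by name from the OrderedDict above.
# Constructor arguments are not defined in this file, so instantiation is left
# as a hedged comment:
#
#   cls = dispatch['hammer']   # -> strategy.hammer.HammerStrategy
#   # strat = cls(...)         # args depend on the strategy classes' __init__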
|
joequant/sptrader
|
strategy/strategylist.py
|
Python
|
bsd-2-clause
| 410
|
"""
@package mi.instrument.seabird.sbe16plus_v2.driver
@file mi/instrument/seabird/sbe16plus_v2/driver.py
@author David Everett
@brief Driver base class for sbe16plus V2 CTD instrument.
"""
__author__ = 'David Everett'
__license__ = 'Apache 2.0'
import time
import re
from mi.core.log import get_logger, get_logging_metaclass
log = get_logger()
from mi.core.common import BaseEnum, Units
from mi.core.util import dict_equal
from mi.core.instrument.protocol_param_dict import ParameterDictType
from mi.core.instrument.instrument_fsm import InstrumentFSM
from mi.core.instrument.instrument_driver import DriverParameter
from mi.core.instrument.data_particle import CommonDataParticleType
from mi.core.instrument.chunker import StringChunker
from mi.core.instrument.driver_dict import DriverDictKey
from mi.core.instrument.instrument_protocol import CommandResponseInstrumentProtocol, InitializationType
from mi.core.instrument.instrument_driver import SingleConnectionInstrumentDriver
from mi.core.instrument.data_particle import DataParticle
from mi.core.instrument.data_particle import DataParticleKey
from mi.core.instrument.protocol_param_dict import ParameterDictVisibility
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import ResourceAgentState
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.instrument_driver import DriverEvent
from mi.core.exceptions import InstrumentProtocolException
from mi.core.exceptions import InstrumentParameterException
from mi.core.exceptions import NotImplementedException
from mi.core.exceptions import SampleException
from xml.dom.minidom import parseString
from mi.core.time_tools import get_timestamp_delayed
WAKEUP_TIMEOUT = 3
NEWLINE = '\r\n'
SBE_EPOCH = 946713600 # Unix time for SBE epoch 2000-01-01 00:00:00
TIMEOUT = 20
DEFAULT_ENCODER_KEY = '__default__'
ERROR_PATTERN = r"<ERROR type='(.*?)' msg='(.*?)'\/>"
ERROR_REGEX = re.compile(ERROR_PATTERN, re.DOTALL)
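# Illustrative match for the pattern above (the string is a made-up example,
# not captured from a real instrument):
#   ERROR_REGEX.search("<ERROR type='INVALID COMMAND' msg='unknown cmd'/>")
#   # -> groups: ('INVALID COMMAND', 'unknown cmd')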
class ScheduledJob(BaseEnum):
ACQUIRE_STATUS = 'acquire_status'
CLOCK_SYNC = 'clock_sync'
class Command(BaseEnum):
GET_CD = 'GetCD'
GET_SD = 'GetSD'
GET_CC = 'GetCC'
GET_EC = 'GetEC'
RESET_EC = 'ResetEC'
GET_HD = 'GetHD'
    #DS = 'ds' #Superseded by GetCD and GetSD, do not use!
    #DCAL = 'dcal' #Superseded by GetCC, do not use!
TS = 'ts'
STARTNOW = 'StartNow'
STOP = 'Stop'
SET = 'set'
class ProtocolState(BaseEnum):
"""
Protocol states for SBE16. Cherry picked from DriverProtocolState
enum.
"""
UNKNOWN = DriverProtocolState.UNKNOWN
COMMAND = DriverProtocolState.COMMAND
AUTOSAMPLE = DriverProtocolState.AUTOSAMPLE
DIRECT_ACCESS = DriverProtocolState.DIRECT_ACCESS
class ProtocolEvent(BaseEnum):
"""
Protocol events for SBE16. Cherry picked from DriverEvent enum.
"""
ENTER = DriverEvent.ENTER
EXIT = DriverEvent.EXIT
GET = DriverEvent.GET
SET = DriverEvent.SET
DISCOVER = DriverEvent.DISCOVER
ACQUIRE_SAMPLE = DriverEvent.ACQUIRE_SAMPLE
START_AUTOSAMPLE = DriverEvent.START_AUTOSAMPLE
STOP_AUTOSAMPLE = DriverEvent.STOP_AUTOSAMPLE
EXECUTE_DIRECT = DriverEvent.EXECUTE_DIRECT
START_DIRECT = DriverEvent.START_DIRECT
STOP_DIRECT = DriverEvent.STOP_DIRECT
CLOCK_SYNC = DriverEvent.CLOCK_SYNC
SCHEDULED_CLOCK_SYNC = DriverEvent.SCHEDULED_CLOCK_SYNC
ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS
SCHEDULED_ACQUIRED_STATUS = 'PROTOCOL_EVENT_SCHEDULED_ACQUIRE_STATUS'
class Capability(BaseEnum):
"""
Capabilities that are exposed to the user (subset of above)
"""
GET = DriverEvent.GET
SET = DriverEvent.SET
START_AUTOSAMPLE = DriverEvent.START_AUTOSAMPLE
STOP_AUTOSAMPLE = DriverEvent.STOP_AUTOSAMPLE
CLOCK_SYNC = DriverEvent.CLOCK_SYNC
ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS
ACQUIRE_SAMPLE = DriverEvent.ACQUIRE_SAMPLE
START_DIRECT = DriverEvent.START_DIRECT
STOP_DIRECT = DriverEvent.STOP_DIRECT
class CommonParameter(DriverParameter):
DATE_TIME = "DateTime"
PTYPE = "PType"
VOLT0 = "Volt0"
VOLT1 = "Volt1"
VOLT2 = "Volt2"
VOLT3 = "Volt3"
VOLT4 = "Volt4"
VOLT5 = "Volt5"
SBE38 = "SBE38"
SBE63 = "SBE63"
WETLABS = "WetLabs"
GTD = "GTD"
DUAL_GTD = "DualGTD"
OPTODE = "OPTODE"
OUTPUT_FORMAT = "OutputFormat"
LOGGING = "logging"
class Parameter(CommonParameter):
"""
Device parameters for SBE16.
"""
INTERVAL = 'SampleInterval'
TXREALTIME = 'TXREALTIME'
ECHO = "echo"
OUTPUT_EXEC_TAG = 'OutputExecutedTag'
PUMP_MODE = "PumpMode"
NCYCLES = "NCycles"
BIOWIPER = "Biowiper"
DELAY_BEFORE_SAMPLE = "DelayBeforeSampling"
DELAY_AFTER_SAMPLE = "DelayAfterSampling"
SBE50 = "SBE50"
SYNCMODE = "SyncMode"
SYNCWAIT = "SyncWait"
class ConfirmedParameter(BaseEnum):
"""
List of all parameters that require confirmation
i.e. set sent twice to confirm.
"""
PTYPE = Parameter.PTYPE
SBE63 = Parameter.SBE63
SBE38 = Parameter.SBE38
SBE50 = Parameter.SBE50
GTD = Parameter.GTD
DUAL_GTD = Parameter.DUAL_GTD
OPTODE = Parameter.OPTODE
WETLABS = Parameter.WETLABS
VOLT0 = Parameter.VOLT0
VOLT1 = Parameter.VOLT1
VOLT2 = Parameter.VOLT2
VOLT3 = Parameter.VOLT3
VOLT4 = Parameter.VOLT4
VOLT5 = Parameter.VOLT5
# Device prompts.
class Prompt(BaseEnum):
"""
SBE16 io prompts.
"""
COMMAND = NEWLINE + 'S>'
BAD_COMMAND = '?cmd S>'
AUTOSAMPLE = NEWLINE + 'S>'
EXECUTED = '<Executed/>'
class DataParticleType(BaseEnum):
RAW = CommonDataParticleType.RAW
CTD_PARSED = 'ctdbp_cdef_sample'
DEVICE_STATUS = 'ctdbp_cdef_status'
DEVICE_CALIBRATION = 'ctdbp_cdef_calibration_coefficients'
class Sbe16plusBaseParticle(DataParticle):
"""
Overload the base particle to add in some common parsing logic for SBE
instruments. Add regex methods to help identify and parse multi-line
strings.
"""
@staticmethod
def regex():
"""
Return a regex string to use in matching functions. This can be used
for parsing too if more complex parsing isn't needed.
@return: uncompiled regex string
"""
        raise NotImplementedException()
@staticmethod
def regex_compiled():
"""
Return a regex compiled regex of the regex
@return: compiled regex
"""
        raise NotImplementedException()
def regex_multiline(self):
"""
        return a dictionary containing uncompiled regexes used to match patterns
        in SBE multiline results, keyed by value id; encoders() supplies the
        matching encoder methods.
        @return: dictionary of uncompiled regexes
"""
        raise NotImplementedException()
def regex_multiline_compiled(self):
"""
return a dictionary containing compiled regex used to match patterns
in SBE multiline results.
        @return: dictionary of compiled regexes
"""
result = {}
for (key, regex) in self.regex_multiline().iteritems():
result[key] = re.compile(regex, re.DOTALL)
return result
def encoders(self):
"""
return a dictionary containing encoder methods for parameters
a special key 'default' can be used to name the default mechanism
@return: dictionary containing encoder callbacks
"""
        raise NotImplementedException()
def _get_multiline_values(self, split_fun=None):
"""
        return a list of {value_id: value} dictionaries extracted from a
        multiline sample using the multiline regexes
        @param: split_fun - function which splits the sample into lines
        @return: list of parsed value dictionaries
"""
result = []
if split_fun is None:
split_fun = self._split_on_newline
matchers = self.regex_multiline_compiled()
regexs = self.regex_multiline()
for line in split_fun(self.raw_data):
log.trace("Line: %s" % line)
for key in matchers.keys():
log.trace("match: %s" % regexs.get(key))
match = matchers[key].search(line)
if match:
encoder = self._get_encoder(key)
if encoder:
log.debug("encoding value %s (%s)" % (key, match.group(1)))
value = encoder(match.group(1))
else:
value = match.group(1)
log.trace("multiline match %s = %s (%s)" % (key, match.group(1), value))
result.append({
DataParticleKey.VALUE_ID: key,
DataParticleKey.VALUE: value
})
return result
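    # Added note: the list built above holds {DataParticleKey.VALUE_ID: ...,
    # DataParticleKey.VALUE: ...} dictionaries, the same shape the particle
    # subclasses below return from their _build_parsed_values() methods.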
def _split_on_newline(self, value):
"""
default split method for multiline regex matches
@param: value string to split
        @return: list of lines split on NEWLINE
"""
return value.split(NEWLINE)
def _get_encoder(self, key):
"""
        Get an encoder for a key; if one isn't specified, fall back to the default.
        Can return None if no encoder is defined.
        @param: key - key of the encoder we are looking for
        @return: encoder callback, or None
"""
encoder = self.encoders().get(key)
if not encoder:
encoder = self.encoders().get(DEFAULT_ENCODER_KEY)
return encoder
def _map_param_to_xml_tag(self, parameter_name):
"""
@return: a string containing the xml tag name for a parameter
"""
        raise NotImplementedException()
def _extract_xml_elements(self, node, tag, raise_exception_if_none_found=True):
"""
extract elements with tag from an XML node
@param: node - XML node to look in
@param: tag - tag of elements to look for
@param: raise_exception_if_none_found - raise an exception if no element is found
@return: return list of elements found; empty list if none found
"""
elements = node.getElementsByTagName(tag)
if raise_exception_if_none_found and len(elements) == 0:
raise SampleException("_extract_xml_elements: No %s in input data: [%s]" % (tag, self.raw_data))
return elements
def _extract_xml_element_value(self, node, tag, raise_exception_if_none_found=True):
"""
extract element value that has tag from an XML node
@param: node - XML node to look in
@param: tag - tag of elements to look for
@param: raise_exception_if_none_found - raise an exception if no value is found
@return: return value of element
"""
        elements = self._extract_xml_elements(node, tag, raise_exception_if_none_found)
        if not elements:
            return None
        children = elements[0].childNodes
        if len(children) == 0:
            if raise_exception_if_none_found:
                raise SampleException("_extract_xml_element_value: No value for %s in input data: [%s]" % (tag, self.raw_data))
            return None
        return children[0].nodeValue
def _get_xml_parameter(self, xml_element, parameter_name, dtype=float, raise_exception_if_none_found=True):
try:
value = dtype(self._extract_xml_element_value(xml_element, self._map_param_to_xml_tag(parameter_name)))
except SampleException:
if raise_exception_if_none_found:
raise SampleException
value = None
return {DataParticleKey.VALUE_ID: parameter_name,
DataParticleKey.VALUE: value}
########################################################################
# Static helpers.
########################################################################
@staticmethod
def hex2value(hex_value, divisor=None):
"""
        Convert an SBE hex string to a numeric value. Some hex values are converted
        from raw counts to volts using a divisor: if a divisor is passed a float is
        returned, otherwise the raw int is returned.
@param hex_value: string to convert
@param divisor: conversion value
@return: int or float of the converted value
"""
if not isinstance(hex_value, str):
raise InstrumentParameterException("hex value not a string")
if divisor is not None and divisor == 0:
raise InstrumentParameterException("divisor can not be 0")
value = int(hex_value, 16)
if divisor is not None:
return float(value) / divisor
return value
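    # Worked example (illustrative, values taken from the SBE16DataParticle docstring
    # below): hex2value('0A5371') == 676721 raw temperature counts, while
    # hex2value('7D82', 13107) == 32130 / 13107, i.e. about 2.4514 volts.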
@staticmethod
def yesno2bool(value):
"""
convert a yes no response to a bool
@param value: string to convert
        @return: 1 for 'yes', 0 for 'no' (used as a bool)
"""
if not (isinstance(value, str) or isinstance(value, unicode)):
raise InstrumentParameterException("value not a string")
if value.lower() == 'no':
return 0
elif value.lower() == 'yes':
return 1
raise InstrumentParameterException("Could not convert '%s' to bool" % value)
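    # e.g. yesno2bool('yes') -> 1, yesno2bool('No') -> 0 (comparison is case-insensitive).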
@staticmethod
def sbetime2unixtime(value):
"""
Convert an SBE integer time (epoch 1-1-2000) to unix time
@param value: sbe integer time
@return: unix time
"""
if not isinstance(value, int):
raise InstrumentParameterException("value not a int")
return SBE_EPOCH + value
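    # Worked example (illustrative): the docstring sample time 0x0EC4270B == 247736075
    # seconds since 2000-01-01, so sbetime2unixtime(247736075) == 946713600 + 247736075
    # == 1194449675 unix epoch seconds.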
@staticmethod
def float_to_int(val):
return int(float(val))
class SBE16DataParticleKey(BaseEnum):
TEMP = "temperature"
CONDUCTIVITY = "conductivity"
PRESSURE = "pressure"
PRESSURE_TEMP = "pressure_temp"
TIME = "ctd_time"
class SBE16DataParticle(Sbe16plusBaseParticle):
"""
Routines for parsing raw data into a data particle structure. Override
the building of values, and the rest should come along for free.
Sample:
#03EC1F0A738A81736187100004000B2CFDC618B859BE
Format:
#ttttttccccccppppppvvvvvvvvvvvvssssssss
Temperature = tttttt = 0A5371 (676721 decimal); temperature A/D counts = 676721
Conductivity = 1BC722 (1820450 decimal); conductivity frequency = 1820450 / 256 = 7111.133 Hz
Internally mounted strain gauge pressure = pppppp = 0C14C1 (791745 decimal);
Strain gauge pressure A/D counts = 791745
Internally mounted strain gauge temperature compensation = vvvv = 7D82 (32,130 decimal);
Strain gauge temperature = 32,130 / 13,107 = 2.4514 volts
First external voltage = vvvv = 0305 (773 decimal); voltage = 773 / 13,107 = 0.0590 volts
Second external voltage = vvvv = 0594 (1428 decimal); voltage = 1428 / 13,107 = 0.1089 volts
Time = ssssssss = 0EC4270B (247,736,075 decimal); seconds since January 1, 2000 = 247,736,075
"""
_data_particle_type = DataParticleType.CTD_PARSED
@staticmethod
def regex():
"""
Regular expression to match a sample pattern
@return: regex string
"""
#ttttttccccccppppppvvvvvvvvvvvvssssssss
        pattern = r'#? *' # pattern may or may not start with a '#'
pattern += r'([0-9A-F]{6})' # temperature
pattern += r'([0-9A-F]{6})' # conductivity
pattern += r'([0-9A-F]{6})' # pressure
pattern += r'([0-9A-F]{4})' # pressure temp
pattern += r'[0-9A-F]*' # consume extra voltage measurements
pattern += r'([0-9A-F]{8})' # time
pattern += NEWLINE
return pattern
@staticmethod
def regex_compiled():
"""
get the compiled regex pattern
@return: compiled re
"""
return re.compile(SBE16DataParticle.regex())
def _build_parsed_values(self):
"""
Take something in the autosample/TS format and split it into
C, T, and D values (with appropriate tags)
@throws SampleException If there is a problem with sample creation
"""
match = SBE16DataParticle.regex_compiled().match(self.raw_data)
if not match:
raise SampleException("No regex match of parsed sample data: [%s]" %
self.raw_data)
try:
temperature = self.hex2value(match.group(1))
conductivity = self.hex2value(match.group(2))
pressure = self.hex2value(match.group(3))
pressure_temp = self.hex2value(match.group(4))
elapse_time = self.hex2value(match.group(5))
self.set_internal_timestamp(unix_time=self.sbetime2unixtime(elapse_time))
except ValueError:
raise SampleException("ValueError while converting data: [%s]" %
self.raw_data)
result = [{DataParticleKey.VALUE_ID: SBE16DataParticleKey.TEMP,
DataParticleKey.VALUE: temperature},
{DataParticleKey.VALUE_ID: SBE16DataParticleKey.CONDUCTIVITY,
DataParticleKey.VALUE: conductivity},
{DataParticleKey.VALUE_ID: SBE16DataParticleKey.PRESSURE,
DataParticleKey.VALUE: pressure},
{DataParticleKey.VALUE_ID: SBE16DataParticleKey.PRESSURE_TEMP,
DataParticleKey.VALUE: pressure_temp},
{DataParticleKey.VALUE_ID: SBE16DataParticleKey.TIME,
DataParticleKey.VALUE: elapse_time}]
return result
class SBE16StatusParticleKey(BaseEnum):
FIRMWARE_VERSION = "firmware_version"
SERIAL_NUMBER = "serial_number"
DATE_TIME = "date_time_string"
VBATT = "battery_voltage_main"
VLITH = "battery_voltage_lithium"
IOPER = "operational_current"
IPUMP = "pump_current"
LOGGING_STATUS = "logging_status"
SAMPLES = "num_samples"
MEM_FREE = "mem_free"
SAMPLE_INTERVAL = "sample_interval"
MEASUREMENTS_PER_SAMPLE = "measurements_per_sample"
PUMP_MODE = "pump_mode"
DELAY_BEFORE_SAMPLING = "delay_before_sampling"
DELAY_AFTER_SAMPLING = "delay_after_sampling"
TX_REAL_TIME = "tx_real_time"
BATTERY_CUTOFF = "battery_cutoff"
PRESSURE_SENSOR = "pressure_sensor_type"
RANGE = "pressure_sensor_range"
SBE38 = "sbe38"
SBE50 = "sbe50"
WETLABS = "wetlabs"
OPTODE = "optode"
GAS_TENSION_DEVICE = "gas_tension_device"
EXT_VOLT_0 = "ext_volt_0"
EXT_VOLT_1 = "ext_volt_1"
EXT_VOLT_2 = "ext_volt_2"
EXT_VOLT_3 = "ext_volt_3"
EXT_VOLT_4 = "ext_volt_4"
EXT_VOLT_5 = "ext_volt_5"
ECHO_CHARACTERS = "echo_characters"
OUTPUT_FORMAT = "output_format"
OUTPUT_SALINITY = "output_salinity"
OUTPUT_SOUND_VELOCITY = "output_sound_velocity"
SERIAL_SYNC_MODE = "serial_sync_mode"
class SBE16StatusParticle(Sbe16plusBaseParticle):
"""
Routines for parsing raw data into a data particle structure. Override
the building of values, and the rest should come along for free.
"""
_data_particle_type = DataParticleType.DEVICE_STATUS
@staticmethod
def regex():
pattern = r'(<StatusData.*?</StatusData>).*?(<HardwareData.*?</HardwareData>).*?(<ConfigurationData.*?' \
r'</ConfigurationData>)'
return pattern
@staticmethod
def regex_compiled():
return re.compile(SBE16StatusParticle.regex(), re.DOTALL)
def _map_param_to_xml_tag(self, parameter_name):
map_param_to_tag = {
#GetSD response
SBE16StatusParticleKey.DATE_TIME: 'DateTime',
SBE16StatusParticleKey.VBATT : 'vMain',
SBE16StatusParticleKey.VLITH : 'vLith',
SBE16StatusParticleKey.IOPER : 'iMain',
SBE16StatusParticleKey.IPUMP : 'iPump',
SBE16StatusParticleKey.LOGGING_STATUS : 'LoggingState',
SBE16StatusParticleKey.SAMPLES : 'Samples',
SBE16StatusParticleKey.MEM_FREE : 'SamplesFree',
#GetHD response
SBE16StatusParticleKey.FIRMWARE_VERSION: 'FirmwareVersion',
#GetCD response
SBE16StatusParticleKey.PUMP_MODE : 'AutoRun',
SBE16StatusParticleKey.DELAY_BEFORE_SAMPLING : 'PumpDelay',
SBE16StatusParticleKey.DELAY_AFTER_SAMPLING : 'PumpDelay',
SBE16StatusParticleKey.SBE38 : 'SBE38',
SBE16StatusParticleKey.SBE50 : 'SBE50',
SBE16StatusParticleKey.WETLABS : 'WETLABS',
SBE16StatusParticleKey.OPTODE : 'OPTODE',
SBE16StatusParticleKey.GAS_TENSION_DEVICE : 'GTD',
SBE16StatusParticleKey.EXT_VOLT_0 : 'ExtVolt0',
SBE16StatusParticleKey.EXT_VOLT_1 : 'ExtVolt1',
SBE16StatusParticleKey.EXT_VOLT_2 : 'ExtVolt2',
SBE16StatusParticleKey.EXT_VOLT_3 : 'ExtVolt3',
SBE16StatusParticleKey.EXT_VOLT_4 : 'ExtVolt4',
SBE16StatusParticleKey.EXT_VOLT_5 : 'ExtVolt5',
SBE16StatusParticleKey.ECHO_CHARACTERS : 'EchoCharacters',
SBE16StatusParticleKey.OUTPUT_FORMAT : 'OutputFormat',
#not sure where these values are coming from
SBE16StatusParticleKey.OUTPUT_SALINITY : 'OutputSal',
SBE16StatusParticleKey.OUTPUT_SOUND_VELOCITY : 'OutputSV',
SBE16StatusParticleKey.SERIAL_SYNC_MODE : 'SyncMode',
SBE16StatusParticleKey.RANGE : 'PRange',
SBE16StatusParticleKey.TX_REAL_TIME : 'TxRealTime',
SBE16StatusParticleKey.BATTERY_CUTOFF : 'CutOff',
SBE16StatusParticleKey.PRESSURE_SENSOR : 'type',
SBE16StatusParticleKey.SAMPLE_INTERVAL : 'SampleInterval',
SBE16StatusParticleKey.MEASUREMENTS_PER_SAMPLE : 'NCycles',
}
return map_param_to_tag[parameter_name]
def _build_parsed_values(self):
"""
        Parse the combined GetSD/GetHD/GetCD XML status response into
        status particle values (with appropriate tags)
@throws SampleException If there is a problem with sample creation
"""
match = SBE16StatusParticle.regex_compiled().match(self.raw_data)
if not match:
raise SampleException("No regex match of parsed status data: [%s]" %
self.raw_data)
dom = parseString(match.group(1))
root = dom.documentElement
serial_number = root.getAttribute("SerialNumber")
result = [{DataParticleKey.VALUE_ID: SBE16StatusParticleKey.SERIAL_NUMBER, DataParticleKey.VALUE: serial_number}]
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.DATE_TIME, str))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.VBATT))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.VLITH))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.IOPER))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.IPUMP))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.LOGGING_STATUS, str))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.SAMPLES, int))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.MEM_FREE, int))
        result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.SAMPLE_INTERVAL, int, False))
        result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.MEASUREMENTS_PER_SAMPLE, int, False))
dom = parseString(match.group(2))
root = dom.documentElement
sensors = self._extract_xml_elements(root, "Sensor")
for sensor in sensors:
sensor_id = sensor.getAttribute("id")
log.debug('SENSOR ID %r', sensor_id)
if sensor_id == "Main Pressure":
result.append(self._get_xml_parameter(sensor, SBE16StatusParticleKey.PRESSURE_SENSOR, str))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.FIRMWARE_VERSION, str))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.RANGE, int, False))
dom = parseString(match.group(3))
root = dom.documentElement
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.PUMP_MODE, str))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.DELAY_BEFORE_SAMPLING))
        result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.DELAY_AFTER_SAMPLING))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.SBE38, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.SBE50, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.WETLABS, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.OPTODE, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.GAS_TENSION_DEVICE, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.EXT_VOLT_0, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.EXT_VOLT_1, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.EXT_VOLT_2, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.EXT_VOLT_3, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.EXT_VOLT_4, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.EXT_VOLT_5, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.ECHO_CHARACTERS, self.yesno2bool))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.OUTPUT_FORMAT, str))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.OUTPUT_SALINITY, self.yesno2bool, False))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.OUTPUT_SOUND_VELOCITY, self.yesno2bool, False))
        result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.BATTERY_CUTOFF))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.TX_REAL_TIME, self.yesno2bool, False))
result.append(self._get_xml_parameter(root, SBE16StatusParticleKey.SERIAL_SYNC_MODE, self.yesno2bool, False))
return result
class SBE16CalibrationParticleKey(BaseEnum):
FIRMWARE_VERSION = "firmware_version"
SERIAL_NUMBER = "serial_number"
DATE_TIME = "date_time_string"
TEMP_CAL_DATE = "calibration_date_temperature"
TA0 = "temp_coeff_ta0"
TA1 = "temp_coeff_ta1"
TA2 = "temp_coeff_ta2"
TA3 = "temp_coeff_ta3"
TOFFSET = "temp_coeff_offset"
COND_CAL_DATE = "calibration_date_conductivity"
CONDG = "cond_coeff_cg"
CONDH = "cond_coeff_ch"
CONDI = "cond_coeff_ci"
CONDJ = "cond_coeff_cj"
CPCOR = "cond_coeff_cpcor"
CTCOR = "cond_coeff_ctcor"
CSLOPE = "cond_coeff_cslope"
PRES_SERIAL_NUMBER = "pressure_sensor_serial_number"
PRES_RANGE = "pressure_sensor_range"
PRES_CAL_DATE = "calibration_date_pressure"
# Quartz
PC1 = "press_coeff_pc1"
PC2 = "press_coeff_pc2"
PC3 = "press_coeff_pc3"
PD1 = "press_coeff_pd1"
PD2 = "press_coeff_pd2"
PT1 = "press_coeff_pt1"
PT2 = "press_coeff_pt2"
PT3 = "press_coeff_pt3"
PT4 = "press_coeff_pt4"
PSLOPE = "press_coeff_pslope"
# strain gauge
PA0 = "press_coeff_pa0"
PA1 = "press_coeff_pa1"
PA2 = "press_coeff_pa2"
PTCA0 = "press_coeff_ptca0"
PTCA1 = "press_coeff_ptca1"
PTCA2 = "press_coeff_ptca2"
PTCB0 = "press_coeff_ptcb0"
PTCB1 = "press_coeff_ptcb1"
PTCB2 = "press_coeff_ptcb2"
PTEMPA0 = "press_coeff_ptempa0"
PTEMPA1 = "press_coeff_ptempa1"
PTEMPA2 = "press_coeff_ptempa2"
POFFSET = "press_coeff_poffset"
EXT_VOLT0_OFFSET = "ext_volt0_offset"
EXT_VOLT0_SLOPE = "ext_volt0_slope"
EXT_VOLT1_OFFSET = "ext_volt1_offset"
EXT_VOLT1_SLOPE = "ext_volt1_slope"
EXT_VOLT2_OFFSET = "ext_volt2_offset"
EXT_VOLT2_SLOPE = "ext_volt2_slope"
EXT_VOLT3_OFFSET = "ext_volt3_offset"
EXT_VOLT3_SLOPE = "ext_volt3_slope"
EXT_VOLT4_OFFSET = "ext_volt4_offset"
EXT_VOLT4_SLOPE = "ext_volt4_slope"
EXT_VOLT5_OFFSET = "ext_volt5_offset"
EXT_VOLT5_SLOPE = "ext_volt5_slope"
EXT_FREQ = "ext_freq_sf"
class SBE16CalibrationParticle(Sbe16plusBaseParticle):
"""
Routines for parsing raw data into a data particle structure. Override
the building of values, and the rest should come along for free.
"""
_data_particle_type = DataParticleType.DEVICE_CALIBRATION
@staticmethod
def regex():
pattern = r'(<CalibrationCoefficients.*?</CalibrationCoefficients>)' + NEWLINE
return pattern
@staticmethod
def regex_compiled():
return re.compile(SBE16CalibrationParticle.regex(), re.DOTALL)
@staticmethod
def resp_regex():
pattern = r'(<CalibrationCoefficients.*?</CalibrationCoefficients>)'
return pattern
@staticmethod
def resp_regex_compiled():
return re.compile(SBE16CalibrationParticle.resp_regex(), re.DOTALL)
def _map_param_to_xml_tag(self, parameter_name):
map_param_to_tag = {
SBE16CalibrationParticleKey.FIRMWARE_VERSION : "DeviceType",
SBE16CalibrationParticleKey.SERIAL_NUMBER : "SerialNum",
SBE16CalibrationParticleKey.DATE_TIME : "CalDate",
SBE16CalibrationParticleKey.TEMP_CAL_DATE : "CalDate",
SBE16CalibrationParticleKey.TA0 : "TA0",
SBE16CalibrationParticleKey.TA1 : "TA1",
SBE16CalibrationParticleKey.TA2 : "TA2",
SBE16CalibrationParticleKey.TA3 : "TA3",
SBE16CalibrationParticleKey.TOFFSET : "TOFFSET",
SBE16CalibrationParticleKey.COND_CAL_DATE : "CalDate",
SBE16CalibrationParticleKey.CONDG : "G",
SBE16CalibrationParticleKey.CONDH : "H",
SBE16CalibrationParticleKey.CONDI : "I",
SBE16CalibrationParticleKey.CONDJ : "J",
SBE16CalibrationParticleKey.CPCOR : "CPCOR",
SBE16CalibrationParticleKey.CTCOR : "CTCOR",
SBE16CalibrationParticleKey.CSLOPE : "CSLOPE",
SBE16CalibrationParticleKey.PRES_SERIAL_NUMBER : "SerialNum",
SBE16CalibrationParticleKey.PRES_RANGE : r'PRANGE',
SBE16CalibrationParticleKey.PRES_CAL_DATE : "CalDate",
SBE16CalibrationParticleKey.PA0 : "PA0",
SBE16CalibrationParticleKey.PA1 : "PA1",
SBE16CalibrationParticleKey.PA2 : "PA2",
SBE16CalibrationParticleKey.PTCA0 : "PTCA0",
SBE16CalibrationParticleKey.PTCA1 : "PTCA1",
SBE16CalibrationParticleKey.PTCA2 : "PTCA2",
SBE16CalibrationParticleKey.PTCB0 : "PTCB0",
SBE16CalibrationParticleKey.PTCB1 : "PTCB1",
SBE16CalibrationParticleKey.PTCB2 : "PTCB2",
SBE16CalibrationParticleKey.PTEMPA0 : "PTEMPA0",
SBE16CalibrationParticleKey.PTEMPA1 : "PTEMPA1",
SBE16CalibrationParticleKey.PTEMPA2 : "PTEMPA2",
# Quartz
SBE16CalibrationParticleKey.PC1 : "PC1",
SBE16CalibrationParticleKey.PC2 : "PC2",
SBE16CalibrationParticleKey.PC3 : "PC3",
SBE16CalibrationParticleKey.PD1 : "PD1",
SBE16CalibrationParticleKey.PD2 : "PD2",
SBE16CalibrationParticleKey.PT1 : "PT1",
SBE16CalibrationParticleKey.PT2 : "PT2",
SBE16CalibrationParticleKey.PT3 : "PT3",
SBE16CalibrationParticleKey.PT4 : "PT4",
SBE16CalibrationParticleKey.PSLOPE : "PSLOPE",
SBE16CalibrationParticleKey.POFFSET : "POFFSET",
SBE16CalibrationParticleKey.EXT_VOLT0_OFFSET : "OFFSET",
SBE16CalibrationParticleKey.EXT_VOLT0_SLOPE : "SLOPE",
SBE16CalibrationParticleKey.EXT_VOLT1_OFFSET : "OFFSET",
SBE16CalibrationParticleKey.EXT_VOLT1_SLOPE : "SLOPE",
SBE16CalibrationParticleKey.EXT_VOLT2_OFFSET : "OFFSET",
SBE16CalibrationParticleKey.EXT_VOLT2_SLOPE : "SLOPE",
SBE16CalibrationParticleKey.EXT_VOLT3_OFFSET : "OFFSET",
SBE16CalibrationParticleKey.EXT_VOLT3_SLOPE : "SLOPE",
SBE16CalibrationParticleKey.EXT_VOLT4_OFFSET : "OFFSET",
SBE16CalibrationParticleKey.EXT_VOLT4_SLOPE : "SLOPE",
SBE16CalibrationParticleKey.EXT_VOLT5_OFFSET : "OFFSET",
SBE16CalibrationParticleKey.EXT_VOLT5_SLOPE : "SLOPE",
SBE16CalibrationParticleKey.EXT_FREQ : "EXTFREQSF"
}
return map_param_to_tag[parameter_name]
def _build_parsed_values(self):
"""
Parse the output of the calibration command
@throws SampleException If there is a problem with sample creation
"""
match = SBE16CalibrationParticle.regex_compiled().match(self.raw_data)
if not match:
raise SampleException("No regex match of parsed status data: [%s]" %
self.raw_data)
SERIAL_NUMBER = "SerialNumber"
CALIBRATION = "Calibration"
ID = "id"
TEMPERATURE_SENSOR_ID = "Main Temperature"
CONDUCTIVITY_SENSOR_ID = "Main Conductivity"
PRESSURE_SENSOR_ID = "Main Pressure"
VOLT0 = "Volt 0"
VOLT1 = "Volt 1"
VOLT2 = "Volt 2"
VOLT3 = "Volt 3"
VOLT4 = "Volt 4"
VOLT5 = "Volt 5"
EXTERNAL_FREQUENCY_CHANNEL = "external frequency channel"
dom = parseString(self.raw_data)
root = dom.documentElement
serial_number = root.getAttribute(SERIAL_NUMBER)
firmware_version = root.getAttribute("DeviceType")
result = [{DataParticleKey.VALUE_ID: SBE16CalibrationParticleKey.SERIAL_NUMBER, DataParticleKey.VALUE: serial_number},
{DataParticleKey.VALUE_ID: SBE16CalibrationParticleKey.FIRMWARE_VERSION, DataParticleKey.VALUE: firmware_version}]
calibration_elements = self._extract_xml_elements(root, CALIBRATION)
for calibration in calibration_elements:
id_attr = calibration.getAttribute(ID)
if id_attr == TEMPERATURE_SENSOR_ID:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.DATE_TIME, str))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.TEMP_CAL_DATE, str))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.TA0))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.TA1))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.TA2))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.TA3))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.TOFFSET))
elif id_attr == CONDUCTIVITY_SENSOR_ID:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.COND_CAL_DATE, str))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CONDG))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CONDH))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CONDI))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CONDJ))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CPCOR))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CTCOR))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.CSLOPE))
elif id_attr == PRESSURE_SENSOR_ID:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PRES_SERIAL_NUMBER, str))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PRES_CAL_DATE, str))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PA0, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PA1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PA2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTCA0, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTCA1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTCA2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTCB0, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTCB1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTCB2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTEMPA0, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTEMPA1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PTEMPA2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.POFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PRES_RANGE, self.float_to_int, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PC1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PC2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PC3, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PD1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PD2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PT1, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PT2, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PT3, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PT4, float, False))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.PSLOPE, float, False))
elif id_attr == VOLT0:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT0_OFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT0_SLOPE))
elif id_attr == VOLT1:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT1_OFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT1_SLOPE))
elif id_attr == VOLT2:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT2_OFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT2_SLOPE))
elif id_attr == VOLT3:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT3_OFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT3_SLOPE))
elif id_attr == VOLT4:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT4_OFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT4_SLOPE))
elif id_attr == VOLT5:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT5_OFFSET))
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_VOLT5_SLOPE))
elif id_attr == EXTERNAL_FREQUENCY_CHANNEL:
result.append(self._get_xml_parameter(calibration, SBE16CalibrationParticleKey.EXT_FREQ))
log.debug('RESULT = %r', result)
return result
###############################################################################
# Driver
###############################################################################
class SBE16InstrumentDriver(SingleConnectionInstrumentDriver):
"""
InstrumentDriver subclass for SBE16 driver.
Subclasses SingleConnectionInstrumentDriver with connection state
machine.
"""
def __init__(self, evt_callback):
"""
InstrumentDriver constructor.
@param evt_callback Driver process event callback.
"""
SingleConnectionInstrumentDriver.__init__(self, evt_callback)
def get_resource_params(self):
"""
Return list of device parameters available.
"""
return Parameter.list()
########################################################################
# Protocol builder.
########################################################################
def _build_protocol(self):
"""
Construct the driver protocol state machine.
"""
self._protocol = SBE16Protocol(Prompt, NEWLINE, self._driver_event)
###############################################################################
# Sea-Bird Electronics SBE16plus V2 SeaCAT protocol.
###############################################################################
class SBE16Protocol(CommandResponseInstrumentProtocol):
"""
Instrument protocol class for SBE16 driver.
    Subclasses CommandResponseInstrumentProtocol
"""
__metaclass__ = get_logging_metaclass(log_level='debug')
_sampling = False
def __init__(self, prompts, newline, driver_event):
"""
SBE16Protocol constructor.
@param prompts A BaseEnum class containing instrument prompts.
@param newline The SBE16 newline.
@param driver_event Driver process event callback.
"""
CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event)
# Build SBE16 protocol state machine.
self._protocol_fsm = InstrumentFSM(ProtocolState, ProtocolEvent, ProtocolEvent.ENTER, ProtocolEvent.EXIT)
# Add event handlers for protocol state machine.
handlers = {
ProtocolState.UNKNOWN: [
(ProtocolEvent.ENTER, self._handler_unknown_enter),
(ProtocolEvent.EXIT, self._handler_generic_exit),
(ProtocolEvent.DISCOVER, self._handler_unknown_discover),
],
ProtocolState.COMMAND: [
(ProtocolEvent.ENTER, self._handler_command_enter),
(ProtocolEvent.EXIT, self._handler_generic_exit),
(ProtocolEvent.ACQUIRE_SAMPLE, self._handler_command_acquire_sample),
(ProtocolEvent.START_AUTOSAMPLE, self._handler_command_start_autosample),
(ProtocolEvent.GET, self._handler_get),
(ProtocolEvent.SET, self._handler_command_set),
(ProtocolEvent.START_DIRECT, self._handler_command_start_direct),
(ProtocolEvent.CLOCK_SYNC, self._handler_command_clock_sync_clock),
(ProtocolEvent.ACQUIRE_STATUS, self._handler_command_acquire_status),
(ProtocolEvent.SCHEDULED_ACQUIRED_STATUS, self._handler_command_acquire_status),
(ProtocolEvent.SCHEDULED_CLOCK_SYNC, self._handler_command_clock_sync_clock)
],
ProtocolState.DIRECT_ACCESS: [
(ProtocolEvent.ENTER, self._handler_direct_access_enter),
(ProtocolEvent.EXIT, self._handler_generic_exit),
(ProtocolEvent.EXECUTE_DIRECT, self._handler_direct_access_execute_direct),
(ProtocolEvent.STOP_DIRECT, self._handler_direct_access_stop_direct)
],
ProtocolState.AUTOSAMPLE: [
(ProtocolEvent.ENTER, self._handler_autosample_enter),
(ProtocolEvent.EXIT, self._handler_generic_exit),
(ProtocolEvent.GET, self._handler_get),
(ProtocolEvent.STOP_AUTOSAMPLE, self._handler_autosample_stop_autosample),
(ProtocolEvent.SCHEDULED_ACQUIRED_STATUS, self._handler_autosample_acquire_status),
(ProtocolEvent.SCHEDULED_CLOCK_SYNC, self._handler_autosample_clock_sync)
]
}
for state in handlers:
for event, handler in handlers[state]:
self._protocol_fsm.add_handler(state, event, handler)
# Construct the parameter dictionary containing device parameters,
# current parameter values, and set formatting functions.
self._build_driver_dict()
self._build_command_dict()
self._build_param_dict()
        # Add build handlers for device commands; all commands except SET use the
        # simple command builder.
for cmd in Command.list():
if cmd == Command.SET:
self._add_build_handler(Command.SET, self._build_set_command)
self._add_response_handler(Command.SET, self._parse_set_response)
else:
self._add_build_handler(cmd, self._build_simple_command)
# Add response handlers for device commands.
self._add_response_handler(Command.GET_SD, self._parse_status_response)
self._add_response_handler(Command.GET_HD, self._parse_status_response)
self._add_response_handler(Command.GET_CD, self._parse_status_response)
self._add_response_handler(Command.GET_CC, self._parse_status_response)
self._add_response_handler(Command.GET_EC, self._parse_status_response)
        # Start state machine in UNKNOWN state.
self._protocol_fsm.start(ProtocolState.UNKNOWN)
self._chunker = StringChunker(self.sieve_function)
self._add_scheduler_event(ScheduledJob.ACQUIRE_STATUS, ProtocolEvent.ACQUIRE_STATUS)
self._add_scheduler_event(ScheduledJob.CLOCK_SYNC, ProtocolEvent.SCHEDULED_CLOCK_SYNC)
@staticmethod
def sieve_function(raw_data):
"""
The method that splits samples
"""
matchers = []
return_list = []
matchers.append(SBE16DataParticle.regex_compiled())
matchers.append(SBE16StatusParticle.regex_compiled())
matchers.append(SBE16CalibrationParticle.regex_compiled())
for matcher in matchers:
for match in matcher.finditer(raw_data):
return_list.append((match.start(), match.end()))
return return_list
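    # Added note: the StringChunker constructed in __init__ calls sieve_function with
    # its raw buffer and uses the returned (start, end) index pairs to carve out
    # candidate sample/status/calibration fragments, which are then handed to
    # _got_chunk() below.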
def _filter_capabilities(self, events):
return [x for x in events if Capability.has(x)]
def _got_chunk(self, chunk, timestamp):
"""
The base class got_data has gotten a chunk from the chunker. Pass it to extract_sample
with the appropriate particle objects and REGEXes.
"""
if self._extract_sample(SBE16DataParticle, SBE16DataParticle.regex_compiled(), chunk, timestamp):
self._sampling = True
any([
self._extract_sample(SBE16StatusParticle, SBE16StatusParticle.regex_compiled(), chunk, timestamp),
self._extract_sample(SBE16CalibrationParticle, SBE16CalibrationParticle.regex_compiled(), chunk, timestamp)])
def _build_driver_dict(self):
"""
Populate the driver dictionary with options
"""
self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)
def _build_command_dict(self):
"""
        Populate the command dictionary with commands.
"""
self._cmd_dict.add(Capability.START_AUTOSAMPLE, display_name="Start Autosample")
self._cmd_dict.add(Capability.STOP_AUTOSAMPLE, display_name="Stop Autosample")
self._cmd_dict.add(Capability.CLOCK_SYNC, display_name="Synchronize Clock")
self._cmd_dict.add(Capability.ACQUIRE_STATUS, display_name="Acquire Status")
self._cmd_dict.add(Capability.ACQUIRE_SAMPLE, display_name="Acquire Sample")
########################################################################
# Unknown handlers.
########################################################################
def _handler_unknown_enter(self, *args, **kwargs):
"""
Enter unknown state.
"""
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_unknown_discover(self, *args, **kwargs):
"""
Discover current state; can be COMMAND or AUTOSAMPLE.
@retval (next_state, next_agent_state), COMMAND or AUTOSAMPLE
@throws InstrumentProtocolException if the device response does not correspond to
an expected state.
"""
#check for a sample particle
self._sampling = False
timeout = 2
end_time = time.time() + timeout
while time.time() < end_time:
time.sleep(.1)
if self._sampling:
return ProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING
return ProtocolState.COMMAND, ResourceAgentState.IDLE
########################################################################
# Command handlers.
########################################################################
def _handler_command_enter(self, *args, **kwargs):
"""
Enter command state.
@throws InstrumentTimeoutException if the device cannot be woken.
        @throws InstrumentProtocolException if the update commands are not recognized.
"""
if self._init_type != InitializationType.NONE:
self._update_params()
self._init_params()
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_command_acquire_status(self, *args, **kwargs):
"""
Get device status
"""
result = []
result.append(self._do_cmd_resp(Command.GET_SD, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetSD Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_HD, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetHD Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_CD, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetCD Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_CC, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetCC Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_EC, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetEC Response: %s", result)
# Reset the event counter right after getEC
self._do_cmd_resp(Command.RESET_EC, timeout=TIMEOUT)
return None, (None, ''.join(result))
def _handler_command_set(self, *args, **kwargs):
"""
Perform a set command.
@param args[0] parameter : value dict.
@retval (next_state, result) tuple, (None, None).
        @throws InstrumentParameterException if set parameters are missing, if the set
        parameters are not a dict, or if a parameter can't be properly formatted.
"""
startup = False
# Retrieve required parameter.
# Raise if no parameter provided, or not a dict.
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('Set command requires a parameter dict.')
if not isinstance(params, dict):
raise InstrumentParameterException('Set parameters not a dict.')
try:
startup = args[1]
except IndexError:
pass
self._set_params(params, startup)
return None, None
def _set_params(self, *args, **kwargs):
"""
Issue commands to the instrument to set various parameters
"""
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('Set command requires a parameter dict.')
self._verify_not_readonly(*args, **kwargs)
update_params = False
        # Pump Mode is the only parameter set by the driver whose input isn't
        # validated by the instrument, so we do a quick range check before we
        # start all the sets.
for (key, val) in params.iteritems():
if key == Parameter.PUMP_MODE and val not in [0, 1, 2]:
raise InstrumentParameterException("pump mode out of range")
for (key, val) in params.iteritems():
old_val = self._param_dict.format(key)
new_val = self._param_dict.format(key, val)
log.debug("KEY = %r OLD VALUE = %r NEW VALUE = %r", key, old_val, new_val)
if old_val != new_val:
update_params = True
if ConfirmedParameter.has(key):
# We add a write delay here because this command has to be sent
# twice, the write delay allows it to process the first command
# before it receives the beginning of the second.
self._do_cmd_resp(Command.SET, key, val, write_delay=0.2)
else:
self._do_cmd_resp(Command.SET, key, val, **kwargs)
log.debug("set complete, update params")
if update_params:
self._update_params()
def _handler_command_acquire_sample(self, *args, **kwargs):
"""
Acquire sample from SBE16.
@retval next_state, (next_agent_state, result) tuple
"""
result = self._do_cmd_resp(Command.TS, *args, **kwargs)
return None, (None, result)
def _handler_command_start_autosample(self, *args, **kwargs):
"""
Switch into autosample mode.
        @retval (next_state, (next_agent_state, result)) tuple,
        (ProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, None)) if successful.
@throws InstrumentTimeoutException if device cannot be woken for command.
@throws InstrumentProtocolException if command could not be built or misunderstood.
"""
self._start_logging(*args, **kwargs)
return ProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, None)
def _handler_command_start_direct(self):
"""
Start direct access
"""
return ProtocolState.DIRECT_ACCESS, (ResourceAgentState.DIRECT_ACCESS, None)
def _handler_command_clock_sync_clock(self, *args, **kwargs):
"""
sync clock close to a second edge
@retval (next_state, result) tuple, (None, None) if successful.
@throws InstrumentTimeoutException if device cannot be woken for command.
@throws InstrumentProtocolException if command could not be built or misunderstood.
"""
self._wakeup(timeout=TIMEOUT)
self._sync_clock(Command.SET, Parameter.DATE_TIME, TIMEOUT, time_format='%m%d%Y%H%M%S')
return None, (None, None)
########################################################################
# Autosample handlers.
########################################################################
def _handler_autosample_clock_sync(self, *args, **kwargs):
"""
execute a clock sync on the leading edge of a second change from
autosample mode. For this command we have to move the instrument
into command mode, do the clock sync, then switch back. If an
exception is thrown we will try to get ourselves back into
streaming and then raise that exception.
        @retval (next_state, (next_agent_state, result)) tuple, (None, (None, None))
        if successful (the protocol remains in AUTOSAMPLE).
@throws InstrumentTimeoutException if device cannot be woken for command.
@throws InstrumentProtocolException if command could not be built or misunderstood.
"""
try:
# Switch to command mode,
self._stop_logging(*args, **kwargs)
# Sync the clock
self._sync_clock(Command.SET, Parameter.DATE_TIME, TIMEOUT, time_format='%m%d%Y%H%M%S')
finally:
# Switch back to streaming
self._start_logging(*args, **kwargs)
return None, (None, None)
def _handler_autosample_enter(self, *args, **kwargs):
"""
Enter autosample state.
"""
if self._init_type != InitializationType.NONE:
self._stop_logging()
self._update_params()
self._init_params()
self._start_logging()
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_autosample_stop_autosample(self, *args, **kwargs):
"""
Stop autosample and switch back to command mode.
@retval (next_state, result) tuple
@throws InstrumentTimeoutException if device cannot be woken for command.
@throws InstrumentProtocolException if command misunderstood or
incorrect prompt received.
"""
self._stop_logging(*args, **kwargs)
return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None)
def _handler_autosample_acquire_status(self, *args, **kwargs):
"""
Get device status
"""
# When in autosample this command requires two wakeups to get to the right prompt
self._wakeup(timeout=WAKEUP_TIMEOUT, delay=0.3)
self._wakeup(timeout=WAKEUP_TIMEOUT, delay=0.3)
result = []
result.append(self._do_cmd_resp(Command.GET_SD, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetSD Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_HD, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetHD Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_CD, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetCD Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_CC, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetCC Response: %s", result)
result.append(self._do_cmd_resp(Command.GET_EC, timeout=TIMEOUT))
log.debug("_handler_command_acquire_status: GetEC Response: %s", result)
# Reset the event counter right after getEC
self._do_cmd_resp(Command.RESET_EC, timeout=TIMEOUT)
return None, (None, ''.join(result))
########################################################################
# Common handlers.
########################################################################
def _sync_clock(self, command, date_time_param, timeout=TIMEOUT, delay=1, time_format="%m%d%Y%H%M%S"):
"""
Send the command to the instrument to synchronize the clock
        @param command: command to set date time
@param date_time_param: date time parameter that we want to set
@param timeout: command timeout
@param delay: wakeup delay
@param time_format: time format string for set command
@raise: InstrumentProtocolException if command fails
"""
        # let's clear out any past data so it doesn't confuse the command
        self._linebuf = ''
        self._promptbuf = ''
        log.debug("Set time format(%s) '%s'", time_format, date_time_param)
str_val = get_timestamp_delayed(time_format)
log.debug("Set time value == '%s'", str_val)
self._do_cmd_resp(command, date_time_param, str_val)
def _handler_generic_exit(self, *args, **kwargs):
"""
        Generic state exit handler.
"""
pass
########################################################################
# Direct access handlers.
########################################################################
def _handler_direct_access_enter(self, *args, **kwargs):
"""
Enter direct access state.
"""
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
self._sent_cmds = []
def _handler_direct_access_execute_direct(self, data):
"""
"""
self._do_cmd_direct(data)
# add sent command to list for 'echo' filtering in callback
self._sent_cmds.append(data)
return None, (None, None)
def _handler_direct_access_stop_direct(self):
"""
@throw InstrumentProtocolException on invalid command
"""
next_state, next_agent_state = self._handler_unknown_discover()
if next_state == DriverProtocolState.COMMAND:
next_agent_state = ResourceAgentState.COMMAND
return next_state, (next_agent_state, None)
########################################################################
# Private helpers.
########################################################################
def _start_logging(self, *args, **kwargs):
"""
Command the instrument to start logging
@param timeout: how long to wait for a prompt
@return: True if successful
@raise: InstrumentProtocolException if failed to start logging
"""
self._do_cmd_resp(Command.STARTNOW, *args, **kwargs)
def _stop_logging(self, *args, **kwargs):
"""
Command the instrument to stop logging
@param timeout: how long to wait for a prompt
@return: True if successful
@raise: InstrumentTimeoutException if prompt isn't seen
@raise: InstrumentProtocolException failed to stop logging
"""
kwargs['timeout'] = TIMEOUT
self._do_cmd_resp(Command.STOP, *args, **kwargs)
def _send_wakeup(self):
"""
Send a newline to attempt to wake the SBE16 device.
"""
self._connection.send(NEWLINE)
def _update_params(self, *args, **kwargs):
"""
        Update the parameter dictionary. Wake the device then issue the
        status commands (GetSD, GetCD, GetHD). The parameter
        dict will match line output and update itself.
        @throws InstrumentTimeoutException if device cannot be timely woken.
        @throws InstrumentProtocolException if the status commands are misunderstood.
"""
self._wakeup(timeout=WAKEUP_TIMEOUT, delay=0.3)
# For some reason when in streaming we require a second wakeup
self._wakeup(timeout=WAKEUP_TIMEOUT, delay=0.3)
# Get old param dict config.
old_config = self._param_dict.get_config()
# Issue status commands
self._do_cmd_resp(Command.GET_SD, timeout=TIMEOUT)
self._do_cmd_resp(Command.GET_CD, timeout=TIMEOUT)
self._do_cmd_resp(Command.GET_HD, timeout=TIMEOUT)
# Get new param dict config. If it differs from the old config,
# tell driver superclass to publish a config change event.
new_config = self._param_dict.get_config()
log.debug("Old Config: %s", old_config)
log.debug("New Config: %s", new_config)
if not dict_equal(new_config, old_config) and self._protocol_fsm.get_current_state() != ProtocolState.UNKNOWN:
log.debug("parameters updated, sending event")
self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)
def _build_set_command(self, cmd, param, val):
"""
Build handler for set commands. param=val followed by newline.
String val constructed by param dict formatting function.
@param param the parameter key to set.
@param val the parameter value to set.
        @retval The set command to be sent to the device.
@throws InstrumentProtocolException if the parameter is not valid or
if the formatting function could not accept the value passed.
"""
try:
if param is Parameter.DATE_TIME:
set_cmd = '%s=%s%s' % (param, val, NEWLINE)
else:
str_val = self._param_dict.format(param, val)
set_cmd = '%s=%s%s' % (param, str_val, NEWLINE)
# Some set commands need to be sent twice to confirm
if param in ConfirmedParameter.list():
set_cmd += set_cmd
except KeyError:
raise InstrumentParameterException('Unknown driver parameter %s' % param)
return set_cmd
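    # Illustrative example (hypothetical values; the exact formatting comes from the
    # parameter dictionary): _build_set_command(Command.SET, Parameter.INTERVAL, 10)
    # would yield something like 'SampleInterval=10\r\n', and a ConfirmedParameter such
    # as Parameter.OPTODE would have that string doubled so the set is sent twice.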
def _find_error(self, response):
"""
Find an error xml message in a response
@param response command response string.
@return tuple with type and message, None otherwise
"""
match = re.search(ERROR_REGEX, response)
if match:
return match.group(1), match.group(2)
return None
def _parse_set_response(self, response, prompt):
"""
Parse handler for set command.
@param response command response string.
@param prompt prompt following command response.
@throws InstrumentProtocolException if set command misunderstood.
"""
error = self._find_error(response)
if error:
log.error("Set command encountered error; type='%s' msg='%s'", error[0], error[1])
raise InstrumentParameterException('Set command failure: type="%s" msg="%s"' % (error[0], error[1]))
if prompt not in [Prompt.EXECUTED, Prompt.COMMAND]:
log.error("Set command encountered error; instrument returned: %s", response)
raise InstrumentProtocolException('Set command not recognized: %s' % response)
def _parse_status_response(self, response, prompt):
"""
Parse handler for status commands.
@param response command response string.
@param prompt prompt following command response.
@throws InstrumentProtocolException if command misunderstood.
"""
if prompt not in [Prompt.COMMAND, Prompt.EXECUTED]:
raise InstrumentProtocolException('Command not recognized: %s.' % response)
for line in response.split(NEWLINE):
self._param_dict.update(line)
return response
def _build_common_param_dict(self):
self._param_dict.add(Parameter.LOGGING,
r'LoggingState>(not )?logging</LoggingState',
lambda match: False if (match.group(1)) else True,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Logging",
description="Enable logging: (true | false)",
visibility=ParameterDictVisibility.READ_ONLY)
self._param_dict.add(Parameter.VOLT0,
r'ExtVolt0>(.*)</ExtVolt0',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Volt 0",
description="Enable external voltage 0: (true | false)",
startup_param=True,
direct_access=True,
default_value=True,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.VOLT1,
r'ExtVolt1>(.*)</ExtVolt1',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Volt 1",
description="Enable external voltage 1: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.VOLT2,
r'ExtVolt2>(.*)</ExtVolt2',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Volt 2",
description="Enable external voltage 2: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.VOLT3,
r'ExtVolt3>(.*)</ExtVolt3',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Volt 3",
description="Enable external voltage 3: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.VOLT4,
r'ExtVolt4>(.*)</ExtVolt4',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Volt 4",
description="Enable external voltage 4: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.VOLT5,
r'ExtVolt5>(.*)</ExtVolt5',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Volt 5",
description="Enable external voltage 5: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.SBE38,
r'SBE38>(.*)</SBE38',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="SBE38 Attached",
description="Enable SBE38: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.SBE63,
r'SBE63>(.*)</SBE63',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="SBE63 Attached",
description="Enable SBE63: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.WETLABS,
r'WETLABS>(.*)</WETLABS',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Wetlabs Sensor Attached",
description="Enable Wetlabs sensor: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.GTD,
r'GTD>(.*)</GTD',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="GTD Attached",
description="Enable GTD: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.DUAL_GTD,
r'GTD>(.*)</GTD',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Dual GTD Attached",
description="Enable second GTD: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.OUTPUT_FORMAT,
r'OutputFormat>(.*)</OutputFormat',
self._output_format_string_2_int,
int,
type=ParameterDictType.INT,
display_name="Output Format",
description="Format for the instrument output: (0:raw hex | 1:converted hex | 2:raw decimal | "
"3:converted decimal | 4:converted hex for afm | 5:converted xml uvic)",
startup_param=True,
direct_access=True,
default_value=0,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.OPTODE,
r'OPTODE>(.*)</OPTODE',
lambda match: True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Optode Attached",
description="Enable optode: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
def _build_param_dict(self):
"""
Populate the parameter dictionary with SBE16 parameters.
For each parameter key, add match string, match lambda function,
and value formatting function for set commands.
"""
self._build_common_param_dict()
self._param_dict.add(Parameter.PTYPE,
r"<Sensor id = 'Main Pressure'>.*?<type>(.*?)</type>.*?</Sensor>",
self._pressure_sensor_to_int,
str,
type=ParameterDictType.INT,
display_name="Pressure Sensor Type",
startup_param=True,
direct_access=True,
default_value=1,
description="Sensor type: (1:strain gauge | 3:quartz with temp comp)",
visibility=ParameterDictVisibility.IMMUTABLE,
regex_flags=re.DOTALL)
self._param_dict.add(Parameter.ECHO,
r'<EchoCharacters>(.*)</EchoCharacters>',
lambda match : True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Echo Characters",
description="Enable characters to be echoed as typed (true | false)",
startup_param=True,
direct_access=True,
default_value=True,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.OUTPUT_EXEC_TAG,
r'<OutputExecutedTag>(.*)</OutputExecutedTag>',
lambda match : True,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Output Execute Tag",
description="Enable display of XML executing and executed tags (true | false)",
startup_param=True,
direct_access=True,
default_value=True,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.PUMP_MODE,
r'<AutoRun>(.*)</AutoRun>',
self._pump_mode_to_int,
str,
type=ParameterDictType.INT,
display_name="Pump Mode",
description="Mode: (0:no pump | 1:run pump for 0.5 sec | 2:run pump during sample)",
startup_param=True,
direct_access=True,
default_value=2)
self._param_dict.add(Parameter.SBE50,
r'SBE50>(.*)</SBE50',
lambda match : True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="SBE50 Attached",
description="Enabled SBE50: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.DELAY_BEFORE_SAMPLE,
r'DelayBeforeSampling>(.*?)</DelayBeforeSampling',
lambda match : float(match.group(1)),
self._float_to_string,
type=ParameterDictType.FLOAT,
display_name="Delay Before Sample",
description=" Time to wait after switching on external voltages and RS-232 sensors "
"before sampling: (0-600).",
startup_param=True,
direct_access=True,
default_value=0.0,
units=Units.SECOND,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.DELAY_AFTER_SAMPLE,
r'DelayAfterSample>(.*?)</DelayAfterSample',
lambda match : float(match.group(1)),
str,
type=ParameterDictType.FLOAT,
display_name="Delay After Sample",
description="Time to wait after sampling is completed, before turning off power "
"to external voltages and RS-232 sensors.",
startup_param=True,
direct_access=True,
default_value=0.0,
units=Units.SECOND,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.SYNCMODE,
r'SyncMode>(dis|en)abled</SyncMode',
lambda match : True if match.group(1) == 'en' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Enable Serial Sync",
description="Enable serial line sync mode: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.NCYCLES,
r'NCycles>(.*?)</NCycles',
lambda match : int(match.group(1)),
str,
type=ParameterDictType.INT,
display_name="Ncycles",
description="Number of measurements to take and average every SampleInterval seconds.",
startup_param=True,
direct_access=False,
default_value=4)
self._param_dict.add(Parameter.INTERVAL,
r'SampleInterval>(.*?)</SampleInterval',
lambda match : int(match.group(1)),
str,
type=ParameterDictType.INT,
display_name="Sample Interval",
description="Interval between samples: (10 - 14,400).",
startup_param=True,
direct_access=False,
units=Units.SECOND,
default_value=10)
self._param_dict.add(Parameter.BIOWIPER,
r'Biowiper>(.*?)</Biowiper',
lambda match : False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Biowiper",
description="Enable ECO-FL fluorometer with Bio-Wiper: (true | false)",
startup_param=True,
direct_access=True,
default_value=False,
visibility=ParameterDictVisibility.IMMUTABLE)
self._param_dict.add(Parameter.TXREALTIME,
r'TxRealTime>(yes|no)</TxRealTime',
lambda match : True if match.group(1) == 'yes' else False,
self._true_false_to_string,
type=ParameterDictType.BOOL,
display_name="Transmit Real-Time",
description="Enable real-time data output: (true | false)",
startup_param=True,
direct_access=True,
default_value=True,
visibility=ParameterDictVisibility.IMMUTABLE)
########################################################################
# Static helpers to format set commands.
########################################################################
@staticmethod
def _pressure_sensor_to_int(match):
"""
map a pressure sensor string into an int representation
@param match: regex match
@return: mode 1, 2, 3 or None for no match
"""
v = match.group(1)
log.debug("get pressure type from: %s", v)
if v == "strain gauge" or v == "strain-0":
return 1
elif v == "quartz without temp comp":
return 2
elif v == "quartz with temp comp" or v == "quartzTC-0":
return 3
else:
return None
@staticmethod
def _pump_mode_to_int(match):
"""
map a pump mode string into an int representation
@param match: regex match
@return: mode 0, 1, 2 or None for no match
"""
v = match.group(1)
log.debug("get pump mode from: %s", v)
if v == "no pump":
return 0
elif v == "run pump for 0.5 sec":
return 1
elif v == "run pump during sample":
return 2
else:
return None
@staticmethod
def _true_false_to_string(v):
"""
Write a boolean value to string formatted for sbe16 set operations.
@param v a boolean value.
@retval A yes/no string formatted for sbe16 set operations.
@throws InstrumentParameterException if value not a bool.
"""
if not isinstance(v,bool):
raise InstrumentParameterException('Value %s is not a bool.' % str(v))
if v:
return 'y'
else:
return 'n'
@staticmethod
def _string_to_numeric_date_time_string(date_time_string):
"""
convert string from "21 AUG 2012 09:51:55" to numeric "mmddyyyyhhmmss"
"""
return time.strftime("%m%d%Y%H%M%S", time.strptime(date_time_string, "%d %b %Y %H:%M:%S"))
@staticmethod
def _output_format_string_2_int(format_string):
"""
Convert an output format from an string to an int
@param format_string sbe output format as string or regex match
@retval int representation of output format
@raise InstrumentParameterException if format unknown
"""
if not isinstance(format_string, str):
format_string = format_string.group(1)
if format_string.lower() == "raw hex":
return 0
elif format_string.lower() == "converted hex":
return 1
elif format_string.lower() == "raw decimal":
return 2
elif format_string.lower() == "converted decimal":
return 3
elif format_string.lower() == "converted hex for afm":
return 4
elif format_string.lower() == "converted xml uvic":
return 5
raise InstrumentParameterException("output format unknown: %s" % format_string)
|
rmanoni/mi-instrument
|
mi/instrument/seabird/sbe16plus_v2/driver.py
|
Python
|
bsd-2-clause
| 87,807
|
# Django settings for example project.
import os
BASE_DIR = os.path.dirname(__file__)
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'example.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '9i1lt2qz$#&21tqxqhq@ep21(8f#^kpih!5yynr+ba1sq5w8+&'
MIDDLEWARE = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'example.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'example.wsgi.application'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.media',
'django.template.context_processors.request',
'django.template.context_processors.i18n',
'django.template.context_processors.static',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
},
},
]
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'selectable',
'core',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
mlavin/django-selectable
|
example/example/settings.py
|
Python
|
bsd-2-clause
| 4,833
|
import csv
import sys
import typing
import collections
from pathlib import Path
import chardet
import warnings
import pycldf.dataset
from csvw.dsv import UnicodeDictReader
from beastling.util import log
def sniff(filename, default_dialect: typing.Optional[csv.Dialect] = csv.excel):
"""Read the beginning of the file and guess its csv dialect.
Parameters
----------
filename: str or pathlib.Path
Path to a csv file to be sniffed
Returns
-------
csv.Dialect
"""
with Path(filename).open("rb") as fp:
# On large files, csv.Sniffer seems to need a lot of data to make a
# successful inference...
sample = fp.read(1024)
encoding = chardet.detect(sample)["encoding"]
sample = sample.decode(encoding)
while True:
try:
dialect = csv.Sniffer().sniff(sample, [",", "\t"])
dialect.encoding = encoding
return dialect
except csv.Error: # pragma: no cover
blob = fp.read(1024).decode(encoding)
sample += blob
if not blob:
# If blob is empty we've somehow hit the end of the file
# without figuring out the dialect. Something is probably
# quite wrong with the file, but let's default to Excel and
# hope for the best...
if default_dialect is not None:
return default_dialect
raise
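# Example usage (illustrative only; "wordlist.csv" is a hypothetical file):
#   dialect = sniff("wordlist.csv")
#   with UnicodeDictReader("wordlist.csv", dialect=dialect) as reader:
#       for row in reader:
#           ...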
def sanitise_name(name):
"""
Take a name for a language or a feature which has come from somewhere like
a CLDF dataset and make sure it does not contain any characters which
will cause trouble for BEAST or postanalysis tools.
"""
return name.replace(" ", "_")
def load_data(filename, file_format=None, lang_column=None, value_column=None, expect_multiple=False):
# Handle CSV dialect issues
if str(filename) == 'stdin':
filename = sys.stdin
# We can't sniff from stdin, so guess comma-delimited and hope for
# the best
dialect = "excel" # Default dialect for csv module
elif file_format and file_format.lower() == "cldf":
return read_cldf_dataset(filename, value_column, expect_multiple=expect_multiple)
elif file_format and file_format.lower() == "cldf-legacy":
# CLDF pre-1.0 standard says delimiter is indicated by file extension
if filename.suffix.lower() == ".csv" or str(filename) == "stdin":
dialect = "excel"
elif filename.suffix.lower() == ".tsv":
dialect = "excel-tab"
else:
raise ValueError("CLDF standard dictates that filenames must end in .csv or .tsv")
elif filename.suffix == ".json" or filename.name in {"forms.csv", "values.csv"}:
# TODO: Should we just let the pycldf module try its hands on the file
# and fall back to other formats if that doesn't work?
return read_cldf_dataset(filename, value_column, expect_multiple=expect_multiple)
else:
# Use CSV dialect sniffer in all other cases
dialect = sniff(filename)
# Read
with UnicodeDictReader(filename, dialect=dialect) as reader:
# Guesstimate file format if user has not been explicit
if file_format is None:
file_format = 'cldf-legacy' if all(
[f in reader.fieldnames for f in ("Language_ID", "Value")]) and any(
[f in reader.fieldnames for f in ("Feature_ID", "Parameter_ID")]
) else 'beastling'
# Load data
if file_format == 'cldf-legacy':
data = load_cldf_data(reader, value_column, filename, expect_multiple=expect_multiple)
elif file_format == 'beastling':
data = load_beastling_data(reader, lang_column, filename, expect_multiple=expect_multiple)
else:
raise ValueError("File format specification '{:}' not understood".format(file_format))
return data, {}
_language_column_names = ("iso", "iso_code", "glotto", "glottocode", "language", "language_id", "lang", "lang_id")
def load_beastling_data(reader, lang_column, filename, expect_multiple=False):
if not lang_column:
for candidate in reader.fieldnames:
if candidate.lower() in _language_column_names:
lang_column = candidate
break
if not lang_column or lang_column not in reader.fieldnames:
raise ValueError("Cold not find language column in data file %s" % filename)
data = collections.defaultdict(lambda: collections.defaultdict(lambda: "?"))
for row in reader:
if row[lang_column] in data:
raise ValueError("Duplicated language identifier '%s' found in data file %s" % (row[lang_column], filename))
lang = row.pop(lang_column)
if expect_multiple:
data[lang] = collections.defaultdict(lambda : "?", {key: [value] for key, value in row.items()})
else:
data[lang] = collections.defaultdict(lambda : "?", row)
return data
def load_cldf_data(reader, value_column, filename, expect_multiple=False):
value_column = value_column or "Value"
if "Feature_ID" in reader.fieldnames:
feature_column = "Feature_ID"
elif "Parameter_ID" in reader.fieldnames:
feature_column = "Parameter_ID"
else:
raise ValueError("Could not find Feature_ID or Parameter_ID column, is %s a valid CLDF file?" % filename)
data = collections.defaultdict(lambda: collections.defaultdict(lambda: "?"))
for row in reader:
lang = row["Language_ID"]
if lang not in data:
if expect_multiple:
data[lang] = collections.defaultdict(lambda: [])
else:
data[lang] = collections.defaultdict(lambda: "?")
if expect_multiple:
data[lang][row[feature_column]].append(row[value_column])
else:
data[lang][row[feature_column]] = row[value_column]
return data
def iterlocations(filename):
with UnicodeDictReader(filename, dialect=sniff(filename, default_dialect=None)) as reader:
# Identify fieldnames
fieldnames = [(n.lower(), n) for n in reader.fieldnames]
fieldmap = {}
for field, aliases in [
('language identifier', _language_column_names),
('latitude', ("latitude", "lat")),
('longitude', ("longitude", "lon", "long")),
]:
for lname, fieldname in fieldnames:
if lname in aliases:
fieldmap[field] = fieldname
break
else:
raise ValueError(
"Could not find a {0} column in location data file {1}".format(field, filename))
for row in reader:
(lat, lon) = row[fieldmap['latitude']], row[fieldmap['longitude']]
try:
lat = float(lat) if lat != "?" else lat
lon = float(lon) if lon != "?" else lon
except ValueError:
lat, lon = "?", "?"
yield (row[fieldmap['language identifier']].strip(), (lat, lon))
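# Illustrative input for iterlocations (hypothetical file contents): a CSV with a
# header such as "lang,lat,lon"; a row "abcd1234,12.5,-3.25" is yielded as
# ("abcd1234", (12.5, -3.25)), and coordinates that cannot be parsed are yielded
# as ("?", "?").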
def get_dataset(fname):
"""Load a CLDF dataset.
Load the file as `json` CLDF metadata description file, or as metadata-free
dataset contained in a single csv file.
The distinction is made depending on the file extension: `.json` files are
loaded as metadata descriptions, all other files are matched against the
CLDF module specifications. Directories are checked for the presence of
any CLDF datasets in undefined order of the dataset types.
Parameters
----------
fname : str or Path
Path to a CLDF dataset
Returns
-------
Dataset
"""
fname = Path(fname)
if not fname.exists():
raise FileNotFoundError('{:} does not exist'.format(fname))
if fname.suffix == '.json':
return pycldf.dataset.Dataset.from_metadata(fname)
return pycldf.dataset.Dataset.from_data(fname)
# TODO: Change the behaviour to always expect multiple.
def read_cldf_dataset(filename, code_column=None, expect_multiple=False):
"""Load a CLDF dataset.
Load the file as `json` CLDF metadata description file, or as metadata-free
dataset contained in a single csv file.
The distinction is made depending on the file extension: `.json` files are
loaded as metadata descriptions, all other files are matched against the
CLDF module specifications. Directories are checked for the presence of
any CLDF datasets in undefined order of the dataset types.
Parameters
----------
fname : str or Path
Path to a CLDF dataset
Returns
-------
Dataset
"""
dataset = get_dataset(filename)
if expect_multiple:
data = collections.defaultdict(lambda: collections.defaultdict(lambda: []))
else:
data = collections.defaultdict(lambda: collections.defaultdict(lambda: "?"))
# Make sure this is a kind of dataset BEASTling can handle
if dataset.module not in ("Wordlist", "StructureDataset"):
raise ValueError("BEASTling does not know how to interpret CLDF {:} data.".format(
dataset.module))
# Build dictionaries of nice IDs for languages and features
col_map = dataset.column_names
lang_ids, language_code_map = build_lang_ids(dataset, col_map)
feature_ids = {}
if col_map.parameters:
for row in dataset["ParameterTable"]:
feature_ids[row[col_map.parameters.id]] = sanitise_name(row[col_map.parameters.name])
# Build actual data dictionary, based on dataset type
if dataset.module == "Wordlist":
# We search for cognatesetReferences in the FormTable or a separate CognateTable.
cognate_column_in_form_table = True
# If we find them in CognateTable, we store them keyed with formReference:
if not code_column: # If code_column is given explicitly, we don't have to search!
code_column = col_map.forms.cognatesetReference
if not code_column:
if (col_map.cognates and
col_map.cognates.cognatesetReference and
col_map.cognates.formReference):
code_column = col_map.cognates.cognatesetReference
form_reference = col_map.cognates.formReference
if expect_multiple:
cognatesets = collections.defaultdict(list)
for row in dataset["CognateTable"]:
cognatesets[row[form_reference]].append(row[code_column])
else:
cognatesets = collections.defaultdict(lambda: "?")
for row in dataset["CognateTable"]:
cognatesets[row[form_reference]] = row[code_column]
else:
raise ValueError(
"Dataset {:} has no cognatesetReference column in its "
"primary table or in a separate cognate table. "
"Is this a metadata-free wordlist and you forgot to "
"specify code_column explicitly?".format(filename))
form_column = dataset["FormTable", "id"].name
cognate_column_in_form_table = False
language_column = col_map.forms.languageReference
parameter_column = col_map.forms.parameterReference
warnings.filterwarnings(
"ignore", '.*Unspecified column "Cognate_Set"', UserWarning, "csvw\.metadata", 0)
warnings.filterwarnings(
"ignore", '.*Unspecified column "{:}"'.format(code_column), UserWarning, "csvw\.metadata", 0)
# We know how to deal with a 'Cognate_Set' column, even in a metadata-free CSV file
for row in dataset["FormTable"].iterdicts():
lang_id = lang_ids.get(row[language_column], row[language_column])
feature_id = feature_ids.get(row[parameter_column], row[parameter_column])
if cognate_column_in_form_table:
if expect_multiple:
data[lang_id][feature_id].append(row[code_column])
else:
data[lang_id][feature_id] = row[code_column]
else:
data[lang_id][feature_id] = cognatesets[row[col_map.forms.id]]
return data, language_code_map
if dataset.module == "StructureDataset":
code_column = col_map.values.codeReference or col_map.values.value
for row in dataset["ValueTable"]:
lang_id = lang_ids.get(
row[col_map.values.languageReference], row[col_map.values.languageReference])
feature_id = feature_ids.get(
row[col_map.values.parameterReference], row[col_map.values.parameterReference])
if expect_multiple:
data[lang_id][feature_id].append(row[code_column] or '')
else:
data[lang_id][feature_id] = row[code_column] or ''
return data, language_code_map
def build_lang_ids(dataset, col_map):
if col_map.languages is None:
# No language table so we can't do anything
return {}, {}
col_map = col_map.languages
lang_ids = {}
language_code_map = {}
# First check for unique names and Glottocodes
names = []
gcs = []
langs = []
for row in dataset["LanguageTable"]:
langs.append(row)
names.append(row[col_map.name])
if row[col_map.glottocode]:
gcs.append(row[col_map.glottocode])
unique_names = len(set(names)) == len(names)
unique_gcs = len(set(gcs)) == len(gcs) == len(names)
log.info('{0} are used as language identifiers'.format(
'Names' if unique_names else ('Glottocodes' if unique_gcs else 'dataset-local IDs')))
for row in langs:
if unique_names:
# Use names if they're unique, for human-friendliness
lang_ids[row[col_map.id]] = sanitise_name(row[col_map.name])
elif unique_gcs:
# Otherwise, use glottocodes as at least they are meaningful
lang_ids[row[col_map.id]] = row[col_map.glottocode]
else:
# As a last resort, use the IDs which are guaranteed to be unique
lang_ids[row[col_map.id]] = row[col_map.id]
if row[col_map.glottocode]:
language_code_map[lang_ids[row[col_map.id]]] = row[col_map.glottocode]
return lang_ids, language_code_map
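if __name__ == "__main__":
    # Minimal command-line smoke test, not part of the original module: load the
    # data file given as the first argument (whatever path the caller supplies)
    # and report how many languages and features were read.
    if len(sys.argv) > 1:
        data, _codes = load_data(Path(sys.argv[1]))
        features = {f for row in data.values() for f in row}
        print("{} languages, {} features".format(len(data), len(features)))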
|
lmaurits/BEASTling
|
beastling/fileio/datareaders.py
|
Python
|
bsd-2-clause
| 14,572
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fsforms', '0067_submissionofflinesite_temporary_site'),
]
operations = [
migrations.AddField(
model_name='submissionofflinesite',
name='fieldsight_form',
field=models.ForeignKey(related_name='offline_submissiob', blank=True, to='fsforms.FieldSightXF', null=True),
),
]
|
awemulya/fieldsight-kobocat
|
onadata/apps/fsforms/migrations/0068_submissionofflinesite_fieldsight_form.py
|
Python
|
bsd-2-clause
| 513
|
#!/usr/bin/env python
"""demonstrate the use of getopt and optparse to parse the command line"""
import getopt, sys, os, shutil
usage = """
Usage: %s [-h | --help] [-d dir | --directory dir] [-i | --confirm] file1 file2 ...
moves file1 file2 ... to the destination directory dir
""" % sys.argv[0]
def getopt_parsing():
try:
options, args = getopt.getopt(sys.argv[1:],
'hd:i', ['help', 'directory=', 'confirm'])
except getopt.GetoptError, msg:
# illegal options (not -h, -d, --help, --directory)
# or missing values (for -d and --directory)
print sys.exc_value # explains what is wrong
print usage
sys.exit(1) # 1 signifies error
# put all information in a dictionary cmlargs:
directory = None
confirm = False
files = args
print "options=",options
print "args=",args
# process options and values:
for option, value in options:
if option in ('-h', '--help'):
print usage; sys.exit(0) # 0: this exit is no error
elif option in ('-d', '--directory'):
directory = value
elif option in ('-i', '--confirm'):
confirm = True
return directory, confirm, files
from optparse import OptionParser
class OptionParserNoError(OptionParser):
def error(self, msg):
return
def optparse_parsing():
parser = OptionParser()
parser = OptionParserNoError()
# help message is automatically provided
parser.add_option('-d', '--directory', dest='directory',
help='destination directory')
parser.add_option('-i', '--confirm', dest='confirm',
action='store_true', default=False,
help='confirm each move')
options, args = parser.parse_args(sys.argv[1:])
print "options=",options
print "args=",args
return options.directory, options.confirm, args
destination, confirm, files = getopt_parsing()
#destination, confirm, files = optparse_parsing()
print destination, confirm, files
from scitools.misc import movefiles
#movefiles(files, destination, confirm=confirm)
|
sniemi/SamPy
|
sandbox/src1/TCSE3-3rd-examples/src/py/examples/cmlparsing.py
|
Python
|
bsd-2-clause
| 2,155
|
from django.conf import settings
from django.test.signals import setting_changed
from rest_framework.settings import APISettings
DEFAULTS = {
'USE_SESSION_AUTH': True,
'SECURITY_DEFINITIONS': {
'basic': {
'type': 'basic'
}
},
'LOGIN_URL': getattr(settings, 'LOGIN_URL', None),
'LOGOUT_URL': getattr(settings, 'LOGOUT_URL', None),
'DOC_EXPANSION': None,
'APIS_SORTER': None,
'OPERATIONS_SORTER': None,
'JSON_EDITOR': False,
'SHOW_REQUEST_HEADERS': False,
'SUPPORTED_SUBMIT_METHODS': [
'get',
'post',
'put',
'delete',
'patch'
],
'VALIDATOR_URL': '',
}
IMPORT_STRINGS = []
swagger_settings = APISettings(
user_settings=getattr(settings, 'SWAGGER_SETTINGS', {}),
defaults=DEFAULTS,
import_strings=IMPORT_STRINGS
)
def reload_settings(*args, **kwargs): # pragma: no cover
"""
Reloads settings during unit tests if override_settings decorator
is used. (Taken from DRF)
"""
# pylint: disable=W0603
global swagger_settings
if kwargs['setting'] == 'LOGIN_URL':
swagger_settings.LOGIN_URL = kwargs['value']
if kwargs['setting'] == 'LOGOUT_URL':
swagger_settings.LOGOUT_URL = kwargs['value']
if kwargs['setting'] != 'SWAGGER_SETTINGS':
return
swagger_settings = APISettings(
kwargs['value'],
DEFAULTS,
IMPORT_STRINGS
)
setting_changed.connect(reload_settings)
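# Usage note (added, not part of the original module): application code reads the
# effective values through the `swagger_settings` object, e.g.
#   from rest_framework_swagger.settings import swagger_settings
#   if swagger_settings.USE_SESSION_AUTH:
#       ...
# and tests that wrap code in Django's override_settings(SWAGGER_SETTINGS={...})
# are picked up automatically through the setting_changed handler above.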
|
pombredanne/django-rest-swagger
|
rest_framework_swagger/settings.py
|
Python
|
bsd-2-clause
| 1,484
|
from __future__ import print_function
import sys
import time
import Pyro4
import Pyro4.util
from random import randint
sys.excepthook = Pyro4.util.excepthook
def test_servo(servo):
for i in range(10):
# Set the servo arm to 0 degrees
servo.setAngle(0)
print('Set angle to 0')
time.sleep(1)
# Set the servo arm to 90 degrees
servo.setAngle(90)
print('Set angle to 90')
time.sleep(1)
# Set the servo arm to 180 degrees
servo.setAngle(180)
print('Set angle to 180')
time.sleep(1)
def test_relay(relay):
for i in range(3):
relay.on()
if relay.isOn():
print(relay.name() + ' is on')
time.sleep(1)
relay.off()
if relay.isOff():
print(relay.name() + ' is off')
time.sleep(1)
def main():
Pyro4.config.HOST = '0.0.0.0'
iot = Pyro4.Proxy("PYRONAME:edison.iot.server")
#print(iot.info())
iot.blink(10)
lcd = iot.getLCD()
speaker = iot.getSpeaker()
# Play all 7 of the lowest notes
speaker.playAll()
light = iot.getLightSensor()
button = iot.getButton()
motion = iot.getMotion()
encoder = iot.getEncoder()
uv_sensor = iot.getUV()
# analog voltage, usually 3.3 or 5.0 for UV sensor
GUVAS12D_AREF = 5.0
SAMPLES_PER_QUERY = 1024
# The temperature sensor using I2C
fahrenheit = iot.getTemperature('F')
celsius = iot.getTemperature('C')
humid = iot.getHumidity()
print("{} degrees Celsius, or {} degrees Fahrenheit".format(celsius, fahrenheit))
print("Humidity is {}%".format(humid))
# Read the input and print, waiting one second between readings
for i in range(20):
r, g, b = randint(0, 255), randint(0, 255), randint(0, 255)
lcd.setColor(r, g, b)
lcd.setCursor(0, 0)
#lcd.write('Hello World'.encode('latin_1'))
iot.writeLCD('Hello World')
lcd.setCursor(1, 0)
#lcd.write("R:{} G:{} B:{}".format(r, g, b).encode('latin_1'))
iot.writeLCD("R:{} G:{} B:{}".format(r, g, b))
print("{} raw value is {}".format(light.name(), light.raw_value()), end='')
print(", which is roughly {} lux".format(light.value()))
print("AREF: {0}, Voltage value (higher means more UV): {1}".format(
GUVAS12D_AREF, uv_sensor.value(GUVAS12D_AREF, SAMPLES_PER_QUERY)))
print("Position: {0}".format(encoder.position()))
print("{} value is {}".format(button.name(), button.value()))
if button.value():
# Play a medium C-sharp
speaker.playSound('c', True, "med")
if motion.value():
print("Detecting moving object")
else:
print("No moving objects detected")
time.sleep(0.5)
#test_relay(iot.getRelay())
#test_servo(iot.getServo)
if __name__ == "__main__":
main()
|
ktkirk/HSSI
|
grovekit/client.py
|
Python
|
bsd-2-clause
| 2,914
|
from dateutils import DateUtils
import datetime
import unittest
CURRENT_YEAR = datetime.datetime.now().year
class TestDateUtils(unittest.TestCase):
def test_last_day_of_month(self):
self.assertEqual(DateUtils.last_day_of_month(2019, 3), 31)
self.assertEqual(DateUtils.last_day_of_month(2018, 7), 31)
self.assertEqual(DateUtils.last_day_of_month(2016, 2), 29)
self.assertEqual(DateUtils.last_day_of_month(2017, 2), 28)
def test_date_from_string_exact(self):
dates = DateUtils.date_from_string("01/01/2018")
self.assertEqual(dates[0], datetime.date(2018, 1, 1))
self.assertEqual(dates[1], datetime.date(2018, 1, 1))
dates = DateUtils.date_from_string("31/01/2018")
self.assertEqual(dates[0], datetime.date(2018, 1, 31))
self.assertEqual(dates[1], datetime.date(2018, 1, 31))
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("01/0/2018")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("01/13/2018")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("01/99/2018")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("32/1/2018")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("0/1/2018")
dates = DateUtils.date_from_string("12/06/2323")
self.assertEqual(dates[0], datetime.date(2323, 6, 12))
self.assertEqual(dates[1], datetime.date(2323, 6, 12))
def test_date_from_string_year(self):
dates = DateUtils.date_from_string("2018")
self.assertEqual(dates[0], datetime.date(2018, 1, 1))
self.assertEqual(dates[1], datetime.date(2018, 12, 31))
dates = DateUtils.date_from_string("3000")
self.assertEqual(dates[0], datetime.date(3000, 1, 1))
self.assertEqual(dates[1], datetime.date(3000, 12, 31))
dates = DateUtils.date_from_string("1950")
self.assertEqual(dates[0], datetime.date(1950, 1, 1))
self.assertEqual(dates[1], datetime.date(1950, 12, 31))
# We don't support years that don't have four digits.
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("659")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("23")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("1")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("65900")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("100000000000")
def test_date_from_string_month(self):
dates = DateUtils.date_from_string("12/2018")
self.assertEqual(dates[0], datetime.date(2018, 12, 1))
self.assertEqual(dates[1], datetime.date(2018, 12, 31))
dates = DateUtils.date_from_string("2/2016")
self.assertEqual(dates[0], datetime.date(2016, 2, 1))
self.assertEqual(dates[1], datetime.date(2016, 2, 29))
dates = DateUtils.date_from_string("02/2016")
self.assertEqual(dates[0], datetime.date(2016, 2, 1))
self.assertEqual(dates[1], datetime.date(2016, 2, 29))
# We don't support years that don't have four digits.
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("02/232")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("111/2012")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("0/2012")
def test_date_from_string_week(self):
dates = DateUtils.date_from_string("w1/2019")
self.assertEqual(dates[0], datetime.date(2018, 12, 31))
self.assertEqual(dates[1], datetime.date(2019, 1, 6))
dates = DateUtils.date_from_string("w01/2019")
self.assertEqual(dates[0], datetime.date(2018, 12, 31))
self.assertEqual(dates[1], datetime.date(2019, 1, 6))
dates = DateUtils.date_from_string("w01")
self.assertEqual(dates[0], DateUtils.from_week_number(CURRENT_YEAR, 1))
self.assertEqual(dates[1], DateUtils.from_week_number(CURRENT_YEAR, 1, end=True))
dates = DateUtils.date_from_string("w52/2016")
self.assertEqual(dates[0], datetime.date(2016, 12, 26))
self.assertEqual(dates[1], datetime.date(2017, 1, 1))
dates = DateUtils.date_from_string("w1/2017")
self.assertEqual(dates[0], datetime.date(2017, 1, 2))
self.assertEqual(dates[1], datetime.date(2017, 1, 8))
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("w02/232")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("w111/2012")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("w0/2012")
def test_date_from_string_quarter(self):
dates = DateUtils.date_from_string("q1/2019")
self.assertEqual(dates[0], datetime.date(2019, 1, 1))
self.assertEqual(dates[1], datetime.date(2019, 3, 31))
dates = DateUtils.date_from_string("Q1/2019")
self.assertEqual(dates[0], datetime.date(2019, 1, 1))
self.assertEqual(dates[1], datetime.date(2019, 3, 31))
dates = DateUtils.date_from_string("Q2")
self.assertEqual(dates[0], datetime.date(CURRENT_YEAR, 4, 1))
self.assertEqual(dates[1], datetime.date(CURRENT_YEAR, 6, 30))
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("Q2/232")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("Q2/00232")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("Q5/2012")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("Q0/2012")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("Q0234/2012")
def test_date_from_string_half(self):
dates = DateUtils.date_from_string("h1/2019")
self.assertEqual(dates[0], datetime.date(2019, 1, 1))
self.assertEqual(dates[1], datetime.date(2019, 6, 30))
dates = DateUtils.date_from_string("H1/2019")
self.assertEqual(dates[0], datetime.date(2019, 1, 1))
self.assertEqual(dates[1], datetime.date(2019, 6, 30))
dates = DateUtils.date_from_string("h2/2019")
self.assertEqual(dates[0], datetime.date(2019, 7, 1))
self.assertEqual(dates[1], datetime.date(2019, 12, 31))
dates = DateUtils.date_from_string("H2/2019")
self.assertEqual(dates[0], datetime.date(2019, 7, 1))
self.assertEqual(dates[1], datetime.date(2019, 12, 31))
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("H2/232")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("H2/00232")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("H5/2012")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("H0/2012")
with self.assertRaises(ValueError):
dates = DateUtils.date_from_string("H0234/2012")
def test_date_range(self):
dates = DateUtils.date_range_from_string("w11-")
self.assertEqual(dates[0], DateUtils.date_from_string("w11")[0])
self.assertEqual(dates[1], datetime.date.today())
if __name__ == '__main__':
unittest.main()
|
mrobinson/phpreport-report
|
tests.py
|
Python
|
bsd-2-clause
| 7,609
|
# Copyright 2014 Dietrich Epp.
# This file is part of SGLib. SGLib is licensed under the terms of the
# 2-clause BSD license. For more information, see LICENSE.txt.
from d3build.package import ExternalPackage
def pkg_config(build):
flags = build.pkg_config('alsa')
return None, build.target.module().add_flags(flags)
module = ExternalPackage(
[pkg_config],
name='ALSA',
packages={
'deb': 'libasound2-dev',
'rpm': 'alsa-lib-devel',
'gentoo': 'media-libs/alsa-lib',
'arch': 'alsa-lib',
}
)
|
depp/sglib
|
script/sglib/external/alsa.py
|
Python
|
bsd-2-clause
| 548
|
import os
from django.contrib.auth.models import User
from django.conf import settings
from django.db import models
from django.db.models.signals import (
post_save)
from django.dispatch import receiver
from elasticgit import EG
from elasticinit import TestStory
from backend.utils import push_to_git
import uuid
from github3 import GitHub
join = os.path.join
# from git import GitCommandError
class UserProfile(models.Model):
user = models.ForeignKey('auth.User')
uuid = models.CharField(
max_length=32,
unique=True,
db_index=True,
editable=False)
class Story(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
category = models.IntegerField()
body = models.CharField(max_length=200000)
uuid = models.CharField(
max_length=32,
blank=True,
null=True,
unique=True,
db_index=True,
editable=False)
@receiver(post_save, sender=User)
def auto_create_repo(instance, **kwargs):
try:
userUUID = uuid.uuid4().hex
# creating repo on GitHub
gh = GitHub('minaglobalfoundation@gmail.com',
password='minafoundation15')
githubRepo = gh.create_repository(userUUID, description=u'',
homepage=u'',
private=False,
has_issues=True,
has_wiki=True,
auto_init=True,
gitignore_template=u'')
githubRepo.create_blob('hello', 'utf-8')
githubRepo.create_commit('first commit', '', '')
# creating local repo
repoPath = 'repos/' + userUUID
UserProfile(user=instance, uuid=userUUID)
EG.init_repo(repoPath, bare=False)
# creating workspace in local repo
workspace = EG.workspace(repoPath,
index_prefix='',
es={'urls': ['http://localhost:9200']})
# pushing local repo to GitHub repo
workspace.repo.create_remote('origin', githubRepo.html_url)
repo = workspace.repo
remote = repo.remote()
remote.fetch()
remote_master = remote.refs.master
remote.push(remote_master.remote_head)
except ValueError:
raise
workspace.refresh_index()
# posting to EG
@receiver(post_save, sender=Story)
def auto_save_to_git(instance, **kwargs):
data = TestStory({
"title": instance.title,
"author": instance.author,
"category": instance.category,
"body": instance.body,
"uuid": uuid.uuid4().hex})
try:
ws = EG.workspace(settings.GIT_REPO_PATH,
index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX,
es={'urls': [settings.ELASTICSEARCH_HOST]})
ws.setup('Codie Roelf', 'codiebeulaine@gmail.com')
ws.save(data, 'saving')
ws.refresh_index()
push_to_git(settings.GIT_REPO_PATH,
index_prefix=settings.ELASTIC_GIT_INDEX_PREFIX,
es_host=settings.ELASTICSEARCH_HOST)
except ValueError:
raise
ws.refresh_index()
|
MinaProject/backend
|
backend/models.py
|
Python
|
bsd-2-clause
| 3,326
|
#!/usr/bin/env python2
#!/usr/bin/env python
#!/usr/bin/python
###############################################################################
### COPYRIGHT NOTICE FOLLOWS. DO NOT REMOVE
###############################################################################
### Copyright (c) 2016 - 2020 SQLEXEC LLC
###
### Permission to use, copy, modify, and distribute this software and its
### documentation for any purpose, without fee, and without a written agreement
### is hereby granted, provided that the above copyright notice and this paragraph
### and the following two paragraphs appear in all copies.
###
### IN NO EVENT SHALL SQLEXEC LLC BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
### SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
### ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
### SQLEXEC LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
### SQLEXEC LLC SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
### LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
### PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
### AND SQLEXEC LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
### ENHANCEMENTS, OR MODIFICATIONS.
###
###############################################################################
#
# Original Author: Michael Vitale, michael@commandprompt.com
#
# Description: This python utility program performs PostgreSQL maintenance tasks.
#
# Inputs: all fields are optional except database and action.
# -h <hostname or IP address>
# -d <database>
# -n <schema>
# -p <PORT>
# -u <db user>
# -l <load threshold>
# -w <max rows>
# -o <work window in minutes>
# -e <max .ready files>
# -a <action: ANALYZE, VACUUM_ANALYZE, VACUUM_FREEZE, REPORT>
# -m [html format flag]
# -r [dry run flag]
# -s [smart mode flag]
# -v [verbose output flag, mostly used for debugging]
#
# Examples:
#
# -- vacuum analyze for all user tables in the database but only if load is less than 20% and rows < 1 mil
# ./pg_maint.py -h localhost -d test -p 5433 -u postgres -a vacuum_analyze -l 20 -w 1000000
#
# -- same thing as the previous one, but do a dry run. This is useful to see what commands will be executed, and is also
# useful for generating DDL so you can run it manually
# ./pg_maint.py -h localhost -d test -p 5433 -u postgres -a vacuum_analyze -l 20 -w 1000000 -r
#
# -- smart analyze for all user tables in specific schema, but only if load is less than 40% and rows < 1 mil
# ./pg_maint.py -h localhost -d test -n public -p 5433 -s -u postgres -a analyze -l 40 -w 1000000
#
# -- run report on entire test database:
# ./pg_maint.py -d test -a report
#
# Requirements:
# 1. python 2.6 or 2.7
# 2. psql client
# 3. psutil for windows only: https://pypi.python.org/pypi?:action=display&name=psutil#downloads
# (fyi for getting it on linux but not required: apt-get install python-psutil or yum install python-psutil)
#
# Download: git clone https://github.com/commandprompt/pg_maint.git pg_maint
#
# Assumptions:
# 1. db user defaults to postgres if not provided as parameter.
# 2. Max rows defaults to 10 million if not provided as parameter
# 3. Password must be in local .pgpass file or client authentication changed to trust or peer
# 4. psql must be in the user's path
# 5. Load detection assumes that you are running this script from the database host.
# 6. SMART type will only consider tables whose pg_class.reltuples value is greater than zero.
#    This value can be zero even if a few rows are in the table, because pg_class.reltuples is also a close estimate.
# 7. For analyze, vacuum_analyze, and vacuum_freeze actions, tables with over MAXROWS rows are not
#    refreshed and are output in file, /tmp/PROGRAMPID_stats_deferred.sql
#
# -s (smart mode) dictates a filter algorithm to determine what tables will qualify for the maintenance commands.
# For analyze and vacuum analyze:
# 1. Refresh tables with no recent analyze or autovacuum_analyze in the last 60 days.
# 2. Refresh tables where pg_stat_user_tables.n_live_tup is less than half of pg_class.reltuples
# For vacuum freeze:
# 1. Refresh tables where current high XID age divided by autovacuum_freeze_max_age > 70%.
#
#
# Cron Job Info:
# View cron job output: view /var/log/cron
# source the database environment: source ~/db_catalog.ksh
# Example cron job that does smart vacuum freeze commands for entire database every Saturday at 4am:
# * 4 * * 6 /usr/bin/python /var/lib/pgsql/pg_maint/pg_maint.py -d evergreen -p 5432 -a vacuum_freeze -s -l 30 -w 1000000000 >> /var/lib/pgsql/pg_maint/pg_maint_`/bin/date +'\%Y\%m\%d'`.log 2>&1
#
# NOTE: You may have to source the environment variables file in the crontab to get this program to work.
# #!/bin/bash
# source /home/user/.bash_profile
#
# Report logic:
# 1. Get database conflicts, deadlocks, and temp_files.
# 2. Unused indexes are identified where there are less than 20 index scans and the size of the table is > 100 MB.
# 3. Bloated tables/indexes are identified where at least 20% of the table/index is bloated or the wasted bytes is > 1GB.
# 4. See if archiving is getting behind by more than 1000 WAL files.
# 5. Contrast PG memory configuration to recommended ones
# 6. Identify orphaned large objects.
# 7. List tables getting close to transaction wraparound (more than halfway to max freeze threshold).
# 8. list tables that have not been analyzed or vacuumed in the last 60 days or whose size has grown significantly.
#
# TODOs:
#
#
# History:
# who did it Date did what
# ========== ========= ==============================
# Michael Vitale 01/12/2016 Original coding using python 2.7.x on windows 8.1 and ubuntu 14.04 (pg 9.4)
# Michael Vitale 01/13/2016 Finished porting code from bash script, pg_refreshstats.sh
# Michael Vitale 01/14/2016 First crack at incorporated logic for report action.
# Michael Vitale 01/17/2016 Implemented report output in html
# Michael Vitale 01/18/2016 Fixed a bunch of bugs with html reporting
# Michael Vitale 01/20/2016 Removed linux dependency on psutils module.
# Enhanced unused indexes report to query slaves if available
# Michael Vitale 01/21/2016 Fixed bugs, normalized html output, added connection and locking report
# Michael Vitale 01/23/2016 Reworked html output to display health check at top of page and lists at the bottom.
# Michael Vitale 01/25/2016 Added more health check items: writers, network standbys. Implemented logic related to
# checkpoint, background and backend writers and the pg_stat_bgwriter table.
# Michael Vitale 01/27/2016 loop over tables being worked on, instead of executing
# them in batch: analyze, vacuum analyze, and vacuum freeze actions.
# Michael Vitale 01/28/2016 Fixed python piping to use bash as default shell
# Michael Vitale 10/04/2017 Fix queries based on PG versions since 9.5 (ie 9.6 and 10)
# Michael Vitale 10/16/2020 Qualify vacuumlo command with port number. It had assumed default, 5432
################################################################################################################
import string, sys, os, time, datetime, exceptions
from decimal import *
import smtplib
import subprocess
from subprocess import Popen, PIPE
from optparse import OptionParser
import getpass
# global defs
import maint_globals
# main supporting functions
from maint_funcs import *
#############################################################################################
def setupOptionParser():
parser = OptionParser(add_help_option=False, description=maint_globals.DESCRIPTION)
parser.add_option("-a", "--action", dest="action", help="Action to perform. Values are: ANALYZE, VACUUM_ANALYZE, VACUUM_FREEZE, REPORT", default="",metavar="ACTION")
parser.add_option("-h", "--dbhost", dest="dbhost", help="DB Host Name or IP", default="",metavar="DBHOST")
parser.add_option("-p", "--port", dest="dbport", help="db host port", default="",metavar="DBPORT")
parser.add_option("-u", "--dbuser", dest="dbuser", help="db host user", default="",metavar="DBUSER")
parser.add_option("-d", "--database", dest="database", help="database name", default="",metavar="DATABASE")
parser.add_option("-n", "--schema", dest="schema", help="schema name", default="",metavar="SCHEMA")
parser.add_option("-s", "--smart_mode", dest="smart_mode", help="Smart Mode", default=False, action="store_true")
parser.add_option("-l", "--load_threshold", dest="load_threshold", help="Load Threshold", default="",metavar="LOAD_THRESHOLD")
parser.add_option("-w", "--max_rows", dest="max_rows", help="Max Rows", default="",metavar="MAX_ROWS")
parser.add_option("-o", "--work_window", dest="work_window", help="Work Window Max in mins.", default="",metavar="WORK_WINDOW")
parser.add_option("-e", "--max_ready_files",dest="max_ready_files", help="Maximum .Ready Files", default="",metavar="MAX_READY_FILES")
parser.add_option("-m", "--html", dest="html", help="html report format", default=False, action="store_true")
parser.add_option("-r", "--dry_run", dest="dry_run", help="Dry Run Only", default=False, action="store_true")
parser.add_option("-v", "--verbose", dest="verbose", help="Verbose Output", default=False, action="store_true")
return parser
#############################################################################################
#################################################################
#################### MAIN ENTRY POINT ###########################
#################################################################
optionParser = setupOptionParser()
(options,args) = optionParser.parse_args()
# load the instance
pg = maint()
# Load and validate parameters
rc, errors = pg.set_dbinfo(options.action, options.dbhost, options.dbport, options.dbuser, options.database, options.schema, \
options.smart_mode, options.load_threshold, options.max_rows, options.work_window, options.max_ready_files, options.html, options.dry_run, options.verbose, sys.argv)
if rc <> maint_globals.SUCCESS:
print errors
optionParser.print_help()
sys.exit(1)
# returns value like 9.4
# rc, pgversion = pg.get_pgversion()
# print "pg version: %.1f" % Decimal(pgversion)
print "%s version: %.1f %s PG Version: %s PG Database: %s\n\n" % (maint_globals.PROGNAME, maint_globals.VERSION, maint_globals.ADATE, pg.pgversion, pg.database)
# deferring load check to load intensive actions
#rc, results = pg.check_load()
rc, results = pg.do_vac_and_analyze()
if rc < maint_globals.SUCCESS:
pg.cleanup()
sys.exit(1)
rc, results = pg.do_report()
if rc < maint_globals.SUCCESS:
pg.cleanup()
sys.exit(1)
pg.cleanup()
sys.exit(0)
|
MichaelDBA/pg_maint
|
pg_maint.py
|
Python
|
bsd-2-clause
| 11,478
|
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 4 11:59:01 2014
@author: sb238920
"""
import numpy as np
def compute_mvt_confounds(movconf_file):
"""Computes mouvement confounds
Parameters
==========
movconf_file: path
path for the movement parameters
Returns
=======
confounds_data_all: array
movement parameters, derivatives and squares
confounds_labels: list of str
labels of the movement confounds
"""
confounds_data = np.loadtxt(movconf_file, dtype=float)
confounds_data = confounds_data[:, :6]
confounds_data_dt = confounds_data.copy()
for n in range(6):
conv = np.convolve(confounds_data[:, n], np.array([1., 0., -1.]) / 2,
'valid')
confounds_data_dt[1:-1, n] = conv
confounds_data_all = np.concatenate(
(confounds_data, confounds_data_dt), axis=1)
confounds_data_all = np.concatenate(
(confounds_data_all, confounds_data ** 2), axis=1)
confounds_labels = ["trans_x", "trans_y",
"trans_z", "rot_x", "rot_y", "rot_z"]
confounds_labels_dt = ["d_" + lbl for lbl in confounds_labels]
confounds_labels_sq = ["sq_" + lbl for lbl in confounds_labels]
confounds_labels.extend(confounds_labels_dt)
confounds_labels.extend(confounds_labels_sq)
return confounds_data_all, confounds_labels
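if __name__ == "__main__":
    # Minimal self-check, not part of the original module: write a synthetic
    # 20-scan x 6-parameter movement file (arbitrary values) and run it through
    # compute_mvt_confounds; the result should have 18 columns
    # (6 parameters + 6 temporal derivatives + 6 squares).
    import os
    import tempfile
    rng = np.random.RandomState(0)
    tmp = tempfile.NamedTemporaryFile(suffix=".txt", delete=False)
    np.savetxt(tmp.name, rng.randn(20, 6) * 0.1)
    tmp.close()
    confounds, labels = compute_mvt_confounds(tmp.name)
    print(confounds.shape)  # expected: (20, 18)
    print(labels)
    os.remove(tmp.name)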
|
rphlypo/parietalretreat
|
confound.py
|
Python
|
bsd-2-clause
| 1,383
|
# coding=utf8
# Based on yibo's R script and JianXiao's Python script
from scipy import sparse
from sklearn.feature_selection import SelectPercentile, f_classif, chi2
import pandas as pd
import numpy as np
from scipy import sparse as ssp
import pylab as plt
from sklearn.preprocessing import LabelEncoder,LabelBinarizer,MinMaxScaler,OneHotEncoder
from sklearn.feature_extraction.text import TfidfVectorizer,CountVectorizer
from sklearn.decomposition import TruncatedSVD
from sklearn.pipeline import Pipeline,make_pipeline
from sklearn.cross_validation import StratifiedKFold,KFold
from sklearn.base import BaseEstimator
from sklearn.feature_selection import SelectFromModel,SelectPercentile,f_classif
from sklearn.linear_model import Ridge,LogisticRegression
from keras.preprocessing import sequence
from keras.callbacks import ModelCheckpoint
from keras import backend as K
from keras.layers import Input, Embedding, LSTM, Dense,Flatten, Dropout, merge,Convolution1D,MaxPooling1D,Lambda
from keras.layers.normalization import BatchNormalization
from keras.optimizers import SGD,Nadam
from keras.layers.advanced_activations import PReLU,LeakyReLU,ELU,SReLU
from keras.models import Model
from keras.utils.visualize_util import plot
import xgboost as xgb
seed = 1024
max_index_label = 1021
dim = 128
lsi_dim = 300
path = "data/"
# Create bag-of-apps in character string format
# first by event
# then merge to generate larger bags by device
##################
# App Events
##################
print("# Read App Events")
app_ev = pd.read_csv(path+"app_events.csv", dtype={'device_id': np.str})
# remove duplicates(app_id)
app_ev = app_ev.groupby("event_id")["app_id"].apply(
lambda x: " ".join(set("app_id:" + str(s) for s in x)))
##################
# Events
##################
print("# Read Events")
events = pd.read_csv(path+"events_localized.csv", dtype={'device_id': np.str})
events["app_id"] = events["event_id"].map(app_ev)
events['local_hour'] = events['local_hour'].apply(lambda x: "local_hour:" + str(x))
hours_of_day = events[["device_id", "local_hour"]]
hours_of_day = hours_of_day.groupby("device_id")["local_hour"].apply(lambda x: " ".join(str(" ".join(str(s) for s in x)).split(" ")))
hours_of_day = hours_of_day.reset_index(name="local_hour")
hours_of_day = pd.concat([pd.Series(row['device_id'], row['local_hour'].split(' '))
for _, row in hours_of_day.iterrows()]).reset_index()
hours_of_day.columns = ['local_hour', 'device_id']
events = events.dropna()
del app_ev
events = events[["device_id", "app_id"]]
# remove duplicates(app_id)
events = events.groupby("device_id")["app_id"].apply(
lambda x: " ".join(set(str(" ".join(str(s) for s in x)).split(" "))))
events = events.reset_index(name="app_id")
# expand to multiple rows
events = pd.concat([pd.Series(row['device_id'], row['app_id'].split(' '))
for _, row in events.iterrows()]).reset_index()
events.columns = ['app_id', 'device_id']
##################
# Phone Brand
##################
print("# Read Phone Brand")
pbd = pd.read_csv(path+"phone_brand_device_model.csv",
dtype={'device_id': np.str})
pbd.drop_duplicates('device_id', keep='first', inplace=True)
##################
# Train and Test
##################
print("# Generate Train and Test")
train = pd.read_csv(path+"gender_age_train.csv",
dtype={'device_id': np.str})
train["gender"][train["gender"]=='M']=1
train["gender"][train["gender"]=='F']=0
Y_gender = train["gender"]
Y_age = train["age"]
Y_age = np.log(Y_age)
train.drop(["age", "gender"], axis=1, inplace=True)
test = pd.read_csv(path+"gender_age_test.csv",
dtype={'device_id': np.str})
test["group"] = np.nan
split_len = len(train)
# Group Labels
Y = train["group"]
lable_group = LabelEncoder()
Y = lable_group.fit_transform(Y)
device_id = test["device_id"]
# Concat
Df = pd.concat((train, test), axis=0, ignore_index=True)
Df = pd.merge(Df, pbd, how="left", on="device_id")
Df["phone_brand"] = Df["phone_brand"].apply(lambda x: "phone_brand:" + str(x))
Df["device_model"] = Df["device_model"].apply(
lambda x: "device_model:" + str(x))
###################
# Concat Feature
###################
f1 = Df[["device_id", "phone_brand"]] # phone_brand
f2 = Df[["device_id", "device_model"]] # device_model
f3 = events[["device_id", "app_id"]] # app_id
f4 = hours_of_day[["device_id", "local_hour"]]
del Df
f1.columns.values[1] = "feature"
f2.columns.values[1] = "feature"
f3.columns.values[1] = "feature"
f4.columns.values[1] = "feature"
FLS = pd.concat((f1, f2, f3, f4), axis=0, ignore_index=True)
###################
# User-Item Feature
###################
print("# User-Item-Feature")
device_ids = FLS["device_id"].unique()
feature_cs = FLS["feature"].unique()
data = np.ones(len(FLS))
dec = LabelEncoder().fit(FLS["device_id"])
row = dec.transform(FLS["device_id"])
col = LabelEncoder().fit_transform(FLS["feature"])
sparse_matrix = sparse.csr_matrix(
(data, (row, col)), shape=(len(device_ids), len(feature_cs)))
sparse_matrix = sparse_matrix[:, sparse_matrix.getnnz(0) > 0]
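# Note (added for clarity): sparse_matrix is now a device x feature incidence
# matrix -- one row per unique device_id, one column per unique feature token
# (phone_brand:*, device_model:*, app_id:*, local_hour:*), with a nonzero count
# wherever that device exhibits that feature; all-zero columns were just dropped.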
##################
# Data
##################
train_row = dec.transform(train["device_id"])
train_sp = sparse_matrix[train_row, :]
test_row = dec.transform(test["device_id"])
test_sp = sparse_matrix[test_row, :]
skf = StratifiedKFold(Y, n_folds=10, shuffle=True, random_state=seed)
# skf = KFold(train.shape[0],n_folds=5, shuffle=True, random_state=seed)
for ind_tr, ind_te in skf:
X_train = train_sp[ind_tr]
X_val = train_sp[ind_te]
y_train = Y[ind_tr]
y_val = Y[ind_te]
y_train_gender = Y_gender[ind_tr]
y_val_gender = Y_gender[ind_te]
y_train_age = Y_age[ind_tr]
y_val_age = Y_age[ind_te]
break
##################
# Feature Sel
##################
print("# Feature Selection")
selector = SelectPercentile(f_classif, percentile=23)
selector.fit(X_train, y_train)
X_train = selector.transform(X_train).toarray()
X_val = selector.transform(X_val).toarray()
train_sp = selector.transform(train_sp)
test_sp = selector.transform(test_sp).toarray()
print("# Num of Features: ", X_train.shape[1])
group_lb = LabelBinarizer()
labels = group_lb.fit_transform(Y)
y_train = group_lb.transform(y_train)
y_val = group_lb.transform(y_val)
inputs = Input(shape=(X_train.shape[1],), dtype='float32')
fc1 = Dense(512)(inputs)
fc1 = SReLU()(fc1)
dp1 = Dropout(0.5)(fc1)
y_train = [y_train,y_train_gender,y_train_age]
y_val = [y_val,y_val_gender,y_val_age]
# fc1_g = Dense(256)(dp1)
# fc1_g = SReLU()(fc1_g)
# dp1_g = Dropout(0.5)(fc1_g)
outputs_gender = Dense(1,activation='sigmoid',name='outputs_gender')(dp1)
# fc1_a = Dense(256)(dp1)
# fc1_a = SReLU()(fc1_a)
# dp1_a = Dropout(0.5)(fc1_a)
outputs_age = Dense(1,activation='linear',name='outputs_age')(dp1)
# fc2 = Dense(512)(dp1)
# fc2 = SReLU()(fc2)
# dp2 = Dropout(0.5)(fc2)
outputs = Dense(12,activation='softmax',name='outputs')(dp1)
inputs = [
inputs,
]
outputs = [
outputs,
outputs_gender,
outputs_age,
]
model = Model(input=inputs, output=outputs)
nadam = Nadam(lr=1e-4)
sgd = SGD(lr=0.005, decay=1e-6, momentum=0.9, nesterov=True)
# model.compile(
# optimizer=nadam,
# loss={'outputs': 'categorical_crossentropy'}
# )
model.compile(
optimizer=nadam,
loss={'outputs': 'categorical_crossentropy', 'outputs_gender': 'binary_crossentropy','outputs_age':'mse'},
loss_weights={'outputs': 1., 'outputs_gender': 1.,'outputs_age': 1.}
)
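# With the loss_weights above, the optimised objective is simply
#   total_loss = 1.0 * categorical_crossentropy(group)
#              + 1.0 * binary_crossentropy(gender)
#              + 1.0 * mse(log_age)
# i.e. the three output heads share the 512-unit hidden layer and are trained jointly.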
model_name = 'mlp_%s.hdf5'%'sparse'
model_checkpoint = ModelCheckpoint(path+model_name, monitor='val_outputs_loss', save_best_only=True)
plot(model, to_file=path+'%s.png'%model_name.replace('.hdf5',''),show_shapes=True)
nb_epoch = 20
batch_size = 128
load_model = True
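# Note: loading weights below assumes a checkpoint from a previous run already exists
# at path+model_name; on a first run this flag should be set to False.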
if load_model:
print('Load Model')
model.load_weights(path+model_name)
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=1, shuffle=True,
callbacks=[model_checkpoint],
validation_data=[X_val,y_val]
)
model.load_weights(path+model_name)
X_train = pd.read_csv(path+'gender_age_train.csv')
group_le = LabelEncoder()
group_lb = LabelBinarizer()
labels = group_le.fit_transform(X_train['group'].values)
labels = group_lb.fit_transform(labels)
device_id = pd.read_csv(path+'gender_age_test.csv')['device_id']
y_preds = model.predict(test_sp)[0]
# Write results
submission = pd.DataFrame(y_preds, columns=group_le.classes_)
submission["device_id"] = device_id
submission = submission.set_index("device_id")
submission.to_csv('submission_mlp_sparse.csv', index=True, index_label='device_id')
|
natesholland/talking-data-kaggle
|
classify-neural-nets-with-hours.py
|
Python
|
bsd-2-clause
| 8,754
|
### This plan is named 9999-* to have it after 999-load.py where xpdacq is configured ####
### Created by Sanjit Ghose 28th Aug, 2017 during new BS/xpdAcq/an upgrades ########
from xpdacq.beamtime import _configure_area_det
import os
import numpy as np
import itertools
from bluesky.plans import (scan, count, list_scan, adaptive_scan)
from bluesky.preprocessors import subs_wrapper, reset_positions_wrapper
from bluesky.plan_stubs import abs_set
from bluesky.callbacks import LiveTable, LivePlot
from bluesky.plan_tools import print_summary
#### Plan to run Gas/RGA2/pe1c over xpdacq protocols of samples ########
gas.gas_list = ['He', 'N2', 'CO2', 'Air']
def Gas_Plan(gas_in = 'He', liveplot_key=None, totExpTime = 5, num_exp = 1, delay = 1):
"""
Execute it
----------
>> %run -i /home/xf28id2/Documents/Sanjit/Scripts/GasXrun_Plan.py
>> change all the parameters inside Gas_Plan as required
>>> gas_plan = Gas_Plan(gas_in = 'He', liveplot_key= 'rga_mass1', totExpTime = 5, num_exp = 3, delay = 1)
>> to run the xrun, save metadata & save_tiff run the following
>>> run_and_save(sample_num = 0)
Example
-------
    Set the gases. They can be in any order; it has nothing to do with
    the order they are used in the plan, but it should match the connections on the switch.
>>> gas.gas_list = ['He', 'N2', 'CO2']
>>> RGA mass is set to different value, base shows 1^-13 with He 5 cc flow shows max 2^-8
>>> RGA mass setup in mID mode: 4,18,28,31,44,79,94,32,81
Parameters
----------
gas_in : string
e.g., 'He', default is 'He'
        The gas must be in `gas.gas_list`, but the gases may be listed there in any order.
liveplot_key : str, optional
        e.g., liveplot_key = 'rga_mass1'
        data key for LivePlot. Default is None, which means no LivePlot.
    totExpTime : float
        total exposure time per frame in seconds. Default value is 5 sec.
num_exp : int
number of images/exposure, default is 1
delay: float
delay time between exposures in sec
"""
## switch gas
yield from abs_set(gas, gas_in)
## configure the exposure time first
_configure_area_det(totExpTime) # 5 secs exposuretime
## ScanPlan you need
    plan = count([pe1c, gas.current_gas, rga], num=num_exp, delay=delay)
    #plan = subs_wrapper(plan, LiveTable([xpd_configuration['area_det'], rga])) # gives you a LiveTable
    plan = subs_wrapper(plan, LiveTable([xpd_configuration['area_det'], gas.current_gas, rga]))
    if liveplot_key and isinstance(liveplot_key, str):
        plan = subs_wrapper(plan, LivePlot(liveplot_key))
yield from plan
def run_and_save(sample_num = 0):
data_dir = "/direct/XF28ID2/pe2_data/xpdUser/tiff_base/"
file_name = data_dir + "sample_num_" + str(sample_num) + ".csv"
xrun(sample_num, gas_plan)
h = db[-1]
tb = h.table()
tb.to_csv(path_or_buf =file_name, columns = ['time', 'gas_current_gas', 'rga_mass1',
'rga_mass2', 'rga_mass3', 'rga_mass4', 'rga_mass5',
'rga_mass6', 'rga_mass7', 'rga_mass8', 'rga_mass9'])
integrate_and_save_last()
|
NSLS-II-XPD/ipython_ophyd
|
profile_collection/startup/9999-acq-plans.py
|
Python
|
bsd-2-clause
| 3,169
|
"""
AMI bake code."""
from __future__ import print_function
from ConfigParser import NoOptionError
from collections import OrderedDict, defaultdict
from subprocess import check_output
import datetime
import logging
import getpass
import re
import time
from os import path
import boto
import boto.ec2
import boto.exception
import dateutil.parser
from pytz import UTC
from . import normalize_path, read_config
from .resource_helper import wait_for_sshable, keep_trying, wait_for_state
from .disco_storage import DiscoStorage
from .disco_remote_exec import DiscoRemoteExec, SSH_DEFAULT_OPTIONS
from .disco_vpc import DiscoVPC
from .exceptions import CommandError, AMIError, WrongPathError, EarlyExitException
from .disco_constants import DEFAULT_CONFIG_SECTION
AMI_NAME_PATTERN = re.compile(r"^\w+\s(?:[0-9]+\s)?[0-9]{10,50}")
AMI_TAG_LIMIT = 10
class DiscoBake(object):
"""Class orchestrating baking in AWS"""
def __init__(self, config=None, connection=None, use_local_ip=False):
"""
:param config: Configuration object to use.
:param connection: Boto ec2 connection to use.
:param use_local_ip: Use local ip of instances for remote exec instead of public.
"""
if config:
self._config = config
else:
self._config = read_config()
self.connection = connection or boto.connect_ec2()
self.disco_storage = DiscoStorage(self.connection)
self._project_name = self._config.get("disco_aws", "project_name")
self._disco_remote_exec = None # lazily initialized
self._vpc = None # lazily initialized
self._use_local_ip = use_local_ip
self._final_stage = None
@property
def vpc(self):
"""Bake VPC"""
if not self._vpc:
environment_name = self._config.get("bake", "bakery_environment")
self._vpc = DiscoVPC.fetch_environment(environment_name=environment_name)
return self._vpc
@property
def disco_remote_exec(self):
'''Lazily creates a remote execution class'''
if not self._disco_remote_exec:
self._disco_remote_exec = DiscoRemoteExec(
self.vpc.get_credential_buckets(self._project_name))
return self._disco_remote_exec
@staticmethod
def time_diff_in_hours(now, old_time):
'''Returns the difference between two times in hours (floored)'''
if not now or not old_time:
return None
time_diff = now - old_time
return int(time_diff.total_seconds() / 60 / 60)
def pretty_print_ami(self, ami, age_since_when=None, in_prod=False):
'''Prints an a pretty AMI description to the standard output'''
name = ami.name
age_since_when = age_since_when or datetime.datetime.utcnow()
creation_time = self.get_ami_creation_time(ami)
if ami.name and AMI_NAME_PATTERN.match(ami.name):
name = self.ami_hostclass(ami)
output = "{0:12} {1:<19} {2:<35} {3:<12} {4:<8} {5:<15} {6:<5}".format(
ami.id,
str(creation_time),
name,
ami.state,
ami.tags.get("stage", "-"),
ami.tags.get("productline", "-"),
DiscoBake.time_diff_in_hours(age_since_when, creation_time),
)
if in_prod:
output += " prod" if self.is_prod_ami(ami) else " non-prod"
print(output)
def option(self, key):
'''Returns an option from the [bake] section of the disco_aws.ini config file'''
return self._config.get("bake", key)
def option_default(self, key, default=None):
'''Returns an option from the [bake] section of the disco_aws.ini config file'''
try:
return self._config.get("bake", key)
except NoOptionError:
return default
def hc_option(self, hostclass, key):
'''
        Returns an option from the [hostclass] section of the disco_aws.ini config file if it is set,
otherwise it returns that value from the [bake] section if it is set,
otherwise it returns that value from the DEFAULT_CONFIG_SECTION if it is set.
'''
if self._config.has_option(hostclass, key):
return self._config.get(hostclass, key)
elif self._config.has_option("bake", key):
return self.option(key)
else:
return self._config.get(DEFAULT_CONFIG_SECTION, "default_{0}".format(key))
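    # Illustrative lookup order for hc_option (section and option values here are
    # hypothetical, not taken from a real disco_aws.ini):
    #   [mhcfoo] defines the option     -> the [mhcfoo] value is returned
    #   only [bake] defines the option  -> the [bake] value is returned
    #   neither section defines it      -> falls back to default_<key> in the default section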
def hc_option_default(self, hostclass, key, default=None):
"""Fetch a hostclass configuration option if it exists, otherwise return value passed in as default"""
try:
return self.hc_option(hostclass, key)
except NoOptionError:
return default
def repo_instance(self):
""" Return active repo instance, else none """
# TODO Fix the circular dep between DiscoAWS and DiscoBake so we don't have to do this
from .disco_aws import DiscoAWS
aws = DiscoAWS(self._config, self.option("bakery_environment"))
filters = {"tag:hostclass": self.option("repo_hostclass"), "instance-state-name": "running"}
instances = aws.instances(filters)
return instances[0] if instances else None
def remotecmd(self, instance, *args, **kwargs):
"""
remotecmd accepts a boto instance followed by a list containing a string
of all arguments, starting with the program to run.
remotecmd optionally accepts three additional named arguments:
stdin -- the bytes to send into program input
nothrow -- when True the method will not throw if the program returns a non-zero result.
log_on_error -- when True, command output will be logged at the error level on non-zero result.
In addition to these explicit arguments, this method will redirect the
subprocesses's stderr to stdout, and capture stdout. If the logging level
is set to debug, it will log the captured output.
Returns a tuple of (return_code, captured_output).
examples:
self.remotecmd(inst, ['cat - > /tmp/myfile'], stdin='my content')
ret, out = self.remotecmd(inst, ['ls -l /etc'], nothrow=True)
"""
address = instance.private_ip_address if self._use_local_ip else instance.ip_address
if not address:
raise CommandError("No ip address available for sshing.")
kwargs["user"] = kwargs.get("user", "root")
return self.disco_remote_exec.remotecmd(address, *args, **kwargs)
def is_repo_ready(self):
""" True if repo is up"""
return self.repo_instance()
def init_phase(self, phase, instance, hostclass):
"""
Runs the init script for a particular phase
(i.e. phase1.sh for base bake and phase2.sh for hostclass specific bake).
"""
logging.info("Phase %s config", phase)
wait_for_sshable(self.remotecmd, instance)
self.copy_aws_data(instance)
self.invoke_host_init(instance, hostclass, "phase{0}.sh".format(phase))
def invoke_host_init(self, instance, hostclass, script):
"""
Executes an init script that was dropped off by disco_aws_data.
Hostclass and Hostname are passed in as arguments
"""
logging.info("Running remote init script %s.", script)
script = "{0}/init/{1}".format(self.option("data_destination"), script)
repo = self.repo_instance()
if not repo:
# hack, we insert a comment into /etc/hosts instead of ip.
repo_ip = "#None"
else:
repo_ip = self.repo_instance().private_ip_address
self.remotecmd(instance, [script, hostclass, repo_ip], log_on_error=True)
def ami_stages(self):
""" Return list of configured ami stages"""
return self.option("ami_stages").split()
@property
def final_stage(self):
"""
Name of final AMI promotion stage
"""
self._final_stage = self._final_stage or self.ami_stages()[-1]
return self._final_stage
def promote_ami_to_production(self, ami):
'''
Share this AMI with the production accounts
'''
for prod_account in self.option("prod_account_ids").split():
logging.warning("Permitting %s to be launched by prod account %s", ami.id, prod_account)
ami.set_launch_permissions(prod_account)
def promote_latest_ami_to_production(self, hostclass):
"""
Promote youngest ami of latest stage to production
"""
ami = self.find_ami(stage=self.final_stage, hostclass=hostclass)
self.promote_ami_to_production(ami)
def is_prod_ami(self, ami):
"""
        True if the AMI has been granted launch permission for all prod accounts.
"""
try:
launch_permissions = ami.get_launch_permissions()
except boto.exception.EC2ResponseError:
            # Most likely we failed to look up launch_permissions because it's
            # not our AMI, so we assume it's not prod executable. This is an
            # incorrect assumption in prod, but there we don't care.
return False
image_account_ids = [
account[0]
for account in launch_permissions.values()
]
prod_accounts = self.option("prod_account_ids").split()
if prod_accounts and set(prod_accounts) - set(image_account_ids):
return False
return True
def promote_ami(self, ami, stage):
'''
Change the stage tag of an AMI.
'''
if stage not in self.ami_stages():
raise AMIError("Unknown ami stage: {0}, check config option 'ami_stage'".format(stage))
self._tag_ami(ami, {"stage": stage})
def get_image(self, ami_id):
"""
Returns an AMI object given an AMI ID.
Raises an AMIError if we can't find the image
"""
try:
return self.connection.get_image(ami_id)
except:
raise AMIError("Could not locate image {0}.".format(ami_id))
def copy_aws_data(self, instance):
"""
Copies all the files in this repo to the destination instance.
"""
logging.info("Copying discoaws data.")
config_data_destination = self.option("data_destination")
asiaq_data_destination = self.option("data_destination") + "/asiaq"
self.remotecmd(instance, ["mkdir", "-p", asiaq_data_destination])
self._rsync(instance,
normalize_path(self.option("config_data_source")),
config_data_destination,
user="root")
# Ensure there is a trailing / for rsync to do the right thing
asiaq_data_source = re.sub(r'//$', '/', normalize_path(self.option("asiaq_data_source")) + "/")
self._rsync(instance,
asiaq_data_source,
asiaq_data_destination,
user="root")
def _rsync(self, instance, *args, **kwargs):
address = instance.private_ip_address if self._use_local_ip else instance.ip_address
return self.disco_remote_exec.rsync(address, *args, **kwargs)
def _get_phase1_ami_id(self, hostclass):
phase1_ami = self.find_ami(self.ami_stages()[-1], self.hc_option(hostclass, "phase1_ami_name"))
if not phase1_ami:
raise AMIError("Couldn't find phase 1 ami.")
return phase1_ami.id
def _enable_root_ssh(self, instance):
# Pylint wants us to name the exceptions, but we want to ignore all of them
# pylint: disable=W0702
ssh_args = SSH_DEFAULT_OPTIONS + ["-tt"]
try:
self.remotecmd(
instance,
["sudo", "cp", "/home/ubuntu/.ssh/authorized_keys", "/root/.ssh/authorized_keys"],
user="ubuntu", ssh_options=ssh_args)
except:
logging.debug("Ubuntu specific; enabling of root login during bake failed")
try:
self.remotecmd(
instance,
["sudo", "cp", "/home/centos/.ssh/authorized_keys", "/root/.ssh/authorized_keys"],
user="centos", ssh_options=ssh_args)
except:
logging.debug("CentOS >6 specific; enabling of root login during bake failed")
def bake_ami(self, hostclass, no_destroy, source_ami_id=None, stage=None):
# Pylint thinks this function has too many local variables and too many statements and branches
# pylint: disable=R0914, R0915, R0912
"""
        Boot an AMI, run init, and create a new AMI.
If hostclass is None then the phase is 1. If hostclass is not None then the default
phase will be the one specified in the bake section but this can be overridden for
a hostclass by specifying an explicit phase.
If no_destroy is True then the instance used to perform baking is not terminated at the end.
"""
config_path = normalize_path(self.option("config_data_source") + "/discoroot")
if not path.exists(config_path):
raise WrongPathError(
"Cannot locate data files relative to current working directory: %s"
"Ensure that you are baking from root of disco_aws_automation repo." % config_path
)
phase = int(self.hc_option(hostclass, "phase")) if hostclass else 1
if phase == 1:
base_image_name = hostclass if hostclass else self.option("phase1_ami_name")
source_ami_id = source_ami_id or self.hc_option(base_image_name, 'bake_ami')
hostclass = self.option("phase1_hostclass")
logging.info("Creating phase 1 AMI named %s based on upstream AMI %s",
base_image_name, source_ami_id)
else:
source_ami_id = source_ami_id or self._get_phase1_ami_id(hostclass)
base_image_name = hostclass
logging.info("Creating phase 2 AMI for hostclass %s based on phase 1 AMI %s",
base_image_name, source_ami_id)
image_name = "{0} {1}".format(base_image_name, int(time.time()))
if hostclass not in self.option("no_repo_hostclasses").split() and not self.is_repo_ready():
raise Exception("A {0} must be running to bake {1}"
.format(self.option("repo_hostclass"), hostclass))
interfaces = self.vpc.networks["tunnel"].create_interfaces_specification(public_ip=True)
image = None
# Don't map the snapshot on bake. Bake scripts shouldn't need the snapshotted volume.
reservation = self.connection.run_instances(
source_ami_id,
block_device_map=self.disco_storage.configure_storage(
hostclass, ami_id=source_ami_id, map_snapshot=False),
instance_type=self.option("bakery_instance_type"),
key_name=self.option("bake_key"),
network_interfaces=interfaces)
instance = reservation.instances[0]
try:
keep_trying(10, instance.add_tag, "hostclass", "bake_{0}".format(hostclass))
except Exception:
logging.exception("Setting hostclass during bake failed. Ignoring error.")
try:
wait_for_sshable(self.remotecmd, instance)
self._enable_root_ssh(instance)
self.init_phase(phase, instance, hostclass)
if no_destroy:
raise EarlyExitException("--no-destroy specified, skipping shutdown to allow debugging")
logging.debug("Stopping instance")
# We delete the authorized keys so there is no possibility of using the bake key
# for root login in production and we shutdown via the shutdown command to make
# sure the snapshot is of a clean filesystem that won't trigger fsck on start.
# We use nothrow to ignore ssh's 255 exit code on shutdown of centos7
self.remotecmd(instance, ["rm -Rf /root/.ssh/authorized_keys ; shutdown now -P"], nothrow=True)
wait_for_state(instance, u'stopped', 300)
logging.info("Creating snapshot from instance")
image_id = instance.create_image(image_name, no_reboot=True)
image = keep_trying(60, self.connection.get_image, image_id)
stage = stage or self.ami_stages()[0]
productline = self.hc_option_default(hostclass, "product_line", None)
DiscoBake._tag_ami_with_metadata(image, stage, source_ami_id, productline)
wait_for_state(image, u'available', 600)
logging.info("Created %s AMI %s", image_name, image_id)
except EarlyExitException as early_exit:
logging.info(str(early_exit))
except:
logging.exception("Snap shot failed. See trace below.")
raise
finally:
if not no_destroy:
instance.terminate()
else:
logging.info("Examine instance command: ssh root@%s",
instance.ip_address or instance.private_ip_address)
return image
@staticmethod
def _tag_ami_with_metadata(ami, stage, source_ami_id, productline=None):
"""
Tags an AMI with the stage, source_ami, the branch/git-hash of disco_aws_automation,
and the productline if provided
"""
tag_dict = OrderedDict()
tag_dict['stage'] = stage
tag_dict['source_ami'] = source_ami_id
tag_dict['baker'] = getpass.getuser()
tag_dict['version-asiaq'] = DiscoBake._git_ref()
if productline:
tag_dict['productline'] = productline
DiscoBake._tag_ami(ami, tag_dict)
@staticmethod
def _tag_ami(ami, tag_dict):
"""
Adds a dict of tags to an AMI with retries
"""
for tag_name in tag_dict.keys():
logging.info('Adding tag %s with value %s to ami', tag_name, tag_dict[tag_name])
keep_trying(10, ami.add_tag, tag_name, tag_dict[tag_name])
@staticmethod
def _old_amis_by_days(amis, max_days):
max_seconds = datetime.timedelta(days=max_days).total_seconds()
oldest_timestamp = int(time.time() - max_seconds)
return set([ami for ami in amis if DiscoBake.ami_timestamp(ami) < oldest_timestamp])
@staticmethod
def _ami_sort_key(ami):
'''
        This returns a sort key that can be sorted lexicographically
and end up sorted by hostclass and then creation time.
'''
keys = ami.name.split()
return "{0} {1:012d}".format(keys[0], int(keys[1]))
@staticmethod
def _old_amis_by_count(amis, max_count):
amis_sorted_by_creation_time_desc = sorted(
amis, key=DiscoBake._ami_sort_key, reverse=True)
return set(amis_sorted_by_creation_time_desc[max_count:])
def get_amis(self, image_ids=None, filters=None):
"""
Returns images owned by a trusted account (including ourselves)
"""
trusted_accounts = list(set(self.option_default("trusted_account_ids", "").split()) | set(['self']))
return self.connection.get_all_images(
image_ids=image_ids, owners=trusted_accounts, filters=filters)
def cleanup_amis(self, restrict_hostclass, product_line, stage, min_age, min_count, dry_run):
"""
Deletes oldest AMIs so long as they are older than min_age and there
are at least min_count AMIs remaining in the hostclass.
If restrict_hostclass is None then this will iterate over all hostclasses,
else it will only cleanup the amis in the matching hostclass.
If product_line is not None, then this will only iterate over amis tagged
with that specific productline.
"""
# Pylint complains that this function has one too many local variables. But deleting any
# would make it less readable, so...
# pylint: disable=R0914
now = datetime.datetime.utcnow()
filters = {"tag:stage": stage}
if product_line:
filters["tag:productline"] = product_line
amis = self.connection.get_all_images(owners=['self'], filters=filters)
ami_map = defaultdict(list)
for ami in amis:
if AMI_NAME_PATTERN.match(ami.name):
ami_map[DiscoBake.ami_hostclass(ami)].append(ami)
for hostclass, amis in ami_map.iteritems():
if restrict_hostclass and hostclass != restrict_hostclass:
continue
by_days = DiscoBake._old_amis_by_days(amis, min_age)
by_count = DiscoBake._old_amis_by_count(amis, min_count)
to_delete = by_days.intersection(by_count)
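            # Worked example of the policy above: with min_age=14 and min_count=5, an AMI
            # is deleted only if it is both older than 14 days and not among the 5 newest
            # AMIs for its hostclass.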
if to_delete:
logging.info("Deleting %s AMIs: %s", hostclass, to_delete)
for ami in to_delete:
self.pretty_print_ami(ami, now)
if not dry_run:
orphan_snapshot_ids = []
for ami in to_delete:
orphan_snapshot_ids.extend([bdm.snapshot_id for bdm in ami.block_device_mapping.values()
if bdm.snapshot_id])
ami.deregister()
# Delete snapshots of all the images we deleted
for orphan_snapshot_id in orphan_snapshot_ids:
keep_trying(10, self.connection.delete_snapshot, orphan_snapshot_id)
def list_amis_by_instance(self, instances=None):
"""
Fetch the AMI object from which the instance was started from indexed by instance id
:param instances: instances whose AMI objects we need
Return a dict of AMI's indexed by instance id
"""
ami_dict = defaultdict(list)
for instance in instances:
ami_dict[instance.image_id].append(instance.id)
return {instance_id: image for image in self.get_amis(ami_dict.keys())
for instance_id in ami_dict[image.id]}
def list_amis(self, ami_ids=None, instance_ids=None, stage=None, product_line=None):
"""
Fetch all AMI's filtered by supplied args
        :param ami_ids: AMI ids to filter by
:param instance_ids: ID's of instances whose AMI's we should filter by
:param stage: Stage to filter by
:param product_line: Product line to filter by
Return a list of matching AMI's
"""
if instance_ids:
instances = self.instances(instance_ids=instance_ids)
instance_amis = set([instance.image_id for instance in instances])
if ami_ids:
ami_ids = list(instance_amis.intersection(ami_ids))
else:
ami_ids = list(instance_amis)
return self.ami_filter(self.get_amis(ami_ids), stage, product_line)
def list_stragglers(self, days=1, stage=None):
"""
        Returns a dictionary whose keys are hostclasses for which AMIs have not been
        recently promoted from the first stage. The value is the last unpromoted AMI for the
        hostclass, which is the best candidate for promotion. The value will be None if no
        unpromoted AMIs exist.
Arguments:
Days -- How recently the AMI should have been promoted in days. (default '1')
Stage -- Minimum stage to which the AMI should have been promoted.
(default 'None', the second stage of promotion)
"""
amis = self.get_amis()
hostclasses = set([DiscoBake.ami_hostclass(ami) for ami in amis])
first_stage = self.ami_stages()[0]
stage = stage or self.ami_stages()[1]
cutoff_time = int(time.time()) - days * 60 * 60 * 24
stragglers = dict()
for hostclass in hostclasses:
latest_promoted = self.find_ami(stage, hostclass)
if not latest_promoted or DiscoBake.ami_timestamp(latest_promoted) < cutoff_time:
latest = self.find_ami(first_stage, hostclass)
stragglers[hostclass] = latest
return stragglers
def delete_ami(self, ami):
"""
Delete an AMI
"""
logging.info("Deleting AMI %s", ami)
self.connection.deregister_image(ami, delete_snapshot=True)
def get_snapshots(self, ami):
"""Returns a snapshot object for an AMI object
If an AMI maps multiple block devices, one is chosen without any specific ordering.
"""
snapshot_ids = [value.snapshot_id for _key, value in ami.block_device_mapping.iteritems()]
ids = [snap for snap in snapshot_ids if snap]
try:
return self.connection.get_all_snapshots(snapshot_ids=ids)
except boto.exception.EC2ResponseError:
return []
def get_ami_creation_time_from_snapshots(self, ami):
"""Returns age of newest snapshot attached to an AMI"""
snapshots = self.get_snapshots(ami)
start_times = [dateutil.parser.parse(snapshot.start_time) for snapshot in snapshots]
return max(start_times).replace(tzinfo=None) if start_times else None
def get_ami_creation_time(self, ami):
"""Returns AMI creation time using least costly method that works, None if none works"""
if not ami:
return None
return (DiscoBake.extract_ami_creation_time_from_ami_name(ami) or
self.get_ami_creation_time_from_snapshots(ami))
@staticmethod
def ami_timestamp(ami):
"""Return creation timestamp from ami name, returns 0 if one is not found"""
try:
return int(ami.name.split()[-1])
except (ValueError, IndexError):
return 0
@staticmethod
def extract_ami_creation_time_from_ami_name(ami):
"""Returns creation time from AMI name, returns None if it isn't there"""
seconds_since_epoch = DiscoBake.ami_timestamp(ami)
if not seconds_since_epoch:
return None
else:
timestamp = datetime.datetime.fromtimestamp(seconds_since_epoch, tz=UTC) # our timestamp is UTC
timestamp_naive = timestamp.replace(tzinfo=None) # but the rest of the code expects no tz
return timestamp_naive
@staticmethod
def ami_hostclass(ami):
"""Return hostclass/ami-type from ami"""
return ami.name.split()[0]
def ami_filter(self, amis, stage=None, product_line=None):
"""
Returns a filtered subset of amis. Optionally filtered by the product line
that they belong to, as well as their stage.
"""
if not product_line and not stage:
return amis
filter_amis = amis[:]
if product_line:
filter_amis = [ami for ami in filter_amis if ami.tags.get("productline", None) == product_line]
if stage:
filter_amis = [ami for ami in filter_amis if ami.tags.get("stage", None) == stage]
return filter_amis
def find_ami(self, stage, hostclass=None, ami_id=None, product_line=None):
"""
Find latest AMI of compatible stage, filtered on AMI's hostclass, id, or product_line
Note that id overrides stage, product_line, and hostclass options.
"""
if ami_id:
amis = self.get_amis([ami_id])
return amis[0] if amis else None
elif hostclass:
filters = {}
filters["name"] = "{0} *".format(hostclass)
amis = self.ami_filter(self.get_amis(filters=filters), stage, product_line)
return max(amis, key=self.ami_timestamp) if amis else None
else:
raise ValueError("Must specify either hostclass or AMI")
@staticmethod
def _git_ref():
"""
Returns a string containing the current branch and git hash
"""
branch = check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
githash = check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
return '%s-%s' % (branch, githash)
|
Angakkuit/asiaq-aws
|
disco_aws_automation/disco_bake.py
|
Python
|
bsd-2-clause
| 28,053
|
#!/usr/bin/python
"""
Bottom up rewrite generator
---------------------------
This script takes as input a description of patterns and outputs a
matcher class that can match trees given the patterns.
Patterns are specified as follows::
reg -> ADDI32(reg, reg) 2 (. add NT0 NT1 .)
reg -> MULI32(reg, reg) 3 (. .)
or a multiply add::
reg -> ADDI32(MULI32(reg, reg), reg) 4 (. muladd $1, $2, $3 .)
The general specification pattern is::
[result] -> [tree] [cost] [template code]
Trees
-----
A tree is described using parenthesis notation. For example a node X with
three child nodes is described as:
X(a, b, b)
Trees can be nested:
X(Y(a, a), a)
The 'a' in the example above indicates an open connection to a next tree
pattern.
In the example above 'reg' is a non-terminal. ADDI32 is a terminal.
non-terminals
cannot have child nodes. A special case occurs in this case:
reg -> rc
where 'rc' is a non-terminal. This is an example of a chain rule. Chain rules
can be used to allow several variants of non-terminals.
The generated matcher uses dynamic programming to find the best match of the
tree. This strategy consists of two steps:
- label: During this phase the given tree is traversed in a bottom up way.
each node is labelled with a possible matching rule and the corresponding
cost.
- select: In this step, the tree is traversed again, selecting at each point
the cheapest way to get to the goal.
"""
import sys
from os import path
import argparse
from ppci.lang.common import Token, SourceLocation
from ppci.lang.tools import baselex, yacc
from ppci.utils.tree import Tree
# Generate parser on the fly:
spec_file = path.join(path.dirname(path.abspath(__file__)), "burg.grammar")
burg_parser = yacc.load_as_module(spec_file)
class BurgLexer(baselex.BaseLexer):
""" Overridden base lexer to keep track of sections """
def __init__(self):
tok_spec = [
("id", r"[A-Za-z][A-Za-z\d_]*", lambda typ, val: (typ, val)),
("kw", r"%[A-Za-z][A-Za-z\d_]*", lambda typ, val: (val, val)),
("number", r"\d+", lambda typ, val: (typ, int(val))),
("STRING", r"'[^']*'", lambda typ, val: ("string", val[1:-1])),
("OTHER", r"[:;\|\(\),]", lambda typ, val: (val, val)),
("SKIP", r"[ ]", None),
]
super().__init__(tok_spec)
def tokenize(self, txt):
lines = txt.split("\n")
header_lines = []
section = 0
for line in lines:
loc = SourceLocation(self.filename, 0, 0, 0)
line = line.strip()
if not line:
continue # Skip empty lines
elif line == "%%":
section += 1
if section == 1:
yield Token("header", header_lines, loc)
yield Token("%%", "%%", loc)
else:
if section == 0:
header_lines.append(line)
else:
# we could use yield from below, but python 3.2 does not
# work then:
for tk in super().tokenize(line):
yield tk
class Rule:
""" A rewrite rule. Specifies a tree that can be rewritten into a result
at a specific cost """
def __init__(self, non_term, tree, cost, acceptance, template):
self.non_term = non_term
self.tree = tree
self.cost = cost
self.acceptance = acceptance
self.template = template
self.nr = 0
def __repr__(self):
return "{} -> {} ${}".format(self.non_term, self.tree, self.cost)
class Symbol:
def __init__(self, name):
self.name = name
class Term(Symbol):
pass
class Nonterm(Symbol):
def __init__(self, name):
super().__init__(name)
self.chain_rules = []
class BurgSystem:
def __init__(self):
self.rules = []
self.symbols = {}
self.terminals = set()
self.non_terminals = set()
self.rule_map = {}
self.goal = None
def add_rule(self, non_term, tree, cost, acceptance, template):
if isinstance(template, str):
template = template.strip()
if not template:
template = "pass"
rule = Rule(non_term, tree, cost, acceptance, template)
if len(tree.children) == 0 and tree.name not in self.terminals:
self.non_term(tree.name).chain_rules.append(rule)
self.non_term(rule.non_term)
self.rules.append(rule)
rule.nr = len(self.rules)
# Register the rule in the rule map:
self.rule_map[tree.name].append(rule)
return rule
def get_rule(self, nr):
""" Get a rule by rule number """
rule = self.rules[nr - 1]
assert rule.nr == nr
return rule
def get_rules_for_root(self, name):
""" Get only the rules for a given root name """
return self.rule_map[name]
def non_term(self, name: str):
if name in self.terminals:
raise BurgError("Cannot redefine terminal")
if not self.goal:
self.goal = name
return self.install(name, Nonterm)
def tree(self, name: str, *args):
return Tree(name, *args)
def chain_rules_for_nt(self, non_terminal):
""" Retrieve chain rules for a given non terminal """
return self.symbols[non_terminal].chain_rules
def install(self, name: str, t):
assert isinstance(name, str)
if name in self.symbols:
assert type(self.symbols[name]) is t
else:
self.symbols[name] = t(name)
if t is Term:
self.terminals.add(name)
else:
assert t is Nonterm
self.non_terminals.add(name)
self.rule_map[name] = list()
return self.symbols[name]
def add_terminal(self, terminal):
self.install(terminal, Term)
def tree_terminal_equal(self, t1, t2):
""" Check if the terminals of a tree match """
if t1.name in self.terminals and t2.name in self.terminals:
if t1.name == t2.name:
# match children:
return all(
self.tree_terminal_equal(a, b)
for a, b in zip(t1.children, t2.children)
)
else:
return False
else:
# We hit an open end
assert (
t1.name in self.non_terminals or t2.name in self.non_terminals
)
return True
def get_kids(self, tree, template_tree):
""" Get the kids of a tree given a template that matched """
kids = []
if template_tree.name in self.non_terminals:
assert len(template_tree.children) == 0
kids.append(tree)
else:
for t, tt in zip(tree.children, template_tree.children):
kids.extend(self.get_kids(t, tt))
return kids
def get_nts(self, template_tree):
""" Get the names of the non terminals of a template """
nts = []
if template_tree.name in self.non_terminals:
assert len(template_tree.children) == 0
nts.append(template_tree.name)
else:
for tt in template_tree.children:
nts.extend(self.get_nts(tt))
return nts
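    # Example (using the patterns from the module docstring): for the template tree of
    # 'reg -> ADDI32(reg, reg)' matched against a concrete tree ADDI32(t1, t2),
    # get_kids returns [t1, t2] and get_nts returns ['reg', 'reg'].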
def check_tree_defined(self, tree):
""" Check if all names in a tree are defined """
for name in tree.get_defined_names():
if name not in self.symbols.keys():
raise BurgError("{} not defined".format(name))
def check(self):
""" Run sanity checks on this burg system """
for rule in self.rules:
self.check_tree_defined(rule.tree)
# Check burg system for completeness:
# print(self.sys.non_terminals)
# TODO: check if all possible code can be covered.
class BurgError(Exception):
pass
class BurgParser(burg_parser.Parser):
""" Derived from automatically generated parser """
def parse(self, l):
self.system = BurgSystem()
super().parse(l)
return self.system
class BurgGenerator:
def print(self, level, text=""):
""" Print helper function that prints to output file """
print(" " * level + text, file=self.output_file)
def generate(self, system, output_file):
""" Generate script that implements the burg spec """
self.output_file = output_file
self.system = system
self.print(0, "#!/usr/bin/python")
self.print(
0, "from ppci.codegen.treematcher import BaseMatcher, State"
)
self.print(0, "from ppci.utils.tree import Tree")
for header in self.system.header_lines:
self.print(0, header)
self.print(0)
self.print(0, "class Matcher(BaseMatcher):")
self.print(1, "def __init__(self):")
self.print(2, "self.kid_functions = {}")
self.print(2, "self.nts_map = {}")
self.print(2, "self.pat_f = {}")
for rule in self.system.rules:
kids, dummy = self.compute_kids(rule.tree, "t")
rule.num_nts = len(dummy)
lf = "lambda t: [{}]".format(", ".join(kids))
pf = "self.P{}".format(rule.nr)
self.print(0)
self.print(2, "# {}: {}".format(rule.nr, rule))
self.print(2, "self.kid_functions[{}] = {}".format(rule.nr, lf))
self.print(2, "self.nts_map[{}] = {}".format(rule.nr, dummy))
self.print(2, "self.pat_f[{}] = {}".format(rule.nr, pf))
self.print(0)
for rule in self.system.rules:
if rule.num_nts > 0:
args = ", ".join("c{}".format(x) for x in range(rule.num_nts))
args = ", " + args
else:
args = ""
# Create template function:
self.print(0)
self.print(1, "def P{}(self, tree{}):".format(rule.nr, args))
template = rule.template
for t in template.split(";"):
self.print(2, "{}".format(t.strip()))
# Create acceptance function:
if rule.acceptance:
self.print(0)
self.print(1, "def A{}(self, tree):".format(rule.nr))
for t in rule.acceptance.split(";"):
self.print(2, "{}".format(t.strip()))
self.emit_state()
self.print(1, "def gen(self, tree):")
self.print(2, "self.burm_label(tree)")
self.print(
2, 'if not tree.state.has_goal("{}"):'.format(self.system.goal)
)
self.print(3, 'raise Exception("Tree {} not covered".format(tree))')
self.print(
2, 'return self.apply_rules(tree, "{}")'.format(self.system.goal)
)
def emit_record(self, rule, state_var):
        # TODO: check for rules fulfilled (by not using 999999)
acc = ""
if rule.acceptance:
acc = " and self.A{}(tree)".format(rule.nr)
self.print(3, "nts = self.nts({})".format(rule.nr))
self.print(3, "kids = self.kids(tree, {})".format(rule.nr))
self.print(
3,
"if all(x.state.has_goal(y) for x, y in zip(kids, nts)){}:".format(
acc
),
)
self.print(
4,
(
"c = sum(x.state.get_cost(y) for x, y in zip(kids, nts)) + {}"
).format(rule.cost),
)
self.print(
4,
'tree.state.set_cost("{}", c, {})'.format(rule.non_term, rule.nr),
)
for cr in self.system.symbols[rule.non_term].chain_rules:
self.print(4, "# Chain rule: {}".format(cr))
self.print(
4,
'tree.state.set_cost("{}", c + {}, {})'.format(
cr.non_term, cr.cost, cr.nr
),
)
def emit_state(self):
""" Emit a function that assigns a new state to a node """
self.print(1, "def burm_state(self, tree):")
self.print(2, "tree.state = State()")
for term in self.system.terminals:
self.emitcase(term)
self.print(0)
def emitcase(self, term):
rules = [rule for rule in self.system.rules if rule.tree.name == term]
for rule in rules:
condition = self.emittest(rule.tree, "tree")
self.print(2, "if {}:".format(condition))
self.emit_record(rule, "state")
def compute_kids(self, t, root_name):
""" Compute of a pattern the blanks that must be provided
from below in the tree """
if t.name in self.system.non_terminals:
return [root_name], [t.name]
else:
k = []
nts = []
for i, c in enumerate(t.children):
pfx = root_name + ".children[{}]".format(i)
kf, dummy = self.compute_kids(c, pfx)
nts.extend(dummy)
k.extend(kf)
return k, nts
def emittest(self, tree, prefix):
""" Generate condition for a tree pattern """
ct = (
c for c in tree.children if c.name not in self.system.non_terminals
)
child_tests = (
self.emittest(c, prefix + ".children[{}]".format(i))
for i, c in enumerate(ct)
)
child_tests = ("({})".format(ct) for ct in child_tests)
child_tests = " and ".join(child_tests)
child_tests = " and " + child_tests if child_tests else ""
tst = '{}.name == "{}"'.format(prefix, tree.name)
return tst + child_tests
def make_argument_parser():
""" Constructs an argument parser """
parser = argparse.ArgumentParser(
description="pyburg bottom up rewrite system generator"
)
parser.add_argument(
"source", type=argparse.FileType("r"), help="the parser specification"
)
parser.add_argument(
"-o", "--output", type=argparse.FileType("w"), default=sys.stdout
)
return parser
def main(args):
src = args.source.read()
args.source.close()
# Parse specification into burgsystem:
l = BurgLexer()
p = BurgParser()
l.feed(src)
burg_system = p.parse(l)
# Generate matcher:
generator = BurgGenerator()
generator.generate(burg_system, args.output)
if __name__ == "__main__":
# Parse arguments:
args = make_argument_parser().parse_args()
main(args)
|
windelbouwman/ppci-mirror
|
ppci/codegen/burg.py
|
Python
|
bsd-2-clause
| 14,548
|
import sys
import subprocess
from path import path
def set_up_virtualenv_and_requirements(airship, bucket, **extra):
from airship.deployer import DeployError
requirements_file = bucket.folder / 'requirements.txt'
if requirements_file.isfile():
config = airship.config.get('python', {})
index_dir = config['dist']
venv = bucket.folder / '_virtualenv'
pip = venv / 'bin' / 'pip'
virtualenv_py = airship.home_path / 'dist' / 'virtualenv.py'
python = config.get('interpreter', 'python')
try:
subprocess.check_call([python, virtualenv_py, venv,
'--distribute', '--never-download',
'--extra-search-dir=' + index_dir])
except subprocess.CalledProcessError:
raise DeployError(bucket, "Failed to create a virtualenv.")
try:
subprocess.check_call([pip, 'install', 'wheel', '--no-index',
'--find-links=file://' + index_dir])
except subprocess.CalledProcessError:
raise DeployError(bucket, "Failed to install wheel.")
try:
subprocess.check_call([pip, 'install', '-r', requirements_file,
'--use-wheel', '--no-index',
'--find-links=file://' + index_dir])
except subprocess.CalledProcessError:
raise DeployError(bucket, "Failed to install requirements.")
def activate_virtualenv(airship, bucket, environ, **extra):
venv = bucket.folder / '_virtualenv'
if venv.isdir():
environ['PATH'] = ((venv / 'bin') + ':' + environ['PATH'])
def load(airship):
from airship.deployer import bucket_setup
from airship.core import bucket_run
bucket_setup.connect(set_up_virtualenv_and_requirements, airship)
bucket_run.connect(activate_virtualenv, airship)
def do_wheel(airship, args):
config = airship.config.get('python', {})
index_dir = config['dist']
argv = [path(sys.prefix) / 'bin' / 'pip',
'wheel',
'--no-deps',
'-w', index_dir]
subprocess.check_call(argv + args.wheel_argv)
from airship.core import define_arguments
@define_arguments.connect
def register_wheel_subcommand(sender, create_command):
import argparse
wheel_cmd = create_command('wheel', do_wheel)
wheel_cmd.add_argument('wheel_argv', nargs=argparse.REMAINDER)
|
mgax/airship
|
airship/contrib/python/__init__.py
|
Python
|
bsd-2-clause
| 2,464
|
# -*- coding: utf-8 -*-
import yt.mods as yt
import numpy as np
mu0 = 1.25663706e-6
gamma = 1.6666
@yt.derived_field(take_log=False, units=r'kg m^{-3}')
def density(field, data):
return data['density_pert'] + data['density_bg']
@yt.derived_field(take_log=False, units=r'T')
def mag_field_x(field, data):
return data['mag_field_x_pert'] + data['mag_field_x_bg']
@yt.derived_field(take_log=False, units=r'T')
def mag_field_y(field, data):
return data['mag_field_y_pert'] + data['mag_field_y_bg']
@yt.derived_field(take_log=False, units=r'T')
def mag_field_z(field, data):
return data['mag_field_z_pert'] + data['mag_field_z_bg']
@yt.derived_field(take_log=False, units=r'T')
def mag_field_magnitude(field, data):
return np.sqrt(data['mag_field_x']**2 + data['mag_field_y']**2 + data['mag_field_z']**2)
@yt.derived_field(take_log=False, units=r'T')
def mag_field_pert_magnitude(field, data):
return np.sqrt(data['mag_field_x_pert']**2 + data['mag_field_y_pert']**2 +
data['mag_field_z_pert']**2)
@yt.derived_field(take_log=False, units=r'T')
def velocity_magnitude(field, data):
    return np.sqrt(data['velocity_x']**2 + data['velocity_y']**2 +
                   data['velocity_z']**2)
@yt.derived_field(take_log=False, units=r'Pa')
def internal_energy(field, data):
return data['internal_energy_pert'] + data['internal_energy_bg']
@yt.derived_field(take_log=False, units=r'Pa')
def mag_pressure(field, data):
return (data['mag_field_x']**2 + data['mag_field_y']**2 + data['mag_field_z']**2) / (2. * mu0)
@yt.derived_field(take_log=False, units=r'Pa')
def thermal_pressure(field, data):
#p = (\gamma -1) ( e - \rho v^2/2 - B^2/2)
g1 = gamma -1 #(header['eqpar'][0]-1)
kp = (data['density'] * (data['velocity_x']**2 + data['velocity_y']**2 + data['velocity_z']**2))/2.
return g1 * (data['internal_energy'] - kp - data['mag_pressure'])
@yt.derived_field(take_log=False, units=r'm s^{-1}')
def alfven_speed(field, data):
return np.sqrt(data['mag_field_x']**2 + data['mag_field_y']**2 + data['mag_field_z']**2) / np.sqrt(data['density'])
@yt.derived_field(take_log=False, units=r'm s^{-1}')
def sound_speed(field, data):
return np.sqrt((gamma * data['thermal_pressure']) / data['density'])
@yt.derived_field(take_log=False, units=r'')
def plasma_beta(field, data):
return data['mag_pressure'] / data['thermal_pressure']
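# Note: the conventional plasma beta is usually defined as thermal_pressure /
# mag_pressure; the field above returns the reciprocal (mag_pressure / thermal_pressure),
# so interpret plots of it accordingly. Numerically (illustrative values), B = 0.01 T
# gives mag_pressure = B**2 / (2 * mu0) ~ 39.8 Pa.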
#
#@yt.derived_field(take_log=False, units=r'Pa')
#def wave_flux_x(field, data):
# Bb = np.array([f.w_dict['bg3'], f.w_dict['bg2'], f.w_dict['bg1']])
# Bp = np.array([f.w_dict['b3'], f.w_dict['b2'], f.w_dict['b1']])
# V = np.array([f.w_sac['v3'], f.w_sac['v2'], f.w_sac['v1']])
#
# #Calculate wave flux
# Fp = 0.25*np.pi * (np.sum(Bb*Bp, axis=0)[None] * V) - (np.sum(V*Bp, axis=0)[None] * Bb)
# Fa = pk[None]*V
#
# Fwave = Fa + Fp
# def get_total_p(self):
# if self.header['ndim'] == 3:
# gamma = self.header['eqpar'][0]
#
# vtot2 = (self.w_sac['v1']**2 + self.w_sac['v2']**2 + self.w_sac['v3']**2)
# therm = self.w[self.w_["e"]] - (self.w_sac["rho"] * vtot2) / 2.
#
# Bpert = self.w[self.w_['b1']] + self.w[self.w_['b2']] + self.w[self.w_['b3']]
# Bpert2 = self.w[self.w_['b1']]**2 + self.w[self.w_['b2']]**2 + self.w[self.w_['b3']]**2
# Bback = self.w[self.w_['bg1']] + self.w[self.w_['bg2']] + self.w[self.w_['bg3']]
# mag = Bback * Bpert + (Bpert2 / 2.)
#
# return (gamma - 1) * therm - (gamma - 2) * mag
# else:
# raise NotImplementedError("This Dosen't work for 2D yet, go fix")
#
# def get_temp(self,p=None):
# if not(p):
# p = self.get_thermalp()
# T = (p * 1.2) / (8.3e3 * self.w_sac['rho'])
# return T
#
# def get_bgtemp(self):
# print "WARNING: Background Temprature will not work if inital conditions are not V=0"
# if self.header['ndim'] == 3:
# kp = 0.0#(self.w[self.w_["rhob"]] * (self.w_sac['v1']**2 + self.w_sac['v2']**2 + self.w_sac['v3']**2))/2.
# mp = (self.w[self.w_["bg1"]]**2 + self.w[self.w_["bg2"]]**2 + self.w[self.w_["bg3"]]**2) / 2.
# T = self.w[self.w_["eb"]] - kp - mp
# else:
# kp = 0.0#(self.w[self.w_["rhob"]] * (self.w_sac['v1']**2 + self.w_sac['v2']**2))/2.
# mp = (self.w[self.w_["bg1"]]**2 + self.w[self.w_["bg2"]]**2) / 2.
# T = self.w[self.w_["eb"]] - kp - mp
# return T
#
# def get_va(self):
# return (np.sqrt(self.w_sac['b1']**2 + self.w_sac['b2']**2
# + self.w_sac['b3']**2) / np.sqrt(self.w_sac['rho']))
# #return (abs(self.w_sac['b1']) + abs(self.w_sac['b2']) + abs(self.w_sac['b3'])) / sqrt(self.w_sac['rho'])
#
# def get_cs(self,p=None):
# if not p:
# p = self.get_thermalp()
# g1 = self.header['eqpar'][0]
# return np.sqrt((g1 * p) / self.w_sac['rho'])
|
Cadair/scipy2014plotting
|
yt_fields.py
|
Python
|
bsd-2-clause
| 5,015
|
from webapp.models.user import User
class TestMainController:
def test_home(self, client):
rv = client.get('/')
assert rv.status_code == 200
def test_login(self, client, session):
admin = User('admin', 'supersafepassword')
session.add(admin)
session.commit()
rv = client.post('/login', data={
'username': 'admin',
'password': 'supersafepassword',
})
assert rv.status_code == 200
def test_login_fail(self, client):
rv = client.post('/login', data={
'username': 'admin',
'password': 'badpassword',
})
assert rv.status_code == 401
def test_logout(self, client, session):
admin = User('admin', 'supersafepassword')
session.add(admin)
session.commit()
rv = client.post('/login', data={
'username': 'admin',
'password': 'supersafepassword',
})
rv = client.get('/logout')
assert rv.status_code == 200
def test_restricted(self, client):
rv = client.get('/restricted')
assert rv.status_code == 401
|
CarlEkerot/flask-orm
|
tests/controllers/test_main_controller.py
|
Python
|
bsd-2-clause
| 1,153
|
def safe_float(obj):
'safe version of float()'
try:
retval = float(obj)
    except (TypeError, ValueError), diag:
retval = str(diag)
return retval
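# Doctest-style illustration of safe_float (exact error wording may vary by Python
# version):
#   safe_float('12.50') -> 12.5
#   safe_float('abc')   -> 'could not convert string to float: abc'
#   safe_float(None)    -> a TypeError message string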
def main():
'handles all the data processing'
    log = open('cardlog.txt', 'w')
    try:
        ccfile = open('carddata.txt', 'r')
    except IOError, e:
        log.write('no txns this month\n')
        log.close()
        return
    txns = ccfile.readlines()
    ccfile.close()
    total = 0.00
    log.write('account log:\n')
    for eachTxn in txns:
        result = safe_float(eachTxn)
        if isinstance(result, float):
            print result
            total += result
            log.write('data...processed\n')
        else:
            log.write('ignored:%s' % result)
    print '$%.2f (new balance)' % total
    log.close()
if __name__ == '__main__':
main()
|
github641/python-journey
|
safe_float_10.3.7.py
|
Python
|
bsd-2-clause
| 880
|
"""SCons.Tool.PharLapCommon
This module contains common code used by all Tools for the
Phar Lap ETS tool chain. Right now, this is linkloc and
386asm.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/PharLapCommon.py 3603 2008/10/10 05:46:45 scons"
import os
import os.path
import SCons.Errors
import SCons.Util
import re
import string
def getPharLapPath():
"""Reads the registry to find the installed path of the Phar Lap ETS
development kit.
Raises UserError if no installed version of Phar Lap can
be found."""
if not SCons.Util.can_read_reg:
raise SCons.Errors.InternalError, "No Windows registry module was found"
try:
k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
'SOFTWARE\\Pharlap\\ETS')
val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir')
# The following is a hack...there is (not surprisingly)
# an odd issue in the Phar Lap plug in that inserts
# a bunch of junk data after the phar lap path in the
# registry. We must trim it.
idx=val.find('\0')
if idx >= 0:
val = val[:idx]
return os.path.normpath(val)
except SCons.Util.RegError:
raise SCons.Errors.UserError, "Cannot find Phar Lap ETS path in the registry. Is it installed properly?"
REGEX_ETS_VER = re.compile(r'#define\s+ETS_VER\s+([0-9]+)')
def getPharLapVersion():
"""Returns the version of the installed ETS Tool Suite as a
decimal number. This version comes from the ETS_VER #define in
the embkern.h header. For example, '#define ETS_VER 1010' (which
is what Phar Lap 10.1 defines) would cause this method to return
1010. Phar Lap 9.1 does not have such a #define, but this method
will return 910 as a default.
Raises UserError if no installed version of Phar Lap can
be found."""
include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h"))
if not os.path.exists(include_path):
raise SCons.Errors.UserError, "Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?"
mo = REGEX_ETS_VER.search(open(include_path, 'r').read())
if mo:
return int(mo.group(1))
# Default return for Phar Lap 9.1
return 910
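# Example of the parsing above: a header containing '#define ETS_VER 1010' matches
# REGEX_ETS_VER and getPharLapVersion() returns 1010; without such a define it falls
# back to 910 (Phar Lap 9.1).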
def addPathIfNotExists(env_dict, key, path, sep=os.pathsep):
"""This function will take 'key' out of the dictionary
'env_dict', then add the path 'path' to that key if it is not
already there. This treats the value of env_dict[key] as if it
has a similar format to the PATH variable...a list of paths
separated by tokens. The 'path' will get added to the list if it
is not already there."""
try:
is_list = 1
paths = env_dict[key]
if not SCons.Util.is_List(env_dict[key]):
paths = string.split(paths, sep)
is_list = 0
if not os.path.normcase(path) in map(os.path.normcase, paths):
paths = [ path ] + paths
if is_list:
env_dict[key] = paths
else:
env_dict[key] = string.join(paths, sep)
except KeyError:
env_dict[key] = path
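# Illustrative behaviour of addPathIfNotExists (hypothetical values, assuming the
# Windows ';' path separator):
#   d = {'PATH': r'C:\foo;C:\bar'}
#   addPathIfNotExists(d, 'PATH', r'C:\baz')  # -> d['PATH'] == r'C:\baz;C:\foo;C:\bar'
#   addPathIfNotExists(d, 'PATH', r'C:\foo')  # -> unchanged, path already present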
def addPharLapPaths(env):
"""This function adds the path to the Phar Lap binaries, includes,
and libraries, if they are not already there."""
ph_path = getPharLapPath()
try:
env_dict = env['ENV']
except KeyError:
env_dict = {}
env['ENV'] = env_dict
addPathIfNotExists(env_dict, 'PATH',
os.path.join(ph_path, 'bin'))
addPathIfNotExists(env_dict, 'INCLUDE',
os.path.join(ph_path, 'include'))
addPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, 'lib'))
addPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, os.path.normpath('lib/vclib')))
env['PHARLAP_PATH'] = getPharLapPath()
env['PHARLAP_VERSION'] = str(getPharLapVersion())
|
frew/simpleproto
|
scons-local-1.1.0/SCons/Tool/PharLapCommon.py
|
Python
|
bsd-2-clause
| 5,131
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'DMUserProfile.klass'
        db.add_column('accounts_dmuserprofile', 'klass', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['classes.LogicalClass'], null=True), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'DMUserProfile.klass'
        db.delete_column('accounts_dmuserprofile', 'klass_id')

    models = {
        'accounts.dmuserprofile': {
            'Meta': {'object_name': 'DMUserProfile'},
            'awards': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'english_band_score': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'english_band_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
            'ethnic': ('django.db.models.fields.IntegerField', [], {}),
            'gender': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'health': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '64', 'blank': 'True'}),
            'high_school': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32', 'blank': 'True'}),
            'hobby': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '128', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'id_number': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'join_date': ('django.db.models.fields.DateField', [], {}),
            'klass': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classes.LogicalClass']", 'null': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "'zh'", 'max_length': '5'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'major': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
            'mugshot': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'nickname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32', 'blank': 'True'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '24', 'blank': 'True'}),
            'political': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'privacy': ('django.db.models.fields.CharField', [], {'default': "'registered'", 'max_length': '15'}),
            'realname': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'role': ('django.db.models.fields.IntegerField', [], {}),
            'sign_line': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '128', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'classes.logicalclass': {
            'Meta': {'object_name': 'LogicalClass'},
            'date': ('django.db.models.fields.DateField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'major': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['classes.Major']"}),
            'seq': ('django.db.models.fields.IntegerField', [], {})
        },
        'classes.major': {
            'Meta': {'object_name': 'Major'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'shortname': ('django.db.models.fields.CharField', [], {'max_length': '4'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['accounts']
|
team-xue/xue
|
xue/accounts/migrations/0013_add_class_field.py
|
Python
|
bsd-3-clause
| 7,179
|
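The migration above adds a nullable `klass` foreign key to `DMUserProfile` and records the frozen ORM state South needs to replay it. As a rough sketch only — the repository's actual `accounts/models.py` is not shown here, so the import path and surrounding fields are assumptions — the model-side change it corresponds to would look something like this:

# Hypothetical sketch of the model definition behind the migration above.
# The LogicalClass import path and the elided fields are assumed, not taken
# from the repository.
from django.db import models

from classes.models import LogicalClass  # assumed module layout


class DMUserProfile(models.Model):
    # ... other profile fields elided ...
    klass = models.ForeignKey(LogicalClass, null=True)  # mirrors db.add_column above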
# -*- coding: utf-8 -*-
"""
proxy.py
~~~~~~~~
⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
:copyright: (c) 2013-present by Abhinav Singh and contributors.
:license: BSD, see LICENSE for more details.
"""
import unittest
from proxy.http.parser import PROXY_PROTOCOL_V2_SIGNATURE, ProxyProtocol
from proxy.http.exception import HttpProtocolException
class TestProxyProtocol(unittest.TestCase):

    def setUp(self) -> None:
        self.protocol = ProxyProtocol()

    def test_v1(self) -> None:
        self.protocol.parse(b'PROXY TCP6 ::1 ::1 64665 8899')
        self.assertEqual(self.protocol.version, 1)
        self.assertEqual(self.protocol.family, b'TCP6')
        self.assertEqual(self.protocol.source, (b'::1', 64665))
        self.assertEqual(self.protocol.destination, (b'::1', 8899))

    def test_v1_example_from_spec(self) -> None:
        self.protocol.parse(b'PROXY TCP4 192.168.0.1 192.168.0.11 56324 443')
        self.assertEqual(self.protocol.version, 1)
        self.assertEqual(self.protocol.family, b'TCP4')
        self.assertEqual(self.protocol.source, (b'192.168.0.1', 56324))
        self.assertEqual(self.protocol.destination, (b'192.168.0.11', 443))

    def test_v1_worst_case_ipv4_from_spec(self) -> None:
        self.protocol.parse(
            b'PROXY TCP4 255.255.255.255 255.255.255.255 65535 65535',
        )
        self.assertEqual(self.protocol.version, 1)
        self.assertEqual(self.protocol.family, b'TCP4')
        self.assertEqual(self.protocol.source, (b'255.255.255.255', 65535))
        self.assertEqual(
            self.protocol.destination,
            (b'255.255.255.255', 65535),
        )

    def test_v1_worst_case_ipv6_from_spec(self) -> None:
        self.protocol.parse(
            b'PROXY TCP6 ffff:f...f:ffff ffff:f...f:ffff 65535 65535',
        )
        self.assertEqual(self.protocol.version, 1)
        self.assertEqual(self.protocol.family, b'TCP6')
        self.assertEqual(self.protocol.source, (b'ffff:f...f:ffff', 65535))
        self.assertEqual(
            self.protocol.destination,
            (b'ffff:f...f:ffff', 65535),
        )

    def test_v1_worst_case_unknown_from_spec(self) -> None:
        self.protocol.parse(
            b'PROXY UNKNOWN ffff:f...f:ffff ffff:f...f:ffff 65535 65535',
        )
        self.assertEqual(self.protocol.version, 1)
        self.assertEqual(self.protocol.family, b'UNKNOWN')
        self.assertEqual(self.protocol.source, (b'ffff:f...f:ffff', 65535))
        self.assertEqual(
            self.protocol.destination,
            (b'ffff:f...f:ffff', 65535),
        )

    def test_v1_unknown_with_no_src_dst(self) -> None:
        self.protocol.parse(b'PROXY UNKNOWN')
        self.assertEqual(self.protocol.version, 1)
        self.assertEqual(self.protocol.family, b'UNKNOWN')
        self.assertEqual(self.protocol.source, None)
        self.assertEqual(self.protocol.destination, None)

    def test_v2_not_implemented(self) -> None:
        with self.assertRaises(NotImplementedError):
            self.protocol.parse(PROXY_PROTOCOL_V2_SIGNATURE)
        self.assertEqual(self.protocol.version, 2)

    def test_unknown_value_error(self) -> None:
        with self.assertRaises(HttpProtocolException):
            self.protocol.parse(PROXY_PROTOCOL_V2_SIGNATURE[:10])
        self.assertEqual(self.protocol.version, None)
|
abhinavsingh/proxy.py
|
tests/http/parser/test_proxy_protocol.py
|
Python
|
bsd-3-clause
| 3,504
|
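For readers unfamiliar with the wire format exercised by the tests above: a v1 PROXY protocol header is a single ASCII line of the form `PROXY <family> <src-addr> <dst-addr> <src-port> <dst-port>`. The sketch below uses only the `ProxyProtocol` API demonstrated in those tests; the addresses and ports are hypothetical.

# Minimal sketch, not part of the test suite; values are made up for illustration.
from proxy.http.parser import ProxyProtocol

raw_header = b'PROXY TCP4 10.0.0.1 10.0.0.2 51234 8899'  # hypothetical client/backend
protocol = ProxyProtocol()
protocol.parse(raw_header)
assert protocol.version == 1
assert protocol.family == b'TCP4'
assert protocol.source == (b'10.0.0.1', 51234)
assert protocol.destination == (b'10.0.0.2', 8899)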
#!/usr/bin/env python
#
# -----------------------------------------------------------------------------
# Copyright (c) 2015 Daniel Standage <daniel.standage@gmail.com>
# Copyright (c) 2015 Indiana University
#
# This file is part of genhub (http://github.com/standage/genhub) and is
# licensed under the BSD 3-clause license: see LICENSE.txt.
# -----------------------------------------------------------------------------
"""
Process a GFF3 file, and for any feature that has an ID but lacks a `Name`
attribute, copy the ID attribute to the Name attribute.
"""
from __future__ import print_function
import re
import sys
for line in sys.stdin:
    line = line.rstrip()
    idmatch = re.search("ID=([^;\n]+)", line)
    namematch = re.search("Name=([^;\n]+)", line)
    if idmatch and not namematch:
        line += ";Name=%s" % idmatch.group(1)
    print(line)
|
standage/genhub
|
scripts/genhub-namedup.py
|
Python
|
bsd-3-clause
| 868
|
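To make the transformation performed by genhub-namedup.py concrete, here is a worked example on a single GFF3 feature line. The line itself is invented for illustration; the regex logic is copied from the script above.

import re

# Hypothetical GFF3 feature line with an ID attribute but no Name attribute.
line = "scaffold1\tgenhub\tgene\t1000\t2000\t.\t+\t.\tID=gene0001"
idmatch = re.search("ID=([^;\n]+)", line)
namematch = re.search("Name=([^;\n]+)", line)
if idmatch and not namematch:
    line += ";Name=%s" % idmatch.group(1)
print(line)  # ends with ...ID=gene0001;Name=gene0001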
#!/usr/bin/env python
import unittest
import playercontext
class PlayerContextTestCase(unittest.TestCase):

    def test_FromString(self):
        src = open('testdata/context1.txt').read()
        pc = playercontext.PlayerContext.FromString(src)
        self.assertEqual(pc.balance, 95.03029999999998)
        self.assertEqual(pc.playerid, 100)
        self.assertEqual(pc.currentround, 13)
        self.assertEqual(len(pc.our_offered), 0)
        self.assertEqual(len(pc.their_offered), 1)
        self.assertEqual(len(pc.accepted), 0)
        self.assertEqual(len(pc.provided), 0)


if __name__ == '__main__':
    unittest.main()
|
compbrain/Athena-SCG-Bot
|
src/playercontext_test.py
|
Python
|
bsd-3-clause
| 593
|
try:
    from unittest.mock import patch, Mock, PropertyMock, MagicMock  # noqa: E501
except ImportError:
    from mock import patch, Mock, PropertyMock, MagicMock  # noqa: F401
|
PaesslerAG/django-performance-testing
|
tests/testapp/sixmock.py
|
Python
|
bsd-3-clause
| 178
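The shim above lets the test suite import its mock helpers from one place whether it runs on Python 3 (unittest.mock) or Python 2 with the mock backport. A hypothetical usage sketch follows; the import path assumes the repository root is on sys.path when tests run, which is not shown here.

import time

# Hypothetical consumer of the shim; only `patch` is exercised.
from tests.testapp.sixmock import patch

with patch('time.sleep') as fake_sleep:
    time.sleep(5)  # intercepted by the mock, returns immediately
    fake_sleep.assert_called_once_with(5)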
|